Columns: focal_method (string, lengths 13 to 60.9k) | test_case (string, lengths 25 to 109k)
@Override public void delete(final String path, final int version, final AsyncCallback.VoidCallback cb, final Object ctx) { final RetryCallback callback = new RetryCallback() { @Override protected void retry() { _log.info("Retry delete operation: path = " + path + " version = " + version); zkDelete(path, version, this, ctx); } @Override protected void processVoidResult(int cbRC, String cbPath, Object cbCtx) { cb.processResult(cbRC, cbPath, cbCtx); } }; zkDelete(path, version, callback, ctx); }
@Test public void testDelete() throws NoSuchMethodException { final RetryZooKeeper rzkPartialMock = createMockObject( RetryZooKeeper.class.getMethod("zkDelete", String.class, int.class, AsyncCallback.VoidCallback.class, Object.class)); // mock up zkDelete, which wrapper's ZooKeeper's delete rzkPartialMock.zkDelete( (String) EasyMock.anyObject(), EasyMock.anyInt(), (AsyncCallback.VoidCallback) EasyMock.anyObject(), EasyMock.anyObject()); // first try, "connection loss" expectDeleteCallbackWithCode(_connectionLossRC); // second try, "no node" expectDeleteCallbackWithCode(KeeperException.Code.NONODE.intValue()); EasyMock.replay(rzkPartialMock); rzkPartialMock.delete(_dummyPath, _dummyVersion, _dummyVoidCallback, _dummyCtx); EasyMock.verify(rzkPartialMock); }
@JsonIgnore public Map<String, StepRuntimeState> decodeStepOverview(Map<String, StepTransition> runtimeDag) { AtomicLong ordinal = new AtomicLong(0); Map<Long, String> ordinalStepMap = runtimeDag.keySet().stream() .collect(Collectors.toMap(s -> ordinal.incrementAndGet(), Function.identity())); Map<String, StepRuntimeState> states = new LinkedHashMap<>(); stepOverview.forEach( (status, summary) -> { if (summary.getSteps() != null) { summary .getSteps() .forEach( stepInfo -> { String stepId = Checks.notNull( ordinalStepMap.remove(stepInfo.get(0)), "cannot find step id for stepInfo [%s]", stepInfo); StepRuntimeState state = new StepRuntimeState(); state.setStatus(status); state.setStartTime(stepInfo.get(1)); state.setEndTime(stepInfo.get(2)); states.put(stepId, state); }); } }); return states; }
@Test public void testDecodeStepOverview() throws Exception { WorkflowRuntimeOverview overview = loadObject( "fixtures/instances/sample-workflow-runtime-overview.json", WorkflowRuntimeOverview.class); assertFalse(overview.existsNotCreatedStep()); WorkflowInstance instance = loadObject( "fixtures/instances/sample-workflow-instance-created.json", WorkflowInstance.class); Map<String, StepRuntimeState> actual = overview.decodeStepOverview(instance.getRuntimeDag()); assertEquals(1, overview.decodeStepOverview(instance.getRuntimeDag()).size()); assertEquals(StepInstance.Status.RUNNING, actual.get("job3").getStatus()); assertEquals(1647977244273L, actual.get("job3").getStartTime().longValue()); }
static RuntimeException handleException(Throwable e) { if (e instanceof OutOfMemoryError error) { OutOfMemoryErrorDispatcher.onOutOfMemory(error); throw error; } if (e instanceof Error error) { throw error; } if (e instanceof HazelcastSerializationException exception) { return exception; } if (e instanceof HazelcastInstanceNotActiveException exception) { return exception; } if (e instanceof HazelcastClientNotActiveException exception) { return exception; } return new HazelcastSerializationException(e); }
@Test(expected = Error.class) public void testHandleException_OOME() { SerializationUtil.handleException(new OutOfMemoryError()); }
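The test above covers only the OutOfMemoryError branch of handleException. A minimal hedged companion sketch (assuming the same SerializationUtil under test and statically imported JUnit 4 asserts) exercises the fall-through branch, where any other non-Error throwable is returned wrapped:

@Test
public void testHandleException_wrapsCheckedException() {
    // Neither an Error nor one of the pass-through Hazelcast exceptions,
    // so the focal method's final return wraps it.
    RuntimeException result = SerializationUtil.handleException(new java.io.IOException("boom"));
    assertTrue(result instanceof HazelcastSerializationException);
    assertSame(java.io.IOException.class, result.getCause().getClass());
}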
public Resource getResource(Resource clusterResource) { if (percentages != null && clusterResource != null) { long memory = (long) (clusterResource.getMemorySize() * percentages[0]); int vcore = (int) (clusterResource.getVirtualCores() * percentages[1]); Resource res = Resource.newInstance(memory, vcore); ResourceInformation[] clusterInfo = clusterResource.getResources(); for (int i = 2; i < clusterInfo.length; i++) { res.setResourceValue(i, (long)(clusterInfo[i].getValue() * percentages[i])); } return res; } else { return resource; } }
@Test public void testGetResourceWithPercentage() { ConfigurableResource configurableResource = new ConfigurableResource(new double[] {0.5, 0.5}); assertEquals( configurableResource.getResource(clusterResource).getMemorySize(), 1024); assertEquals( configurableResource.getResource(clusterResource).getVirtualCores(), 1); assertNull("The absolute resource should be null since object" + " configurableResource is initialized with percentages", configurableResource.getResource()); assertNull("The absolute resource should be null since cluster resource" + " is null", configurableResource.getResource(null)); }
@Override public BlueRun getLatestRun() { Run run = job.getLastBuild(); if(run instanceof FreeStyleBuild){ BlueRun blueRun = new FreeStyleRunImpl((FreeStyleBuild) run, this, organization); return new FreeStyleRunSummary(blueRun, run, this, organization); } return super.getLatestRun(); }
@Test public void findModernRun() throws Exception { FreeStyleProject freestyle = Mockito.spy(j.createProject(FreeStyleProject.class, "freestyle")); FreeStyleBuild build1 = Mockito.mock(FreeStyleBuild.class); FreeStyleBuild build2 = Mockito.mock(FreeStyleBuild.class); Mockito.when(build1.getParent()).thenReturn(freestyle); Mockito.when(build1.getNextBuild()).thenReturn(build2); Mockito.when(build2.getParent()).thenReturn(freestyle); Mockito.when(build2.getPreviousBuild()).thenReturn(build1); RunList<FreeStyleBuild> runs = RunList.fromRuns(Arrays.asList(build1, build2)); Mockito.doReturn(runs).when(freestyle).getBuilds(); Mockito.doReturn(build2).when(freestyle).getLastBuild(); FreeStylePipeline freeStylePipeline = (FreeStylePipeline) BluePipelineFactory.resolve(freestyle); assertNotNull(freeStylePipeline); BlueRun blueRun = freeStylePipeline.getLatestRun(); assertNotNull(blueRun); Links links = blueRun.getLinks(); assertNotNull(links); assertNotNull(links.get("self")); }
public static SqlPrimitiveType of(final String typeName) { switch (typeName.toUpperCase()) { case INT: return SqlPrimitiveType.of(SqlBaseType.INTEGER); case VARCHAR: return SqlPrimitiveType.of(SqlBaseType.STRING); default: try { final SqlBaseType sqlType = SqlBaseType.valueOf(typeName.toUpperCase()); return SqlPrimitiveType.of(sqlType); } catch (final IllegalArgumentException e) { throw new SchemaException("Unknown primitive type: " + typeName, e); } } }
@Test public void shouldThrowOnMapType() { // When: final Exception e = assertThrows( SchemaException.class, () -> SqlPrimitiveType.of(SqlBaseType.MAP) ); // Then: assertThat(e.getMessage(), containsString("Invalid primitive type: MAP")); }
@Override public DbEntitiesCatalog get() { final Stopwatch stopwatch = Stopwatch.createStarted(); final DbEntitiesCatalog catalog = scan(packagesToScan, packagesToExclude, chainingClassLoader); stopwatch.stop(); LOG.info("{} entities have been scanned and added to DB Entity Catalog, it took {}", catalog.size(), stopwatch); return catalog; }
@Test void testScansEntitiesWithDefaultReadPermissionFieldProperly() { DbEntitiesScanner scanner = new DbEntitiesScanner(new String[]{"org.graylog2.cluster"}, new String[]{}); final DbEntitiesCatalog dbEntitiesCatalog = scanner.get(); final DbEntityCatalogEntry entryByCollectionName = dbEntitiesCatalog.getByCollectionName("nodes").get(); assertEquals(new DbEntityCatalogEntry("nodes", "node_id", ServerNodeEntity.class, NOBODY_ALLOWED), entryByCollectionName); }
@Override public String encrypt(final Object plainValue, final AlgorithmSQLContext algorithmSQLContext) { return digestAlgorithm.digest(plainValue); }
@Test void assertEncrypt() { assertThat(encryptAlgorithm.encrypt("test", mock(AlgorithmSQLContext.class)), is("098f6bcd4621d373cade4e832627b4f6")); }
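The expected value in assertEncrypt is the MD5 digest of "test". A hedged sketch using only the JDK (no project classes assumed beyond the Hamcrest asserts already in use) reproduces that constant:

@Test
void assertExpectedDigestMatchesJdkMd5() throws Exception {
    java.security.MessageDigest md5 = java.security.MessageDigest.getInstance("MD5");
    byte[] digest = md5.digest("test".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
        hex.append(String.format("%02x", b)); // two lower-case hex chars per byte
    }
    assertThat(hex.toString(), is("098f6bcd4621d373cade4e832627b4f6"));
}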
public static RecordBuilder<Schema> record(String name) { return builder().record(name); }
@Test void record() { Schema schema = SchemaBuilder.record("myrecord").namespace("org.example").aliases("oldrecord").fields().name("f0") .aliases("f0alias").type().stringType().noDefault().name("f1").doc("This is f1").type().longType().noDefault() .name("f2").type().nullable().booleanType().booleanDefault(true).name("f3").type().unionOf().nullType().and() .booleanType().endUnion().nullDefault().endRecord(); assertEquals("myrecord", schema.getName()); assertEquals("org.example", schema.getNamespace()); assertEquals("org.example.oldrecord", schema.getAliases().iterator().next()); assertFalse(schema.isError()); List<Schema.Field> fields = schema.getFields(); assertEquals(4, fields.size()); assertEquals(new Schema.Field("f0", Schema.create(Schema.Type.STRING)), fields.get(0)); assertTrue(fields.get(0).aliases().contains("f0alias")); assertEquals(new Schema.Field("f1", Schema.create(Schema.Type.LONG), "This is f1"), fields.get(1)); List<Schema> types = new ArrayList<>(); types.add(Schema.create(Schema.Type.BOOLEAN)); types.add(Schema.create(Schema.Type.NULL)); Schema optional = Schema.createUnion(types); assertEquals(new Schema.Field("f2", optional, null, true), fields.get(2)); List<Schema> types2 = new ArrayList<>(); types2.add(Schema.create(Schema.Type.NULL)); types2.add(Schema.create(Schema.Type.BOOLEAN)); Schema optional2 = Schema.createUnion(types2); assertNotEquals(new Schema.Field("f3", optional2, null, (Object) null), fields.get(3)); assertEquals(new Schema.Field("f3", optional2, null, Schema.Field.NULL_DEFAULT_VALUE), fields.get(3)); }
@Override public void transform(Message message, DataType fromType, DataType toType) { final Optional<ValueRange> valueRange = getValueRangeBody(message); String range = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A:A").toString(); String majorDimension = message .getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "majorDimension", RangeCoordinate.DIMENSION_ROWS).toString(); String spreadsheetId = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "spreadsheetId", "").toString(); String[] columnNames = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "columnNames", "A").toString().split(","); boolean splitResults = Boolean .parseBoolean(message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "splitResults", "false").toString()); if (valueRange.isPresent()) { message.setBody( transformFromValueRangeModel(message, valueRange.get(), spreadsheetId, range, majorDimension, columnNames)); } else if (splitResults) { message.setBody(transformFromSplitValuesModel(message, spreadsheetId, range, majorDimension, columnNames)); } else { String valueInputOption = message.getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption", "USER_ENTERED").toString(); message.setBody( transformToValueRangeModel(message, spreadsheetId, range, majorDimension, valueInputOption, columnNames)); } }
@Test public void testTransformToValueRangeAutoFillColumnValues() throws Exception { Exchange inbound = new DefaultExchange(camelContext); inbound.getMessage().setHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "range", "A1:C2"); List<String> model = Arrays.asList("{" + "\"spreadsheetId\": \"" + spreadsheetId + "\"," + "\"A\": \"a1\"," + "\"C\": \"c1\"" + "}", "{" + "\"spreadsheetId\": \"" + spreadsheetId + "\"," + "\"A\": \"a2\"," + "\"B\": \"b2\"" + "}"); inbound.getMessage().setBody(model); transformer.transform(inbound.getMessage(), DataType.ANY, DataType.ANY); Assertions.assertEquals("A1:C2", inbound.getMessage().getHeader(GoogleSheetsStreamConstants.RANGE)); Assertions.assertEquals(RangeCoordinate.DIMENSION_ROWS, inbound.getMessage().getHeader(GoogleSheetsStreamConstants.MAJOR_DIMENSION)); Assertions.assertEquals("USER_ENTERED", inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "valueInputOption")); ValueRange valueRange = (ValueRange) inbound.getMessage().getHeader(GoogleSheetsConstants.PROPERTY_PREFIX + "values"); Assertions.assertEquals(2L, valueRange.getValues().size()); Assertions.assertEquals(3L, valueRange.getValues().get(0).size()); Assertions.assertEquals("a1", valueRange.getValues().get(0).get(0)); Assertions.assertNull(valueRange.getValues().get(0).get(1)); Assertions.assertEquals("c1", valueRange.getValues().get(0).get(2)); Assertions.assertEquals(3L, valueRange.getValues().get(1).size()); Assertions.assertEquals("a2", valueRange.getValues().get(1).get(0)); Assertions.assertEquals("b2", valueRange.getValues().get(1).get(1)); Assertions.assertNull(valueRange.getValues().get(1).get(2)); }
private static boolean canSatisfyConstraints(ApplicationId appId, PlacementConstraint constraint, SchedulerNode node, AllocationTagsManager atm, Optional<DiagnosticsCollector> dcOpt) throws InvalidAllocationTagsQueryException { if (constraint == null) { LOG.debug("Constraint is found empty during constraint validation for" + " app:{}", appId); return true; } // If this is a single constraint, transform to SingleConstraint SingleConstraintTransformer singleTransformer = new SingleConstraintTransformer(constraint); constraint = singleTransformer.transform(); AbstractConstraint sConstraintExpr = constraint.getConstraintExpr(); // TODO handle other type of constraints, e.g CompositeConstraint if (sConstraintExpr instanceof SingleConstraint) { SingleConstraint single = (SingleConstraint) sConstraintExpr; return canSatisfySingleConstraint(appId, single, node, atm, dcOpt); } else if (sConstraintExpr instanceof And) { And and = (And) sConstraintExpr; return canSatisfyAndConstraint(appId, and, node, atm, dcOpt); } else if (sConstraintExpr instanceof Or) { Or or = (Or) sConstraintExpr; return canSatisfyOrConstraint(appId, or, node, atm, dcOpt); } else { throw new InvalidAllocationTagsQueryException( "Unsupported type of constraint: " + sConstraintExpr.getClass().getSimpleName()); } }
@Test public void testRackAntiAffinityAssignment() throws InvalidAllocationTagsQueryException { AllocationTagsManager tm = new AllocationTagsManager(rmContext); PlacementConstraintManagerService pcm = new MemoryPlacementConstraintManager(); // Register App1 with anti-affinity constraint map pcm.registerApplication(appId1, constraintMap2); /** * Place container: * Node0:123 (Rack1): * container_app1_1 (hbase-rs) */ RMNode n0_r1 = rmNodes.get(0); RMNode n1_r1 = rmNodes.get(1); RMNode n2_r2 = rmNodes.get(2); RMNode n3_r2 = rmNodes.get(3); // 1 Containers on Node0-Rack1 with allocationTag 'hbase-rs' ContainerId hbase_m = ContainerId .newContainerId(ApplicationAttemptId.newInstance(appId1, 0), 0); tm.addContainer(n0_r1.getNodeID(), hbase_m, ImmutableSet.of("hbase-rs")); SchedulerNode schedulerNode0 = newSchedulerNode(n0_r1.getHostName(), n0_r1.getRackName(), n0_r1.getNodeID()); SchedulerNode schedulerNode1 = newSchedulerNode(n1_r1.getHostName(), n1_r1.getRackName(), n1_r1.getNodeID()); SchedulerNode schedulerNode2 = newSchedulerNode(n2_r2.getHostName(), n2_r2.getRackName(), n2_r2.getNodeID()); SchedulerNode schedulerNode3 = newSchedulerNode(n3_r2.getHostName(), n3_r2.getRackName(), n3_r2.getNodeID()); // 'zk' placement on Rack1 should FAIL Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(appId1, createSchedulingRequest(sourceTag2), schedulerNode0, pcm, tm)); Assert.assertFalse(PlacementConstraintsUtil.canSatisfyConstraints(appId1, createSchedulingRequest(sourceTag2), schedulerNode1, pcm, tm)); // SUCCEED on the rest of the RACKs Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(appId1, createSchedulingRequest(sourceTag2), schedulerNode2, pcm, tm)); Assert.assertTrue(PlacementConstraintsUtil.canSatisfyConstraints(appId1, createSchedulingRequest(sourceTag2), schedulerNode3, pcm, tm)); }
@VisibleForTesting boolean isOAuth2Auth(@Nullable Credential credential) { return credential != null && credential.isOAuth2RefreshToken(); }
@Test public void isOAuth2Auth_oauth2() { Credential credential = Credential.from("<token>", "oauth2_token"); Assert.assertTrue(registryAuthenticator.isOAuth2Auth(credential)); }
public void prioritize(T element) { final Iterator<T> iterator = deque.iterator(); // Already prioritized? Then, do not reorder elements. for (int i = 0; i < numPriorityElements && iterator.hasNext(); i++) { if (iterator.next() == element) { return; } } // If the next non-priority element is the given element, we can simply include it in the // priority section if (iterator.hasNext() && iterator.next() == element) { numPriorityElements++; return; } // Remove the given element. while (iterator.hasNext()) { if (iterator.next() == element) { iterator.remove(); break; } } addPriorityElement(element); }
@Test void testPrioritize() { final PrioritizedDeque<Integer> deque = new PrioritizedDeque<>(); deque.add(0); deque.add(1); deque.add(2); deque.add(3); deque.prioritize(3); assertThat(deque.asUnmodifiableCollection()).containsExactly(3, 0, 1, 2); }
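prioritize short-circuits when the element is already inside the priority section. A hedged follow-up case (same PrioritizedDeque API and AssertJ style as the test above) makes that no-op explicit:

@Test
void testPrioritizeAlreadyPrioritizedKeepsOrder() {
    final PrioritizedDeque<Integer> deque = new PrioritizedDeque<>();
    deque.add(0);
    deque.add(1);
    deque.prioritize(1); // moves 1 into the priority section: [1, 0]
    deque.prioritize(1); // already prioritized, so the first loop returns early
    assertThat(deque.asUnmodifiableCollection()).containsExactly(1, 0);
}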
@Override public Dataset<Row> apply( final JavaSparkContext jsc, final SparkSession sparkSession, final Dataset<Row> rowDataset, final TypedProperties props) { final String sqlFile = getStringWithAltKeys(props, SqlTransformerConfig.TRANSFORMER_SQL_FILE); final FileSystem fs = HadoopFSUtils.getFs(sqlFile, jsc.hadoopConfiguration(), true); // tmp table name doesn't like dashes final String tmpTable = TMP_TABLE.concat(UUID.randomUUID().toString().replace("-", "_")); LOG.info("Registering tmp table: {}", tmpTable); rowDataset.createOrReplaceTempView(tmpTable); try (final Scanner scanner = new Scanner(fs.open(new Path(sqlFile)), "UTF-8")) { Dataset<Row> rows = null; // each sql statement is separated with semicolon hence set that as delimiter. scanner.useDelimiter(";"); LOG.info("SQL Query for transformation:"); while (scanner.hasNext()) { String sqlStr = scanner.next(); sqlStr = sqlStr.replaceAll(SRC_PATTERN, tmpTable).trim(); if (!sqlStr.isEmpty()) { LOG.info(sqlStr); // overwrite the same dataset object until the last statement then return. rows = sparkSession.sql(sqlStr); } } return rows; } catch (final IOException ioe) { throw new HoodieTransformExecutionException("Error reading transformer SQL file.", ioe); } finally { sparkSession.catalog().dropTempView(tmpTable); } }
@Test public void testSqlFileBasedTransformerInvalidSQL() throws IOException { UtilitiesTestBase.Helpers.copyToDFS( "streamer-config/sql-file-transformer-invalid.sql", UtilitiesTestBase.storage, UtilitiesTestBase.basePath + "/sql-file-transformer-invalid.sql"); // Test if the SQL file based transformer works as expected for the invalid SQL statements. props.setProperty( "hoodie.streamer.transformer.sql.file", UtilitiesTestBase.basePath + "/sql-file-transformer-invalid.sql"); assertThrows( ParseException.class, () -> sqlFileTransformer.apply(jsc, sparkSession, inputDatasetRows, props)); }
public static Expression generateFilterExpression(SearchArgument sarg) { return translate(sarg.getExpression(), sarg.getLeaves()); }
@Test public void testIsNullOperand() { SearchArgument.Builder builder = SearchArgumentFactory.newBuilder(); SearchArgument arg = builder.startAnd().isNull("salary", PredicateLeaf.Type.LONG).end().build(); UnboundPredicate expected = Expressions.isNull("salary"); UnboundPredicate actual = (UnboundPredicate) HiveIcebergFilterFactory.generateFilterExpression(arg); assertThat(expected.op()).isEqualTo(actual.op()); assertThat(expected.ref().name()).isEqualTo(actual.ref().name()); }
@Override public String toString() { return getClass().getSimpleName() + "." + root.getLocalName() + "(id=" + id + ")"; }
@Test public void testAllowAndDisallowSnapshot() throws Exception { final Path dir = new Path("/dir"); final Path file0 = new Path(dir, "file0"); final Path file1 = new Path(dir, "file1"); DFSTestUtil.createFile(hdfs, file0, BLOCKSIZE, REPLICATION, seed); DFSTestUtil.createFile(hdfs, file1, BLOCKSIZE, REPLICATION, seed); INodeDirectory dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertFalse(dirNode.isSnapshottable()); hdfs.allowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertTrue(dirNode.isSnapshottable()); // call allowSnapshot again hdfs.allowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertTrue(dirNode.isSnapshottable()); // disallowSnapshot on dir hdfs.disallowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertFalse(dirNode.isSnapshottable()); // do it again hdfs.disallowSnapshot(dir); dirNode = fsdir.getINode4Write(dir.toString()).asDirectory(); assertFalse(dirNode.isSnapshottable()); // same process on root final Path root = new Path("/"); INodeDirectory rootNode = fsdir.getINode4Write(root.toString()) .asDirectory(); assertTrue(rootNode.isSnapshottable()); // root is snapshottable dir, but with 0 snapshot quota assertEquals(0, rootNode.getDirectorySnapshottableFeature() .getSnapshotQuota()); hdfs.allowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(DirectorySnapshottableFeature.SNAPSHOT_QUOTA_DEFAULT, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // call allowSnapshot again hdfs.allowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(DirectorySnapshottableFeature.SNAPSHOT_QUOTA_DEFAULT, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // disallowSnapshot on dir hdfs.disallowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(0, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); // do it again hdfs.disallowSnapshot(root); rootNode = fsdir.getINode4Write(root.toString()).asDirectory(); assertTrue(rootNode.isSnapshottable()); assertEquals(0, rootNode.getDirectorySnapshottableFeature().getSnapshotQuota()); }
public static boolean isWebService(Optional<String> serviceName) { return serviceName.isPresent() && IS_PLAIN_HTTP_BY_KNOWN_WEB_SERVICE_NAME.containsKey( Ascii.toLowerCase(serviceName.get())); }
@Test public void isWebService_whenHasAtLeastOneHttpMethod_returnsTrue() { assertThat( NetworkServiceUtils.isWebService( NetworkService.newBuilder() .setServiceName("irrelevantService") .addSupportedHttpMethods("IrrelevantMethodName") .build())) .isTrue(); }
@Override public Iterator<IndexKeyEntries> getSqlRecordIteratorBatch(@Nonnull Comparable value, boolean descending) { return getSqlRecordIteratorBatch(value, descending, null); }
@Test public void getRecordsUsingExactValueInequalityAscending() { var expectedOrder = List.of(1, 4, 7, 2, 5, 8); var actual = store.getSqlRecordIteratorBatch(Comparison.GREATER, 0, false); assertResult(expectedOrder, actual); }
@Override public List<Catalogue> sort(List<Catalogue> catalogueTree, SortTypeEnum sortTypeEnum) { log.debug( "sort catalogue tree based on creation time. catalogueTree: {}, sortTypeEnum: {}", catalogueTree, sortTypeEnum); return recursionSortCatalogues(catalogueTree, sortTypeEnum); }
@Test public void sortEmptyTest() { List<Catalogue> catalogueTree = Lists.newArrayList(); SortTypeEnum sortTypeEnum = SortTypeEnum.ASC; List<Catalogue> resultList = catalogueTreeSortCreateTimeStrategyTest.sort(catalogueTree, sortTypeEnum); assertEquals(Lists.newArrayList(), resultList); }
public static <Req extends RpcRequest> Matcher<Req> serviceEquals(String service) { if (service == null) throw new NullPointerException("service == null"); if (service.isEmpty()) throw new NullPointerException("service is empty"); return new RpcServiceEquals<Req>(service); }
@Test void serviceEquals_unmatched_null() { assertThat(serviceEquals("grpc.health.v1.Health").matches(request)).isFalse(); }
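serviceEquals also rejects null and empty input up front. A hedged sketch of those guard cases (AssertJ assumed, matching the assertion style above; the messages come straight from the focal method):

@Test
void serviceEquals_rejectsNullAndEmpty() {
    assertThatThrownBy(() -> serviceEquals(null))
        .isInstanceOf(NullPointerException.class).hasMessage("service == null");
    // The empty-string guard also throws NullPointerException, with its own message.
    assertThatThrownBy(() -> serviceEquals(""))
        .isInstanceOf(NullPointerException.class).hasMessage("service is empty");
}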
@Override public void register(@NonNull ThreadPoolPlugin plugin) { mainLock.runWithWriteLock(() -> { String id = plugin.getId(); Assert.isTrue(!isRegistered(id), "The plugin with id [" + id + "] has been registered"); registeredPlugins.put(id, plugin); forQuickIndexes(quickIndex -> quickIndex.addIfPossible(plugin)); plugin.start(); }); }
@Test public void testGetAllPluginRuntimes() { manager.register(new TestExecuteAwarePlugin()); manager.register(new TestRejectedAwarePlugin()); Assert.assertEquals(2, manager.getAllPluginRuntimes().size()); }
public int getBytes(int index, byte[] dst, int off, int len) { int count = Math.min(len, size - index); if (buf.hasArray()) { System.arraycopy(buf.array(), buf.arrayOffset() + index, dst, off, count); } else { ByteBuffer dup = buf.duplicate(); dup.position(index); dup.get(dst, off, count); } return count; }
@Test public void testGetBytesLength() { final Msg msg = initMsg(); final byte[] dst = new byte[5]; msg.getBytes(2, dst, 0, 2); assertThat(dst, is(new byte[] { 2, 3, 0, 0, 0 })); }
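getBytes clamps the copy length to the bytes remaining after index. A hedged extra case (reusing the initMsg fixture above and assuming jeromq's Msg exposes a size() accessor):

@Test
public void testGetBytesClampedToRemaining() {
    final Msg msg = initMsg();
    final byte[] dst = new byte[8];
    // Request more bytes than remain after the index; only size - index are copied.
    final int copied = msg.getBytes(msg.size() - 2, dst, 0, 8);
    assertThat(copied, is(2));
}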
public static InetAddress getLocalhost() { final LinkedHashSet<InetAddress> localAddressList = localAddressList(address -> { // not a loopback address, i.e. not in 127.*.*.* return false == address.isLoopbackAddress() // must be an IPv4 address && address instanceof Inet4Address; }); if (CollUtil.isNotEmpty(localAddressList)) { InetAddress address2 = null; for (InetAddress inetAddress : localAddressList) { if (false == inetAddress.isSiteLocalAddress()) { // not a site-local address, i.e. not in 10.0.0.0 ~ 10.255.255.255, 172.16.0.0 ~ 172.31.255.255, 192.168.0.0 ~ 192.168.255.255 return inetAddress; } else if (null == address2) { address2 = inetAddress; } } if (null != address2) { return address2; } } try { return InetAddress.getLocalHost(); } catch (UnknownHostException e) { // ignore } return null; }
@Test public void getLocalHostTest() { assertNotNull(NetUtil.getLocalhost()); }
@Override public Path move(final Path file, final Path renamed, final TransferStatus status, final Delete.Callback delete, final ConnectionCallback callback) throws BackgroundException { try { if(status.isExists()) { if(log.isWarnEnabled()) { log.warn(String.format("Delete file %s to be replaced with %s", renamed, file)); } new DeepboxTrashFeature(session, fileid).delete(Collections.singletonList(renamed), callback, delete); } final String sourceId = fileid.getFileId(file); final NodeMove nodeMove = new NodeMove(); final String targetParentId = fileid.getFileId(renamed.getParent()); nodeMove.setTargetParentNodeId(targetParentId); new CoreRestControllerApi(session.getClient()).moveNode(nodeMove, sourceId); final NodeUpdate nodeUpdate = new NodeUpdate(); nodeUpdate.setName(renamed.getName()); new CoreRestControllerApi(session.getClient()).updateNode(nodeUpdate, sourceId); fileid.cache(file, null); fileid.cache(renamed, sourceId); return renamed.withAttributes(file.attributes().withFileId(sourceId)); } catch(ApiException e) { throw new DeepboxExceptionMappingService(fileid).map("Cannot rename {0}", e, file); } }
@Test public void testMoveOverrideFile() throws Exception { final DeepboxIdProvider fileid = new DeepboxIdProvider(session); final Path documents = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Documents/", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path trash = new Path("/ORG 4 - DeepBox Desktop App/ORG3:Box1/Trash", EnumSet.of(Path.Type.directory, Path.Type.volume)); final Path test = new DeepboxTouchFeature(session, fileid).touch( new Path(documents, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final Path target = new DeepboxTouchFeature(session, fileid).touch( new Path(documents, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file)), new TransferStatus()); final Path targetInTrash = new Path(trash, target.getName(), target.getType()); final PathAttributes originalTestAttributes = new DeepboxAttributesFinderFeature(session, fileid).find(test); final PathAttributes originalTargetAttributes = new DeepboxAttributesFinderFeature(session, fileid).find(target); new DeepboxMoveFeature(session, fileid).move(test, target, new TransferStatus().exists(true), new Delete.DisabledCallback(), new DisabledConnectionCallback()); assertFalse(new DeepboxFindFeature(session, fileid).find(test.withAttributes(new PathAttributes()))); assertTrue(new DeepboxFindFeature(session, fileid).find(target.withAttributes(new PathAttributes()))); assertTrue(new DeepboxFindFeature(session, fileid).find(targetInTrash)); final PathAttributes overriddenTargetAttributes = new DeepboxAttributesFinderFeature(session, fileid).find(target.withAttributes(new PathAttributes())); assertNotNull(originalTestAttributes.getFileId()); assertEquals(originalTestAttributes.getFileId(), overriddenTargetAttributes.getFileId()); assertEquals(originalTestAttributes.getModificationDate(), overriddenTargetAttributes.getModificationDate()); assertEquals(originalTestAttributes.getChecksum(), overriddenTargetAttributes.getChecksum()); final PathAttributes trashedTargetAttributes = new DeepboxAttributesFinderFeature(session, fileid).find(targetInTrash.withAttributes(new PathAttributes())); assertNotNull(originalTargetAttributes.getFileId()); assertEquals(originalTargetAttributes.getFileId(), trashedTargetAttributes.getFileId()); assertEquals(originalTargetAttributes.getModificationDate(), trashedTargetAttributes.getModificationDate()); assertEquals(originalTargetAttributes.getChecksum(), trashedTargetAttributes.getChecksum()); new DeepboxDeleteFeature(session, fileid).delete(Collections.singletonList(targetInTrash), new DisabledLoginCallback(), new Delete.DisabledCallback()); new DeepboxDeleteFeature(session, fileid).delete(Collections.singletonList(target), new DisabledLoginCallback(), new Delete.DisabledCallback()); }
@Override public long estimate() { final double raw = (1 / computeE()) * alpha() * m * m; return applyRangeCorrection(raw); }
@RequireAssertEnabled @Test(expected = AssertionError.class) public void testAlpha_withGivenZeroAsInvalidMemoryFootprint() { DenseHyperLogLogEncoder encoder = new DenseHyperLogLogEncoder(0); encoder.estimate(); }
public static void main(String[] args) { App app = new App(App.REST_API_URL, HttpClient.newHttpClient()); app.createDynamicProxy(); app.callMethods(); }
@Test void shouldRunAppWithoutExceptions() { assertDoesNotThrow(() -> App.main(null)); }
@Override @Nullable public Object convert(@Nullable String value) { if (isNullOrEmpty(value)) { return null; } LOG.debug("Trying to parse date <{}> with pattern <{}>, locale <{}>, and timezone <{}>.", value, dateFormat, locale, timeZone); final DateTimeFormatter formatter; if (containsTimeZone) { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale); } else { formatter = DateTimeFormat .forPattern(dateFormat) .withDefaultYear(YearMonth.now(timeZone).getYear()) .withLocale(locale) .withZone(timeZone); } return DateTime.parse(value, formatter); }
@Test public void testBasicConvert() throws Exception { final DateConverter converter = new DateConverter(config("YYYY MMM dd HH:mm:ss", null, null)); final DateTime result = (DateTime) converter.convert("2013 Aug 15 23:15:16"); assertThat(result) .isNotNull() .isEqualTo("2013-08-15T23:15:16.000Z"); }
public RegistryBuilder appendParameters(Map<String, String> appendParameters) { this.parameters = appendParameters(parameters, appendParameters); return getThis(); }
@Test void appendParameters() { Map<String, String> source = new HashMap<>(); source.put("default.num", "one"); source.put("num", "ONE"); RegistryBuilder builder = new RegistryBuilder(); builder.appendParameters(source); Map<String, String> parameters = builder.build().getParameters(); Assertions.assertTrue(parameters.containsKey("default.num")); Assertions.assertEquals("ONE", parameters.get("num")); }
@SuppressWarnings("unchecked") public Output run(RunContext runContext) throws Exception { Logger logger = runContext.logger(); try (HttpClient client = this.client(runContext, this.method)) { HttpRequest<String> request = this.request(runContext); HttpResponse<String> response; try { response = client .toBlocking() .exchange(request, Argument.STRING, Argument.STRING); // check that the string is a valid Unicode string if (response.getBody().isPresent()) { OptionalInt illegalChar = response.body().chars().filter(c -> !Character.isDefined(c)).findFirst(); if (illegalChar.isPresent()) { throw new IllegalArgumentException("Illegal unicode code point in request body: " + illegalChar.getAsInt() + ", the Request task only support valid Unicode strings as body.\n" + "You can try using the Download task instead."); } } } catch (HttpClientResponseException e) { if (!allowFailed) { throw e; } //noinspection unchecked response = (HttpResponse<String>) e.getResponse(); } logger.debug("Request '{}' with the response code '{}'", request.getUri(), response.getStatus().getCode()); return this.output(runContext, request, response); } }
@Test void run() throws Exception { try ( ApplicationContext applicationContext = ApplicationContext.run(); EmbeddedServer server = applicationContext.getBean(EmbeddedServer.class).start(); ) { Request task = Request.builder() .id(RequestTest.class.getSimpleName()) .type(RequestTest.class.getName()) .uri(server.getURL().toString() + "/hello") .build(); RunContext runContext = TestsUtils.mockRunContext(this.runContextFactory, task, ImmutableMap.of()); Request.Output output = task.run(runContext); assertThat(output.getBody(), is("{ \"hello\": \"world\" }")); assertThat(output.getEncryptedBody(), nullValue()); assertThat(output.getCode(), is(200)); } }
public VersionMatchResult matches(DeploymentInfo info) { // Skip if no manifest configuration if(info.getManifest() == null || info.getManifest().size() == 0) { return VersionMatchResult.SKIPPED; } for (ManifestInfo manifest: info.getManifest()) { VersionMatchResult result = match(manifest); if(VersionMatchResult.MATCHED.equals(result)){ LOGGER.debug("Matched {} with {}", this, manifest); return VersionMatchResult.MATCHED; } if(VersionMatchResult.REJECTED.equals(result)){ LOGGER.debug("Rejected {} with {}", this, manifest); return VersionMatchResult.REJECTED; } } // There were no matches (maybe another matcher will pass) return VersionMatchResult.SKIPPED; }
@Test public void testSkipped() throws IOException { Set<MavenInfo> maven = new HashSet<MavenInfo>(); ManifestInfo manifest = new ManifestInfo(new java.util.jar.Manifest(this.getClass().getResourceAsStream("/org/hotswap/agent/versions/matcher/TEST.MF"))); DeploymentInfo info = new DeploymentInfo(maven, Collections.singleton(manifest)); System.err.println(info); PluginMatcher p = new PluginMatcher(PluginMatcherTest.class); assertEquals("Skipped Matching",VersionMatchResult.SKIPPED, p.matches(info)); }
@Override public Iterable<MappingEntry> getMappingEntries(Type type, DeviceId deviceId) { return store.getMappingEntries(type, deviceId); }
@Test public void getMappingEntries() { assertTrue("Store should be empty", Sets.newHashSet( service.getMappingEntries(MAP_DATABASE, LISP_DID)).isEmpty()); addMapping(MAP_DATABASE, 1); addMapping(MAP_DATABASE, 2); assertEquals("2 mappings should exist", 2, mappingCount(MAP_DATABASE)); addMapping(MAP_DATABASE, 1); assertEquals("should still be 2 mappings", 2, mappingCount(MAP_DATABASE)); }
public void registerClient(SonarLintClient sonarLintClient) { clients.add(sonarLintClient); sonarLintClient.scheduleHeartbeat(); sonarLintClient.addListener(new SonarLintClientEventsListener(sonarLintClient)); LOG.debug("Registering new SonarLint client"); }
@Test public void registerClient_whenCalledFirstTime_addsAsyncListenerToClient() { SonarLintClient client = mock(SonarLintClient.class); underTest.registerClient(client); verify(client).addListener(any()); }
public void updateConnection(Connection connection) { if (connection != null) { connectionRepository.saveAndFlush(connection); cacheService.evictRelatedCacheValues("metadata-response", connection.getEntityId()); cacheService.evictSingleCacheValue("metadata", connection.getEntityId()); } }
@Test void updateConnection() { connectionServiceMock.updateConnection(new Connection()); verify(connectionRepositoryMock, times(1)).saveAndFlush(any(Connection.class)); }
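updateConnection silently ignores a null argument. A hedged companion case (same mocks as the test above, and assuming Mockito's verifyNoInteractions is available):

@Test
void updateConnectionWithNullIsIgnored() {
    // The null guard in updateConnection means no repository or cache calls happen.
    connectionServiceMock.updateConnection(null);
    verifyNoInteractions(connectionRepositoryMock);
}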
@Override public final BootstrapConfig config() { return config; }
@Test public void optionsAndAttributesMustBeAvailableOnChannelInit() throws InterruptedException { final AttributeKey<String> key = AttributeKey.valueOf(UUID.randomUUID().toString()); new Bootstrap() .group(groupA) .channel(LocalChannel.class) .option(ChannelOption.CONNECT_TIMEOUT_MILLIS, 4242) .attr(key, "value") .handler(new ChannelInitializer<LocalChannel>() { @Override protected void initChannel(LocalChannel ch) throws Exception { Integer option = ch.config().getOption(ChannelOption.CONNECT_TIMEOUT_MILLIS); assertEquals(4242, (int) option); assertEquals("value", ch.attr(key).get()); } }) .bind(LocalAddress.ANY).sync(); }
public void notify(DashboardNotification e) { notificationMappers.stream() .filter(notificationMapper -> notificationMapper.supports(e)) .map(notificationMapper -> notificationMapper.map(e)) .forEach(this::saveDashboardNotificationAsMetadata); }
@Test void notifyForCpuAllocationIrregularityShouldSaveJobRunrMetadataToStorageProvider() { dashboardNotificationManager.notify(new CpuAllocationIrregularityNotification(11)); verify(storageProviderMock).saveMetadata(jobRunrMetadataToSaveArgumentCaptor.capture()); assertThat(jobRunrMetadataToSaveArgumentCaptor.getValue()) .hasName(CpuAllocationIrregularityNotification.class.getSimpleName()) .hasOwner("BackgroundJobServer " + backgroundJobServerId.toString()) .valueContains("11"); }
@Udf public boolean check(@UdfParameter(description = "The input JSON string") final String input) { if (input == null) { return false; } try { return !UdfJsonMapper.parseJson(input).isMissingNode(); } catch (KsqlFunctionException e) { return false; } }
@Test public void shouldInterpretString() { assertTrue(udf.check("\"abc\"")); }
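check also returns false for null input and for strings that fail to parse. A hedged pair of companion cases (same udf fixture, and assuming UdfJsonMapper.parseJson signals malformed JSON via KsqlFunctionException, as the catch block implies):

@Test
public void shouldRejectNullAndMalformedInput() {
    assertFalse(udf.check(null)); // null short-circuits before parsing
    assertFalse(udf.check("{"));  // unbalanced JSON fails parsing
}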
public String format(List<String> args) { // https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h3-Operating-System-Commands // XTerm accepts either BEL or ST for terminating OSC // sequences, and when returning information, uses the same // terminator used in a query. return Ascii.ESC_CHAR + "]" + String.join(String.valueOf(ARG_SEPARATOR), args) + StringsKt.takeLast(text, terminatorLength(text)); }
@Test public void formatUsingSameTerminator() { var seq1 = create("2;Test 1" + BEL_CHAR); assertEquals(ESC_CHAR + "]foo" + BEL_CHAR, seq1.format(List.of("foo"))); var seq2 = create("2;Test 1" + TWO_BYTES_TERMINATOR); assertEquals(ESC_CHAR + "]bar;baz" + TWO_BYTES_TERMINATOR, seq2.format(List.of("bar", "baz"))); }
@Override public int inferParallelism(Context dynamicParallelismContext) { FileEnumerator fileEnumerator; List<HiveTablePartition> partitions; if (dynamicFilterPartitionKeys != null) { fileEnumerator = new HiveSourceDynamicFileEnumerator.Provider( tablePath.getFullName(), dynamicFilterPartitionKeys, partitionBytes, hiveVersion, jobConfWrapper) .create(); if (dynamicParallelismContext.getDynamicFilteringInfo().isPresent()) { DynamicFilteringInfo dynamicFilteringInfo = dynamicParallelismContext.getDynamicFilteringInfo().get(); if (dynamicFilteringInfo instanceof DynamicFilteringEvent) { ((HiveSourceDynamicFileEnumerator) fileEnumerator) .setDynamicFilteringData( ((DynamicFilteringEvent) dynamicFilteringInfo).getData()); } } partitions = ((HiveSourceDynamicFileEnumerator) fileEnumerator).getFinalPartitions(); } else { fileEnumerator = getEnumeratorFactory().create(); partitions = ((HiveSourceFileEnumerator) fileEnumerator).getPartitions(); } return new HiveDynamicParallelismInferenceFactory( tablePath, jobConfWrapper.conf(), dynamicParallelismContext.getParallelismInferenceUpperBound()) .create() .infer( () -> HiveSourceFileEnumerator.getNumFiles( partitions, jobConfWrapper.conf()), () -> HiveSourceFileEnumerator.createInputSplits( 0, partitions, jobConfWrapper.conf(), true) .size()) .limit(limit); }
@Test void testDynamicParallelismInferenceWithLimit() throws Exception { ObjectPath tablePath = new ObjectPath("default", "hiveTbl2"); createTable(tablePath, hiveCatalog, true); HiveSource<RowData> hiveSource = createHiveSourceWithPartition(tablePath, new Configuration(), 1L, null); // test inferred parallelism less than maxParallelism DynamicParallelismInference.Context context = genDynamicParallelismContext(10, Collections.emptyList()); assertThat(hiveSource.inferParallelism(context)).isEqualTo(1); hiveCatalog.dropTable(tablePath, false); }
@Override public InputStream open(String path) throws IOException { try (InputStream in = delegate.open(path)) { final String config = new String(in.readAllBytes(), StandardCharsets.UTF_8); final String substituted = substitutor.replace(config); return new ByteArrayInputStream(substituted.getBytes(StandardCharsets.UTF_8)); } }
@Test void shouldNotBeVulnerableToCVE_2022_42889() throws IOException { StringLookup dummyLookup = (x) -> null; SubstitutingSourceProvider provider = new SubstitutingSourceProvider(new DummySourceProvider(), new StringSubstitutor(dummyLookup)); assertThat(provider.open("foo: ${script:javascript:3 + 4}")).hasSameContentAs(new ByteArrayInputStream("foo: ${script:javascript:3 + 4}".getBytes(StandardCharsets.UTF_8))); }
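For contrast with the CVE regression above, a hedged sketch of the happy path (assuming the same DummySourceProvider fixture, which echoes the path string back as the stream content):

@Test
void shouldSubstituteSimpleVariables() throws IOException {
    StringLookup lookup = key -> "name".equals(key) ? "world" : null;
    SubstitutingSourceProvider provider =
        new SubstitutingSourceProvider(new DummySourceProvider(), new StringSubstitutor(lookup));
    assertThat(provider.open("hello: ${name}"))
        .hasSameContentAs(new ByteArrayInputStream("hello: world".getBytes(StandardCharsets.UTF_8)));
}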
@Override public String toString() { return "path:'" + getPath() + "'|param:'" + getParamName() + "'|mimetype:'" + getMimeType() + "'"; }
@Test public void testToString() throws Exception { HTTPFileArg file = new HTTPFileArg("path1", "param1", "mimetype1"); assertEquals("path:'path1'|param:'param1'|mimetype:'mimetype1'", file.toString()); }
@SuppressWarnings({"rawtypes", "unchecked"}) public static <T> int compare(T c1, T c2, Comparator<T> comparator) { if (null == comparator) { return compare((Comparable) c1, (Comparable) c2); } return comparator.compare(c1, c2); }
@Test public void compareTest(){ int compare = CompareUtil.compare(null, "a", true); assertTrue(compare > 0); compare = CompareUtil.compare(null, "a", false); assertTrue(compare < 0); }
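compareTest exercises the null-greater overload, not the comparator overload shown as the focal method. A hedged sketch of the comparator path (Hutool's CompareUtil assumed):

@Test
public void compareWithComparatorTest() {
    // An explicit comparator is used directly.
    assertTrue(CompareUtil.compare("a", "b", java.util.Comparator.<String>naturalOrder()) < 0);
    // A null comparator falls back to natural Comparable ordering, per the focal method.
    assertTrue(CompareUtil.compare("b", "a", (java.util.Comparator<String>) null) > 0);
}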
public static List<InetSocketAddress> getIpListByRegistry(String registryIp) { List<String[]> ips = new ArrayList<String[]>(); String defaultPort = null; String[] srcIps = registryIp.split(","); for (String add : srcIps) { int a = add.indexOf("://"); if (a > -1) { add = add.substring(a + 3); // 去掉协议头 } String[] s1 = add.split(":"); if (s1.length > 1) { if (defaultPort == null && s1[1] != null && s1[1].length() > 0) { defaultPort = s1[1]; } ips.add(new String[] { s1[0], s1[1] }); // 得到ip和端口 } else { ips.add(new String[] { s1[0], defaultPort }); } } List<InetSocketAddress> ads = new ArrayList<InetSocketAddress>(); for (int j = 0; j < ips.size(); j++) { String[] ip = ips.get(j); try { InetSocketAddress address = new InetSocketAddress(ip[0], Integer.parseInt(ip[1] == null ? defaultPort : ip[1])); ads.add(address); } catch (Exception ignore) { //NOPMD } } return ads; }
@Test public void getIpListByRegistry() throws Exception { }
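The test above is an empty stub. A hedged sketch of what it could assert, derived from the parsing rules in the focal method (protocol prefixes are stripped; the first explicit port becomes the default for port-less entries). The owning class name NetUtils is an assumption:

@Test
public void getIpListByRegistryParsesHostsAndDefaultPort() throws Exception {
    // NetUtils is a hypothetical holder for the focal method shown above.
    List<InetSocketAddress> ads =
        NetUtils.getIpListByRegistry("zookeeper://192.168.1.100:2181,192.168.1.101");
    assertEquals(2, ads.size());
    assertEquals(2181, ads.get(0).getPort()); // explicit port kept
    assertEquals(2181, ads.get(1).getPort()); // port-less entry inherits the first explicit port
}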
public static <InputT, OutputT> DoFnInvoker<InputT, OutputT> invokerFor( DoFn<InputT, OutputT> fn) { return ByteBuddyDoFnInvokerFactory.only().newByteBuddyInvoker(fn); }
@Test public void testStableName() { DoFnInvoker<Void, Void> invoker = DoFnInvokers.invokerFor(new StableNameTestDoFn()); assertThat( invoker.getClass().getName(), equalTo( String.format( "%s$%s", StableNameTestDoFn.class.getName(), DoFnInvoker.class.getSimpleName()))); }
@Override public MutableNetwork<Node, Edge> apply(MapTask mapTask) { List<ParallelInstruction> parallelInstructions = Apiary.listOrEmpty(mapTask.getInstructions()); MutableNetwork<Node, Edge> network = NetworkBuilder.directed() .allowsSelfLoops(false) .allowsParallelEdges(true) .expectedNodeCount(parallelInstructions.size() * 2) .build(); // Add all the instruction nodes and output nodes ParallelInstructionNode[] instructionNodes = new ParallelInstructionNode[parallelInstructions.size()]; InstructionOutputNode[][] outputNodes = new InstructionOutputNode[parallelInstructions.size()][]; for (int i = 0; i < parallelInstructions.size(); ++i) { // InstructionOutputNode's are the source of truth on instruction outputs. // Clear the instruction's outputs to reduce chance for confusion. List<InstructionOutput> outputs = Apiary.listOrEmpty(parallelInstructions.get(i).getOutputs()); outputNodes[i] = new InstructionOutputNode[outputs.size()]; JsonFactory factory = MoreObjects.firstNonNull(mapTask.getFactory(), Transport.getJsonFactory()); ParallelInstruction parallelInstruction = clone(factory, parallelInstructions.get(i)).setOutputs(null); ParallelInstructionNode instructionNode = ParallelInstructionNode.create(parallelInstruction, Nodes.ExecutionLocation.UNKNOWN); instructionNodes[i] = instructionNode; network.addNode(instructionNode); // Connect the instruction node output to the output PCollection node for (int j = 0; j < outputs.size(); ++j) { InstructionOutput instructionOutput = outputs.get(j); InstructionOutputNode outputNode = InstructionOutputNode.create( instructionOutput, "generatedPcollection" + this.idGenerator.getId()); network.addNode(outputNode); if (parallelInstruction.getParDo() != null) { network.addEdge( instructionNode, outputNode, MultiOutputInfoEdge.create( parallelInstruction.getParDo().getMultiOutputInfos().get(j))); } else { network.addEdge(instructionNode, outputNode, DefaultEdge.create()); } outputNodes[i][j] = outputNode; } } // Connect PCollections as inputs to instructions for (ParallelInstructionNode instructionNode : instructionNodes) { ParallelInstruction parallelInstruction = instructionNode.getParallelInstruction(); if (parallelInstruction.getFlatten() != null) { for (InstructionInput input : Apiary.listOrEmpty(parallelInstruction.getFlatten().getInputs())) { attachInput(input, network, instructionNode, outputNodes); } } else if (parallelInstruction.getParDo() != null) { attachInput( parallelInstruction.getParDo().getInput(), network, instructionNode, outputNodes); } else if (parallelInstruction.getPartialGroupByKey() != null) { attachInput( parallelInstruction.getPartialGroupByKey().getInput(), network, instructionNode, outputNodes); } else if (parallelInstruction.getRead() != null) { // Reads have no inputs so nothing to do } else if (parallelInstruction.getWrite() != null) { attachInput( parallelInstruction.getWrite().getInput(), network, instructionNode, outputNodes); } else { throw new IllegalArgumentException( String.format( "Unknown type of instruction %s for map task %s", parallelInstruction, mapTask)); } } return network; }
@Test public void testParallelEdgeFlatten() { // /---\ // Read --> Read.out --> Flatten // \---/ InstructionOutput readOutput = createInstructionOutput("Read.out"); ParallelInstruction read = createParallelInstruction("Read", readOutput); read.setRead(new ReadInstruction()); FlattenInstruction flattenInstruction = new FlattenInstruction(); flattenInstruction.setInputs( ImmutableList.of( createInstructionInput(0, 0), // Read.out createInstructionInput(0, 0), // Read.out createInstructionInput(0, 0))); // Read.out InstructionOutput flattenOutput = createInstructionOutput("Flatten.out"); ParallelInstruction flatten = createParallelInstruction("Flatten", flattenOutput); flatten.setFlatten(flattenInstruction); MapTask mapTask = new MapTask(); mapTask.setInstructions(ImmutableList.of(read, flatten)); mapTask.setFactory(Transport.getJsonFactory()); Network<Node, Edge> network = new MapTaskToNetworkFunction(IdGenerators.decrementingLongs()).apply(mapTask); assertNetworkProperties(network); assertEquals(4, network.nodes().size()); assertEquals(5, network.edges().size()); ParallelInstructionNode readNode = get(network, read); InstructionOutputNode readOutputNode = getOnlySuccessor(network, readNode); assertEquals(readOutput, readOutputNode.getInstructionOutput()); ParallelInstructionNode flattenNode = getOnlySuccessor(network, readOutputNode); // Assert that the three parallel edges are maintained assertEquals(3, network.edgesConnecting(readOutputNode, flattenNode).size()); InstructionOutputNode flattenOutputNode = getOnlySuccessor(network, flattenNode); assertEquals(flattenOutput, flattenOutputNode.getInstructionOutput()); }
@SuppressWarnings("unused") // Part of required API. public void execute( final ConfiguredStatement<InsertValues> statement, final SessionProperties sessionProperties, final KsqlExecutionContext executionContext, final ServiceContext serviceContext ) { final InsertValues insertValues = statement.getStatement(); final MetaStore metaStore = executionContext.getMetaStore(); final KsqlConfig config = statement.getSessionConfig().getConfig(true); final DataSource dataSource = getDataSource(config, metaStore, insertValues); validateInsert(insertValues.getColumns(), dataSource); final ProducerRecord<byte[], byte[]> record = buildRecord(statement, metaStore, dataSource, serviceContext); try { producer.sendRecord(record, serviceContext, config.getProducerClientConfigProps()); } catch (final TopicAuthorizationException e) { // TopicAuthorizationException does not give much detailed information about why it failed, // except which topics are denied. Here we just add the ACL to make the error message // consistent with other authorization error messages. final Exception rootCause = new KsqlTopicAuthorizationException( AclOperation.WRITE, e.unauthorizedTopics() ); throw new KsqlException(createInsertFailedExceptionMessage(insertValues), rootCause); } catch (final ClusterAuthorizationException e) { // ClusterAuthorizationException is thrown when using idempotent producers // and either a topic write permission or a cluster-level idempotent write // permission (only applicable for broker versions no later than 2.8) is // missing. In this case, we include additional context to help the user // distinguish this type of failure from other permissions exceptions // such as the ones thrown above when TopicAuthorizationException is caught. throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } catch (final KafkaException e) { if (e.getCause() != null && e.getCause() instanceof ClusterAuthorizationException) { // The error message thrown when an idempotent producer is missing permissions // is (nondeterministically) inconsistent: it is either a raw ClusterAuthorizationException, // as checked for above, or a ClusterAuthorizationException wrapped inside a KafkaException. // ksqlDB handles these two the same way, accordingly. // See https://issues.apache.org/jira/browse/KAFKA-14138 for more. throw new KsqlException( createInsertFailedExceptionMessage(insertValues), createClusterAuthorizationExceptionRootCause(dataSource) ); } else { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } } catch (final Exception e) { throw new KsqlException(createInsertFailedExceptionMessage(insertValues), e); } }
@Test public void shouldInsertWrappedSingleField() { // Given: givenSourceStreamWithSchema(SINGLE_VALUE_COLUMN_SCHEMA, SerdeFeatures.of(), SerdeFeatures.of()); final ConfiguredStatement<InsertValues> statement = givenInsertValues( valueColumnNames(SINGLE_VALUE_COLUMN_SCHEMA), ImmutableList.of(new StringLiteral("new")) ); // When: executor.execute(statement, mock(SessionProperties.class), engine, serviceContext); // Then: verify(keySerializer).serialize(TOPIC_NAME, genericKey((String) null)); verify(valueSerializer).serialize(TOPIC_NAME, genericRow("new")); verify(producer).send(new ProducerRecord<>(TOPIC_NAME, null, 1L, KEY, VALUE)); }
@Override public Optional<String> getValue(Object arg, String type) { if (arg instanceof List) { List<?> list = (List<?>) arg; int index = Integer.parseInt(getKey(type)); if (index < 0 || index >= list.size()) { return Optional.empty(); } Object object = list.get(index); return object == null ? Optional.empty() : Optional.of(String.valueOf(object)); } return Optional.empty(); }
@Test public void testValue() { TypeStrategy strategy = new ListTypeStrategy(); List<String> list = new ArrayList<>(); list.add("foo"); list.add(null); // normal case Assert.assertEquals("foo", strategy.getValue(list, ".get(0)").orElse(null)); // null element Assert.assertNotEquals("foo", strategy.getValue(list, ".get(1)").orElse(null)); // index out of bounds Assert.assertNotEquals("foo", strategy.getValue(list, ".get(2)").orElse(null)); // non-list input Assert.assertNotEquals("foo", strategy.getValue("bar", ".get(0)").orElse(null)); // mismatched value is not equal Assert.assertNotEquals("bar", strategy.getValue(list, ".get(0)").orElse(null)); }
public Span nextSpan(TraceContextOrSamplingFlags extracted) { if (extracted == null) throw new NullPointerException("extracted == null"); TraceContext context = extracted.context(); if (context != null) return newChild(context); TraceIdContext traceIdContext = extracted.traceIdContext(); if (traceIdContext != null) { return _toSpan(null, decorateContext( InternalPropagation.instance.flags(extracted.traceIdContext()), traceIdContext.traceIdHigh(), traceIdContext.traceId(), 0L, 0L, 0L, extracted.extra() )); } SamplingFlags samplingFlags = extracted.samplingFlags(); List<Object> extra = extracted.extra(); TraceContext parent = currentTraceContext.get(); int flags; long traceIdHigh = 0L, traceId = 0L, localRootId = 0L, spanId = 0L; if (parent != null) { // At this point, we didn't extract trace IDs, but do have a trace in progress. Since typical // trace sampling is up front, we retain the decision from the parent. flags = InternalPropagation.instance.flags(parent); traceIdHigh = parent.traceIdHigh(); traceId = parent.traceId(); localRootId = parent.localRootId(); spanId = parent.spanId(); extra = concat(extra, parent.extra()); } else { flags = InternalPropagation.instance.flags(samplingFlags); } return _toSpan(parent, decorateContext(flags, traceIdHigh, traceId, localRootId, spanId, 0L, extra)); }
@Test void localRootId_nextSpan_ids_notYetSampled() { TraceIdContext context1 = TraceIdContext.newBuilder().traceId(1).build(); TraceIdContext context2 = TraceIdContext.newBuilder().traceId(2).build(); localRootId(context1, context2, ctx -> tracer.nextSpan(ctx)); }
@Override public int run(String[] argv) { if (argv.length < 1) { printUsage(""); return -1; } int exitCode = -1; int i = 0; String cmd = argv[i++]; // // verify that we have enough command line parameters // if ("-safemode".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-report".equals(cmd)) { if (argv.length > DFS_REPORT_ARGS.length + 1) { printUsage(cmd); return exitCode; } } else if ("-saveNamespace".equals(cmd)) { if (argv.length != 1 && argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-rollEdits".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-restoreFailedStorage".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-refreshNodes".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-finalizeUpgrade".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if (RollingUpgradeCommand.matches(cmd)) { if (argv.length > 2) { printUsage(cmd); return exitCode; } } else if ("-upgrade".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-metasave".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-refreshServiceAcl".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-refresh".equals(cmd)) { if (argv.length < 3) { printUsage(cmd); return exitCode; } } else if ("-refreshUserToGroupsMappings".equals(cmd)) { if (argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-printTopology".equals(cmd)) { if(argv.length != 1) { printUsage(cmd); return exitCode; } } else if ("-refreshNamenodes".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-getVolumeReport".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-reconfig".equals(cmd)) { if (argv.length != 4) { printUsage(cmd); return exitCode; } } else if ("-deleteBlockPool".equals(cmd)) { if ((argv.length != 3) && (argv.length != 4)) { printUsage(cmd); return exitCode; } } else if ("-setBalancerBandwidth".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-getBalancerBandwidth".equalsIgnoreCase(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-fetchImage".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-shutdownDatanode".equals(cmd)) { if ((argv.length != 2) && (argv.length != 3)) { printUsage(cmd); return exitCode; } } else if ("-getDatanodeInfo".equals(cmd)) { if (argv.length != 2) { printUsage(cmd); return exitCode; } } else if ("-triggerBlockReport".equals(cmd)) { if ((argv.length < 2) || (argv.length > 5)) { printUsage(cmd); return exitCode; } } else if ("-listOpenFiles".equals(cmd)) { if ((argv.length > 4)) { printUsage(cmd); return exitCode; } } // initialize DFSAdmin init(); Exception debugException = null; exitCode = 0; try { if ("-report".equals(cmd)) { report(argv, i); } else if ("-safemode".equals(cmd)) { setSafeMode(argv, i); } else if ("-allowSnapshot".equalsIgnoreCase(cmd)) { allowSnapshot(argv); } else if ("-disallowSnapshot".equalsIgnoreCase(cmd)) { disallowSnapshot(argv); } else if ("-provisionSnapshotTrash".equalsIgnoreCase(cmd)) { provisionSnapshotTrash(argv); } else if ("-saveNamespace".equals(cmd)) { exitCode = saveNamespace(argv); } else if ("-rollEdits".equals(cmd)) { exitCode = rollEdits(); } else if ("-restoreFailedStorage".equals(cmd)) { exitCode = restoreFailedStorage(argv[i]); } else if ("-refreshNodes".equals(cmd)) { exitCode = refreshNodes(); } else if ("-finalizeUpgrade".equals(cmd)) { exitCode = finalizeUpgrade(); } else if (RollingUpgradeCommand.matches(cmd)) { exitCode = RollingUpgradeCommand.run(getDFS(), argv, i); } else if ("-upgrade".equals(cmd)) { exitCode = upgrade(argv[i]); } else if ("-metasave".equals(cmd)) { exitCode = metaSave(argv, i); } else if (ClearQuotaCommand.matches(cmd)) { exitCode = new ClearQuotaCommand(argv, i, getConf()).runAll(); } else if (SetQuotaCommand.matches(cmd)) { exitCode = new SetQuotaCommand(argv, i, getConf()).runAll(); } else if (ClearSpaceQuotaCommand.matches(cmd)) { exitCode = new ClearSpaceQuotaCommand(argv, i, getConf()).runAll(); } else if (SetSpaceQuotaCommand.matches(cmd)) { exitCode = new SetSpaceQuotaCommand(argv, i, getConf()).runAll(); } else if ("-refreshServiceAcl".equals(cmd)) { exitCode = refreshServiceAcl(); } else if ("-refreshUserToGroupsMappings".equals(cmd)) { exitCode = refreshUserToGroupsMappings(); } else if ("-refreshSuperUserGroupsConfiguration".equals(cmd)) { exitCode = refreshSuperUserGroupsConfiguration(); } else if ("-refreshCallQueue".equals(cmd)) { exitCode = refreshCallQueue(); } else if ("-refresh".equals(cmd)) { exitCode = genericRefresh(argv, i); } else if ("-printTopology".equals(cmd)) { exitCode = printTopology(); } else if ("-refreshNamenodes".equals(cmd)) { exitCode = refreshNamenodes(argv, i); } else if ("-getVolumeReport".equals(cmd)) { exitCode = getVolumeReport(argv, i); } else if ("-deleteBlockPool".equals(cmd)) { exitCode = deleteBlockPool(argv, i); } else if ("-setBalancerBandwidth".equals(cmd)) { exitCode = setBalancerBandwidth(argv, i); } else if ("-getBalancerBandwidth".equals(cmd)) { exitCode = getBalancerBandwidth(argv, i); } else if ("-fetchImage".equals(cmd)) { exitCode = fetchImage(argv, i); } else if ("-shutdownDatanode".equals(cmd)) { exitCode = shutdownDatanode(argv, i); } else if ("-evictWriters".equals(cmd)) { exitCode = evictWriters(argv, i); } else if ("-getDatanodeInfo".equals(cmd)) { exitCode = getDatanodeInfo(argv, i); } else if ("-reconfig".equals(cmd)) { exitCode = reconfig(argv, i); } else if ("-triggerBlockReport".equals(cmd)) { exitCode = triggerBlockReport(argv); } else if ("-listOpenFiles".equals(cmd)) { exitCode = listOpenFiles(argv); } else if ("-help".equals(cmd)) { if (i < argv.length) { printHelp(argv[i]); } else { printHelp(""); } } else { exitCode = -1; System.err.println(cmd.substring(1) + ": Unknown command"); printUsage(""); } } catch (IllegalArgumentException arge) { debugException = arge; exitCode = -1; System.err.println(cmd.substring(1) + ": " + arge.getLocalizedMessage()); printUsage(cmd); } catch (RemoteException e) { // // This is a error returned by hadoop server. Print // out the first line of the error message, ignore the stack trace. exitCode = -1; debugException = e; try { String[] content; content = e.getLocalizedMessage().split("\n"); System.err.println(cmd.substring(1) + ": " + content[0]); } catch (Exception ex) { System.err.println(cmd.substring(1) + ": " + ex.getLocalizedMessage()); debugException = ex; } } catch (Exception e) { exitCode = -1; debugException = e; System.err.println(cmd.substring(1) + ": " + e.getLocalizedMessage()); } if (LOG.isDebugEnabled() && debugException != null) { LOG.debug("Exception encountered:", debugException); } return exitCode; }
@Test public void testAllowDisallowSnapshot() throws Exception { final Path dirPath = new Path("/ssdir1"); final Path trashRoot = new Path(dirPath, ".Trash"); final DistributedFileSystem dfs = cluster.getFileSystem(); final DFSAdmin dfsAdmin = new DFSAdmin(conf); dfs.mkdirs(dirPath); assertEquals(0, ToolRunner.run(dfsAdmin, new String[]{"-allowSnapshot", dirPath.toString()})); // Verify .Trash creation after -allowSnapshot command assertTrue(dfs.exists(trashRoot)); assertEquals(TRASH_PERMISSION, dfs.getFileStatus(trashRoot).getPermission()); // Move a file to trash final Path file1 = new Path(dirPath, "file1"); try (FSDataOutputStream s = dfs.create(file1)) { s.write(0); } FsShell fsShell = new FsShell(dfs.getConf()); assertEquals(0, ToolRunner.run(fsShell, new String[]{"-rm", file1.toString()})); // User directory inside snapshottable directory trash should have 700 final String username = UserGroupInformation.getLoginUser().getShortUserName(); final Path trashRootUserSubdir = new Path(trashRoot, username); assertTrue(dfs.exists(trashRootUserSubdir)); final FsPermission trashUserdirPermission = new FsPermission( FsAction.ALL, FsAction.NONE, FsAction.NONE, false); assertEquals(trashUserdirPermission, dfs.getFileStatus(trashRootUserSubdir).getPermission()); // disallowSnapshot should fail when .Trash is not empty assertNotEquals(0, ToolRunner.run(dfsAdmin, new String[]{"-disallowSnapshot", dirPath.toString()})); dfs.delete(trashRootUserSubdir, true); // disallowSnapshot should succeed now that we have an empty .Trash assertEquals(0, ToolRunner.run(dfsAdmin, new String[]{"-disallowSnapshot", dirPath.toString()})); // Cleanup dfs.delete(dirPath, true); }
@Override protected void decode(ChannelHandlerContext ctx, ByteBuf buffer, List<Object> out) throws Exception { final int frameSizeValueLength = findFrameSizeValueLength(buffer); // We have not found the frame length value byte size yet. if (frameSizeValueLength <= 0) { return; } // Convert the frame length value bytes into an integer without mutating the buffer reader index. final String lengthString = buffer.slice(buffer.readerIndex(), frameSizeValueLength).toString(StandardCharsets.UTF_8); final int length = Integer.parseInt(lengthString); final int skipLength = frameSizeValueLength + 1; // Frame length value bytes and the whitespace that follows it. // We have to take the skipped bytes (frame size value length + whitespace) into account when checking if // the buffer has enough data to read the complete message. if (buffer.readableBytes() - skipLength < length) { // We cannot read the complete frame yet. return; } else { // Skip the frame length value bytes and the whitespace that follows it. buffer.skipBytes(skipLength); } final ByteBuf frame = buffer.readRetainedSlice(length); out.add(frame); }
@Test public void testDecode() throws Exception { final ByteBuf buf1 = Unpooled.copiedBuffer("123 <45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"1\"] syslog-ng starting up; version='3.5.3'\n", StandardCharsets.US_ASCII); final ByteBuf buf2 = Unpooled.copiedBuffer("186 <45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"2\"] Syslog connection established; fd='9', server='AF_INET(172.17.42.1:6666)', local='AF_INET(0.0.0.0:0)'\n", StandardCharsets.US_ASCII); final ByteBuf buf3 = Unpooled.copiedBuffer(buf1, buf2, buf1); assertTrue(channel.writeInbound(buf1, buf2, buf3)); final ByteBuf actual1 = channel.readInbound(); assertEquals("<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"1\"] syslog-ng starting up; version='3.5.3'\n", actual1.toString(StandardCharsets.US_ASCII)); final ByteBuf actual2 = channel.readInbound(); assertEquals("<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"2\"] Syslog connection established; fd='9', server='AF_INET(172.17.42.1:6666)', local='AF_INET(0.0.0.0:0)'\n", actual2.toString(StandardCharsets.US_ASCII)); final ByteBuf actual3 = channel.readInbound(); assertEquals("<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"1\"] syslog-ng starting up; version='3.5.3'\n", actual3.toString(StandardCharsets.US_ASCII)); final ByteBuf actual4 = channel.readInbound(); assertEquals("<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"2\"] Syslog connection established; fd='9', server='AF_INET(172.17.42.1:6666)', local='AF_INET(0.0.0.0:0)'\n", actual4.toString(StandardCharsets.US_ASCII)); final ByteBuf actual5 = channel.readInbound(); assertEquals("<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - [meta sequenceId=\"1\"] syslog-ng starting up; version='3.5.3'\n", actual5.toString(StandardCharsets.US_ASCII)); assertNull(channel.readInbound()); }
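The framing used by this decoder is RFC 6587 octet counting: each frame is the payload's byte length in decimal ASCII, a single space, then exactly that many bytes. A minimal JDK-only sketch of how such a frame is built, using the first payload from the test above (the printed prefix, 123, matches the test buffer):

import java.nio.charset.StandardCharsets;

public class OctetFrameDemo {
    public static void main(String[] args) {
        String payload = "<45>1 2014-10-21T10:21:09+00:00 c4dc57ba1ebb syslog-ng 7120 - "
                + "[meta sequenceId=\"1\"] syslog-ng starting up; version='3.5.3'\n";
        byte[] bytes = payload.getBytes(StandardCharsets.US_ASCII);
        // Frame = decimal byte count, one space, then the payload itself.
        String frame = bytes.length + " " + payload;
        System.out.print(frame); // "123 <45>1 ..." as in the first test buffer
    }
}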
@Override
public void updateArticleCategory(ArticleCategoryUpdateReqVO updateReqVO) {
    // validate that the category exists
    validateArticleCategoryExists(updateReqVO.getId());
    // perform the update
    ArticleCategoryDO updateObj = ArticleCategoryConvert.INSTANCE.convert(updateReqVO);
    articleCategoryMapper.updateById(updateObj);
}
@Test
public void testUpdateArticleCategory_success() {
    // mock data
    ArticleCategoryDO dbArticleCategory = randomPojo(ArticleCategoryDO.class);
    articleCategoryMapper.insert(dbArticleCategory); // @Sql: insert an existing record first
    // prepare the request
    ArticleCategoryUpdateReqVO reqVO = randomPojo(ArticleCategoryUpdateReqVO.class, o -> {
        o.setId(dbArticleCategory.getId()); // set the ID of the record to update
    });
    // invoke
    articleCategoryService.updateArticleCategory(reqVO);
    // verify the update was applied correctly
    ArticleCategoryDO articleCategory = articleCategoryMapper.selectById(reqVO.getId()); // fetch the latest record
    assertPojoEquals(reqVO, articleCategory);
}
@Override public Output run(RunContext runContext) throws Exception { URI from = new URI(runContext.render(this.from)); final PebbleExpressionPredicate predicate = getExpressionPredication(runContext); final Path path = runContext.workingDir().createTempFile(".ion"); long processedItemsTotal = 0L; long droppedItemsTotal = 0L; try (final BufferedWriter writer = Files.newBufferedWriter(path); final BufferedReader reader = newBufferedReader(runContext, from)) { String item; while ((item = reader.readLine()) != null) { IllegalVariableEvaluationException exception = null; Boolean match = null; try { match = predicate.apply(item); } catch (IllegalVariableEvaluationException e) { exception = e; } FilterType action = this.filterType; if (match == null) { switch (errorOrNullBehavior) { case FAIL -> { if (exception != null) { throw exception; } else { throw new IllegalVariableEvaluationException(String.format( "Expression `%s` return `null` on item `%s`", filterCondition, item )); } } case INCLUDE -> action = FilterType.INCLUDE; case EXCLUDE -> action = FilterType.EXCLUDE; } match = true; } if (!match) { action = action.reverse(); } switch (action) { case INCLUDE -> { writer.write(item); writer.newLine(); } case EXCLUDE -> droppedItemsTotal++; } processedItemsTotal++; } } URI uri = runContext.storage().putFile(path.toFile()); return Output.builder() .uri(uri) .processedItemsTotal(processedItemsTotal) .droppedItemsTotal(droppedItemsTotal) .build(); }
@Test void shouldFilterWithNotMatchGivenNonBooleanValue() throws Exception { // Given RunContext runContext = runContextFactory.of(); FilterItems task = FilterItems .builder() .from(generateKeyValueFile(TEST_VALID_ITEMS, runContext).toString()) .filterCondition("{{ value }}") .filterType(FilterItems.FilterType.INCLUDE) .errorOrNullBehavior(FilterItems.ErrorOrNullBehavior.FAIL) .build(); // When FilterItems.Output output = task.run(runContext); // Then Assertions.assertNotNull(output); Assertions.assertNotNull(output.getUri()); Assertions.assertEquals(0, output.getDroppedItemsTotal()); Assertions.assertEquals(4, output.getProcessedItemsTotal()); assertFile(runContext, output, TEST_VALID_ITEMS, KeyValue.class); }
public static String prependHexPrefix(String input) { if (!containsHexPrefix(input)) { return HEX_PREFIX + input; } else { return input; } }
@Test public void testPrependHexPrefix() { assertEquals(Numeric.prependHexPrefix(""), ("0x")); assertEquals(Numeric.prependHexPrefix("0x0123456789abcdef"), ("0x0123456789abcdef")); assertEquals(Numeric.prependHexPrefix("0x"), ("0x")); assertEquals(Numeric.prependHexPrefix("0123456789abcdef"), ("0x0123456789abcdef")); }
public static void writeIdlProtocol(Writer writer, Protocol protocol) throws IOException { final String protocolFullName = protocol.getName(); final int lastDotPos = protocolFullName.lastIndexOf("."); final String protocolNameSpace; if (lastDotPos < 0) { protocolNameSpace = protocol.getNamespace(); } else if (lastDotPos > 0) { protocolNameSpace = protocolFullName.substring(0, lastDotPos); } else { protocolNameSpace = null; } writeIdlProtocol(writer, protocol, protocolNameSpace, protocolFullName.substring(lastDotPos + 1), protocol.getTypes(), protocol.getMessages().values()); }
@Test public void cannotWriteEmptyEnums() { assertThrows(AvroRuntimeException.class, () -> IdlUtils.writeIdlProtocol(new StringWriter(), Schema.createEnum("Single", null, "naming", emptyList()))); }
@Override
public void onCycleComplete(com.netflix.hollow.api.producer.Status status, HollowProducer.ReadState readState, long version, Duration elapsed) {
    boolean isCycleSuccess;
    long cycleEndTimeNano = System.nanoTime();

    if (status.getType() == com.netflix.hollow.api.producer.Status.StatusType.SUCCESS) {
        isCycleSuccess = true;
        consecutiveFailures = 0L;
        lastCycleSuccessTimeNanoOptional = OptionalLong.of(cycleEndTimeNano);
    } else {
        isCycleSuccess = false;
        consecutiveFailures++;
    }

    CycleMetrics.Builder cycleMetricsBuilder = new CycleMetrics.Builder()
            .setConsecutiveFailures(consecutiveFailures)
            .setCycleDurationMillis(elapsed.toMillis())
            .setIsCycleSuccess(isCycleSuccess);
    lastCycleSuccessTimeNanoOptional.ifPresent(cycleMetricsBuilder::setLastCycleSuccessTimeNano);

    cycleMetricsReporting(cycleMetricsBuilder.build());
}
@Test public void testCycleCompleteWithFail() { final class TestProducerMetricsListener extends AbstractProducerMetricsListener { @Override public void cycleMetricsReporting(CycleMetrics cycleMetrics) { Assert.assertNotNull(cycleMetrics); Assert.assertEquals(1l, cycleMetrics.getConsecutiveFailures()); Assert.assertEquals(Optional.of(false), cycleMetrics.getIsCycleSuccess()); Assert.assertEquals(OptionalLong.of(TEST_CYCLE_DURATION_MILLIS.toMillis()), cycleMetrics.getCycleDurationMillis()); Assert.assertEquals(OptionalLong.of(TEST_LAST_CYCLE_NANOS), cycleMetrics.getLastCycleSuccessTimeNano()); } } AbstractProducerMetricsListener concreteProducerMetricsListener = new TestProducerMetricsListener(); concreteProducerMetricsListener.lastCycleSuccessTimeNanoOptional = OptionalLong.of(TEST_LAST_CYCLE_NANOS); concreteProducerMetricsListener.onCycleStart(TEST_VERSION); concreteProducerMetricsListener.onCycleComplete(TEST_STATUS_FAIL, mockReadState, TEST_VERSION, TEST_CYCLE_DURATION_MILLIS); }
public Future<KafkaVersionChange> reconcile() { return getVersionFromController() .compose(i -> getPods()) .compose(this::detectToAndFromVersions) .compose(i -> prepareVersionChange()); }
@Test public void testNewClusterWithNewProtocolVersion(VertxTestContext context) { VersionChangeCreator vcc = mockVersionChangeCreator( mockKafka(VERSIONS.defaultVersion().version(), "3.2", "2.8"), mockNewCluster(null, null, List.of()) ); Checkpoint async = context.checkpoint(); vcc.reconcile().onComplete(context.succeeding(c -> context.verify(() -> { assertThat(c.from(), is(VERSIONS.defaultVersion())); assertThat(c.to(), is(VERSIONS.defaultVersion())); assertThat(c.interBrokerProtocolVersion(), nullValue()); assertThat(c.logMessageFormatVersion(), nullValue()); assertThat(c.metadataVersion(), is(VERSIONS.defaultVersion().metadataVersion())); async.flag(); }))); }
public static Optional<Throwable> findThrowableOfThrowableType( Throwable throwable, ThrowableType throwableType) { if (throwable == null || throwableType == null) { return Optional.empty(); } Throwable t = throwable; while (t != null) { final ThrowableAnnotation annotation = t.getClass().getAnnotation(ThrowableAnnotation.class); if (annotation != null && annotation.value() == throwableType) { return Optional.of(t); } else { t = t.getCause(); } } return Optional.empty(); }
@Test void testFindThrowableOfThrowableType() { // no throwable type assertThat( ThrowableClassifier.findThrowableOfThrowableType( new Exception(), ThrowableType.RecoverableError)) .isNotPresent(); // no recoverable throwable type assertThat( ThrowableClassifier.findThrowableOfThrowableType( new TestPartitionDataMissingErrorException(), ThrowableType.RecoverableError)) .isNotPresent(); // direct recoverable throwable assertThat( ThrowableClassifier.findThrowableOfThrowableType( new TestRecoverableErrorException(), ThrowableType.RecoverableError)) .isPresent(); // nested recoverable throwable assertThat( ThrowableClassifier.findThrowableOfThrowableType( new Exception(new TestRecoverableErrorException()), ThrowableType.RecoverableError)) .isPresent(); // inherit recoverable throwable assertThat( ThrowableClassifier.findThrowableOfThrowableType( new TestRecoverableFailureSubException(), ThrowableType.RecoverableError)) .isPresent(); }
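The classifier reads a class-level annotation and walks getCause() until a match is found. A self-contained sketch of the same pattern, with hypothetical stand-ins for ThrowableAnnotation and ThrowableType (the real Flink types may differ, and the "inherit" test case suggests the real annotation is also @Inherited, which this sketch omits):

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Optional;

public class CauseChainDemo {
    enum Kind { RECOVERABLE, FATAL }

    @Retention(RetentionPolicy.RUNTIME)
    @interface Classified { Kind value(); }

    @Classified(Kind.RECOVERABLE)
    static class RecoverableFailure extends Exception { }

    static Optional<Throwable> find(Throwable t, Kind kind) {
        for (Throwable cur = t; cur != null; cur = cur.getCause()) {
            Classified c = cur.getClass().getAnnotation(Classified.class);
            if (c != null && c.value() == kind) {
                return Optional.of(cur);
            }
        }
        return Optional.empty();
    }

    public static void main(String[] args) {
        // Nested cause: the annotation is found via the cause chain.
        System.out.println(find(new Exception(new RecoverableFailure()), Kind.RECOVERABLE).isPresent()); // true
        System.out.println(find(new Exception(), Kind.RECOVERABLE).isPresent()); // false
    }
}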
static PublicKey recoverPublicKey(KeyFactory kf, PrivateKey priv) throws NoSuchAlgorithmException, InvalidKeySpecException { if (priv instanceof RSAPrivateCrtKey) { RSAPrivateCrtKey rsaPriv = (RSAPrivateCrtKey) priv; return kf.generatePublic(new RSAPublicKeySpec(rsaPriv.getModulus(), rsaPriv .getPublicExponent())); } else if (priv instanceof RSAPrivateKey) { BigInteger publicExponent = getRSAPublicExponentFromPkcs8Encoded(priv.getEncoded()); RSAPrivateKey rsaPriv = (RSAPrivateKey) priv; return kf.generatePublic(new RSAPublicKeySpec(rsaPriv.getModulus(), publicExponent)); } else if (priv instanceof DSAPrivateKey) { DSAPrivateKey dsaPriv = (DSAPrivateKey) priv; DSAParams params = dsaPriv.getParams(); // Calculate public key Y BigInteger y = params.getG().modPow(dsaPriv.getX(), params.getP()); return kf.generatePublic(new DSAPublicKeySpec(y, params.getP(), params.getQ(), params .getG())); } else if (priv instanceof ECPrivateKey) { ECPrivateKey ecPriv = (ECPrivateKey) priv; ECParameterSpec params = ecPriv.getParams(); // Calculate public key Y ECPoint generator = params.getGenerator(); BigInteger[] wCoords = EcCore.multiplyPointA(new BigInteger[] { generator.getAffineX(), generator.getAffineY() }, ecPriv.getS(), params); ECPoint w = new ECPoint(wCoords[0], wCoords[1]); return kf.generatePublic(new ECPublicKeySpec(w, params)); } else { throw new NoSuchAlgorithmException("Key type must be RSA, DSA, or EC"); } }
@Test public void recoverPublicKey_FakeKey_Failure() throws Exception { try { PubkeyUtils.recoverPublicKey(null, new MyPrivateKey()); fail("Should not accept unknown key types"); } catch (NoSuchAlgorithmException expected) { } }
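For the RSAPrivateCrtKey branch the public key falls out directly, because a CRT private key carries the public exponent alongside the modulus. A JDK-only sketch of that branch, with no Android or ConnectBot types involved (it assumes the default provider hands back a CRT key, which the standard JDK RSA generator does):

import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.PublicKey;
import java.security.interfaces.RSAPrivateCrtKey;
import java.security.spec.RSAPublicKeySpec;

public class RecoverRsaPublicDemo {
    public static void main(String[] args) throws Exception {
        KeyPairGenerator gen = KeyPairGenerator.getInstance("RSA");
        gen.initialize(2048);
        KeyPair pair = gen.generateKeyPair();

        // A CRT private key keeps the public exponent, so modulus + e rebuild the public key.
        RSAPrivateCrtKey priv = (RSAPrivateCrtKey) pair.getPrivate();
        KeyFactory kf = KeyFactory.getInstance("RSA");
        PublicKey recovered = kf.generatePublic(
                new RSAPublicKeySpec(priv.getModulus(), priv.getPublicExponent()));

        System.out.println(recovered.equals(pair.getPublic())); // true
    }
}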
@Override public HealthStatus getStatus() { if (cr.getStatus() == ComponentStatus.INITIALIZING) return HealthStatus.INITIALIZING; PartitionHandlingManager partitionHandlingManager = cr.getComponent(PartitionHandlingManager.class); if (!isComponentHealthy() || partitionHandlingManager.getAvailabilityMode() == AvailabilityMode.DEGRADED_MODE) { return HealthStatus.DEGRADED; } DistributionManager distributionManager = cr.getDistributionManager(); if (distributionManager != null && distributionManager.isRehashInProgress()) { return HealthStatus.HEALTHY_REBALANCING; } return HealthStatus.HEALTHY; }
@Test public void testHealthyStatus() { //given ComponentRegistry componentRegistryMock = mock(ComponentRegistry.class); DistributionManager distributionManagerMock = mock(DistributionManager.class); doReturn(false).when(distributionManagerMock).isRehashInProgress(); doReturn(distributionManagerMock).when(componentRegistryMock).getDistributionManager(); doReturn(ComponentStatus.RUNNING).when(componentRegistryMock).getStatus(); PartitionHandlingManager partitionHandlingManagerMock = mock(PartitionHandlingManager.class); doReturn(AvailabilityMode.AVAILABLE).when(partitionHandlingManagerMock).getAvailabilityMode(); doReturn(partitionHandlingManagerMock).when(componentRegistryMock).getComponent(eq(PartitionHandlingManager.class)); CacheHealth cacheHealth = new CacheHealthImpl(componentRegistryMock); //when HealthStatus status = cacheHealth.getStatus(); //then assertEquals(status, HealthStatus.HEALTHY); }
@Override public LoggingConfig extract(ConfigValue<?> value) { if (value instanceof ConfigValue.NullValue) { return new LoggingConfig(Map.of("ROOT", "info")); } if (!(value instanceof ObjectValue objectValue)) { throw ConfigValueExtractionException.unexpectedValueType(value, ObjectValue.class); } var levels = new LinkedHashMap<String, String>(); var levelsObject = objectValue.get("levels"); if (levelsObject instanceof ConfigValue.NullValue) { levelsObject = objectValue.get("level"); } if (levelsObject instanceof ConfigValue.NullValue) { return new LoggingConfig(levels); } if (levelsObject instanceof ObjectValue l) { for (var entry : l) { collectLevels("", entry.getKey(), entry.getValue(), levels); } return new LoggingConfig(levels); } else { throw ConfigValueExtractionException.unexpectedValueType(value, ObjectValue.class); } }
@Test void testParseConfig() { var config = MapConfigFactory.fromMap(Map.of( "logging", Map.of( "level", Map.of( "root", "info", "ru.tinkoff.package1", "debug", "ru.tinkoff.package2", "trace", "ru.tinkoff.package3", "warn", "ru.tinkoff.package4", "error", "ru.tinkoff.package5", "all" )) )); var extractor = new LoggingConfigValueExtractor(); var result = extractor.extract(config.get("logging")); Assertions.assertThat(result.levels()) .containsEntry("root", "info") .containsEntry("ru.tinkoff.package1", "debug") .containsEntry("ru.tinkoff.package2", "trace") .containsEntry("ru.tinkoff.package3", "warn") .containsEntry("ru.tinkoff.package4", "error") .containsEntry("ru.tinkoff.package5", "all") ; }
static Optional<RegistryAuthenticator> fromAuthenticationMethod( String authenticationMethod, RegistryEndpointRequestProperties registryEndpointRequestProperties, @Nullable String userAgent, FailoverHttpClient httpClient) throws RegistryAuthenticationFailedException { // If the authentication method starts with 'basic' (case insensitive), no registry // authentication is needed. if (authenticationMethod.matches("^(?i)(basic).*")) { return Optional.empty(); } String registryUrl = registryEndpointRequestProperties.getServerUrl(); String imageName = registryEndpointRequestProperties.getImageName(); // Checks that the authentication method starts with 'bearer ' (case insensitive). if (!authenticationMethod.matches("^(?i)(bearer) .*")) { throw newRegistryAuthenticationFailedException( registryUrl, imageName, authenticationMethod, "Bearer"); } Pattern realmPattern = Pattern.compile("realm=\"(.*?)\""); Matcher realmMatcher = realmPattern.matcher(authenticationMethod); if (!realmMatcher.find()) { throw newRegistryAuthenticationFailedException( registryUrl, imageName, authenticationMethod, "realm"); } String realm = realmMatcher.group(1); Pattern servicePattern = Pattern.compile("service=\"(.*?)\""); Matcher serviceMatcher = servicePattern.matcher(authenticationMethod); // use the provided registry location when missing service (e.g., for OpenShift) String service = serviceMatcher.find() ? serviceMatcher.group(1) : registryUrl; return Optional.of( new RegistryAuthenticator( realm, service, registryEndpointRequestProperties, userAgent, httpClient)); }
@Test public void testFromAuthenticationMethod_basic() throws RegistryAuthenticationFailedException { assertThat( RegistryAuthenticator.fromAuthenticationMethod( "Basic", registryEndpointRequestProperties, "user-agent", httpClient)) .isEmpty(); assertThat( RegistryAuthenticator.fromAuthenticationMethod( "Basic realm=\"https://somerealm\",service=\"someservice\",scope=\"somescope\"", registryEndpointRequestProperties, "user-agent", httpClient)) .isEmpty(); assertThat( RegistryAuthenticator.fromAuthenticationMethod( "BASIC realm=\"https://somerealm\",service=\"someservice\",scope=\"somescope\"", registryEndpointRequestProperties, "user-agent", httpClient)) .isEmpty(); assertThat( RegistryAuthenticator.fromAuthenticationMethod( "bASIC realm=\"https://somerealm\",service=\"someservice\",scope=\"somescope\"", registryEndpointRequestProperties, "user-agent", httpClient)) .isEmpty(); }
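The method parses a WWW-Authenticate challenge with two small regexes: realm is mandatory, while a missing service falls back to the registry URL. A minimal sketch of that extraction (the URLs are made-up placeholders):

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class BearerChallengeDemo {
    public static void main(String[] args) {
        String header = "Bearer realm=\"https://auth.example.io/token\",service=\"registry.example.io\"";

        Matcher realm = Pattern.compile("realm=\"(.*?)\"").matcher(header);
        Matcher service = Pattern.compile("service=\"(.*?)\"").matcher(header);

        System.out.println(realm.find() ? realm.group(1) : "<missing>");  // https://auth.example.io/token
        // When service is absent (e.g. on OpenShift), the focal method substitutes the registry URL.
        System.out.println(service.find() ? service.group(1) : "<fallback to registry URL>");
    }
}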
@PUT @Consumes("application/json") @Produces("application/json") @Path("compare") public Map<String, Object> compare(InputStream is) throws Exception { JsonNode node = null; try (BufferedReader reader = new BufferedReader( new InputStreamReader(is, StandardCharsets.UTF_8))) { node = new ObjectMapper().readTree(reader); } String id = node.get(ID).asText(); String textA = node.get(TEXT_A).asText(); String textB = node.get(TEXT_B).asText(); long timeoutMillis = node.has("timeoutMillis") ? node.get("timeoutMillis").asLong() : DEFAULT_TIMEOUT_MILLIS; return compareText(id, textA, textB, timeoutMillis); }
@Test public void testBasicCompare() throws Exception { Map<String, String> request = new HashMap<>(); request.put(TikaEvalResource.ID, "1"); request.put(TikaEvalResource.TEXT_A, "the quick brown fox jumped qwertyuiop"); request.put(TikaEvalResource.TEXT_B, "the the the fast brown dog jumped qwertyuiop"); Response response = compare(request); Map<String, Object> results = deserialize(response); assertEquals(6, (int)results.get(TikaEvalMetadataFilter.NUM_TOKENS.getName() + "A")); assertEquals(0.166, (double)results.get(TikaEvalMetadataFilter.OUT_OF_VOCABULARY.getName() + "A"), 0.01); assertEquals("eng", results.get(TikaEvalMetadataFilter.LANGUAGE.getName() + "A")); assertEquals(0.666, (double)results.get(TikaEvalResource.DICE.getName()), 0.01); assertEquals(0.571, (double)results.get(TikaEvalResource.OVERLAP.getName()), 0.01); }
public ClusterStatsResponse clusterStats() { return execute(() -> { Request request = new Request("GET", "/_cluster/stats"); Response response = restHighLevelClient.getLowLevelClient().performRequest(request); return ClusterStatsResponse.toClusterStatsResponse(gson.fromJson(EntityUtils.toString(response.getEntity()), JsonObject.class)); }); }
@Test public void should_rethrow_ex_on_cluster_stat_fail() throws Exception { when(restClient.performRequest(argThat(new RawRequestMatcher( "GET", "/_cluster/stats")))) .thenThrow(IOException.class); assertThatThrownBy(() -> underTest.clusterStats()) .isInstanceOf(ElasticsearchException.class); }
void forwardToStateService(DeviceStateServiceMsgProto deviceStateServiceMsg, TbCallback callback) { if (statsEnabled) { stats.log(deviceStateServiceMsg); } stateService.onQueueMsg(deviceStateServiceMsg, callback); }
@Test public void givenProcessingSuccess_whenForwardingInactivityMsgToStateService_thenOnSuccessCallbackIsCalled() { // GIVEN var inactivityMsg = TransportProtos.DeviceInactivityProto.newBuilder() .setTenantIdMSB(tenantId.getId().getMostSignificantBits()) .setTenantIdLSB(tenantId.getId().getLeastSignificantBits()) .setDeviceIdMSB(deviceId.getId().getMostSignificantBits()) .setDeviceIdLSB(deviceId.getId().getLeastSignificantBits()) .setLastInactivityTime(time) .build(); doCallRealMethod().when(defaultTbCoreConsumerServiceMock).forwardToStateService(inactivityMsg, tbCallbackMock); // WHEN defaultTbCoreConsumerServiceMock.forwardToStateService(inactivityMsg, tbCallbackMock); // THEN then(stateServiceMock).should().onDeviceInactivity(tenantId, deviceId, time); then(tbCallbackMock).should().onSuccess(); then(tbCallbackMock).should(never()).onFailure(any()); }
@Override public void doStart() throws Exception { if (!pluginConfigService.config().getCurrent().spamhausEnabled()) { throw new AdapterDisabledException("Spamhaus service is disabled, not starting (E)DROP adapter. To enable it please go to System / Configurations."); } final ImmutableMap.Builder<String, Map<SubnetUtils.SubnetInfo, String>> builder = ImmutableMap.builder(); for (String list : lists) { final Map<SubnetUtils.SubnetInfo, String> subnetMap = fetchSubnetsFromEDROPLists(list); if (subnetMap != null) { builder.put(list, subnetMap); } } this.subnets.set(builder.build()); }
@Test public void tableStateShouldRetrieveListsSuccessfully() throws Exception { when(httpFileRetriever.fetchFileIfNotModified("https://www.spamhaus.org/drop/drop.txt")).thenReturn(Optional.of(dropSnapshot)); when(httpFileRetriever.fetchFileIfNotModified("https://www.spamhaus.org/drop/edrop.txt")).thenReturn(Optional.of(edropSnapshot)); adapter.doStart(); verifyAdapterFunctionality(adapter); }
@Override public Serializer getSerializer(Class cl) throws HessianProtocolException { if (GenericObject.class == cl) { return GenericObjectSerializer.getInstance(); } if (GenericArray.class == cl) { return GenericArraySerializer.getInstance(); } if (GenericCollection.class == cl) { return GenericCollectionSerializer.getInstance(); } if (GenericMap.class == cl) { return GenericMapSerializer.getInstance(); } if (GenericClass.class == cl) { return GenericClassSerializer.getInstance(); } return super.getSerializer(cl); }
@Test public void getSerializer() throws Exception { GenericMultipleClassLoaderSofaSerializerFactory factory = new GenericMultipleClassLoaderSofaSerializerFactory(); Assert.assertEquals(factory.getSerializer(GenericObject.class).getClass(), GenericObjectSerializer.class); Assert.assertEquals(factory.getSerializer(GenericArray.class).getClass(), GenericArraySerializer.class); Assert.assertEquals(factory.getSerializer(GenericCollection.class).getClass(), GenericCollectionSerializer.class); Assert.assertEquals(factory.getSerializer(GenericMap.class).getClass(), GenericMapSerializer.class); Assert.assertEquals(factory.getSerializer(GenericClass.class).getClass(), GenericClassSerializer.class); }
public ImmutableList<Path> walk(PathConsumer pathConsumer) throws IOException { ImmutableList<Path> files = walk(); for (Path path : files) { pathConsumer.accept(path); } return files; }
@Test public void testWalk() throws IOException { new DirectoryWalker(testDir).walk(addToWalkedPaths); Set<Path> expectedPaths = new HashSet<>( Arrays.asList( testDir, testDir.resolve("a"), testDir.resolve("a").resolve("b"), testDir.resolve("a").resolve("b").resolve("bar"), testDir.resolve("c"), testDir.resolve("c").resolve("cat"), testDir.resolve("foo"))); Assert.assertEquals(expectedPaths, walkedPaths); }
@Override public View onCreateInputView() { final View view = super.onCreateInputView(); // triggering a new controller creation mKeyPreviewSubject.onNext(SystemClock.uptimeMillis()); return view; }
@Test public void testNewKeyPreviewControllerOnInputViewReCreate() { simulateOnStartInputFlow(); final ArgumentCaptor<KeyPreviewsController> captor = ArgumentCaptor.forClass(KeyPreviewsController.class); Mockito.verify(mAnySoftKeyboardUnderTest.getSpiedKeyboardView()) .setKeyPreviewController(captor.capture()); final KeyPreviewsController firstInstance = captor.getValue(); simulateFinishInputFlow(); mAnySoftKeyboardUnderTest.onCreateInputView(); simulateOnStartInputFlow(); Mockito.verify(mAnySoftKeyboardUnderTest.getSpiedKeyboardView()) .setKeyPreviewController( Mockito.argThat(argument -> argument != null && argument != firstInstance)); }
@Udf(description = "Adds a duration to a timestamp") public Timestamp timestampAdd( @UdfParameter(description = "A unit of time, for example DAY or HOUR") final TimeUnit unit, @UdfParameter(description = "An integer number of intervals to add") final Integer interval, @UdfParameter(description = "A TIMESTAMP value.") final Timestamp timestamp ) { if (unit == null || interval == null || timestamp == null) { return null; } return new Timestamp(timestamp.getTime() + unit.toMillis(interval)); }
@Test public void addNegativeTimestamp() { // When: final Timestamp result = udf.timestampAdd(TimeUnit.MILLISECONDS, -300, new Timestamp(100)); // Then: final Timestamp expectedResult = new Timestamp(-200); assertThat(result, is(expectedResult)); }
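The UDF is a one-liner over TimeUnit.toMillis, so a negative interval simply shifts the timestamp backwards, which is what the test above exercises. A plain-JDK sketch of the arithmetic:

import java.sql.Timestamp;
import java.util.concurrent.TimeUnit;

public class TimestampAddDemo {
    public static void main(String[] args) {
        Timestamp base = new Timestamp(100L);
        // -300 ms: the same arithmetic as udf.timestampAdd(TimeUnit.MILLISECONDS, -300, base)
        Timestamp shifted = new Timestamp(base.getTime() + TimeUnit.MILLISECONDS.toMillis(-300));
        System.out.println(shifted.getTime()); // -200

        // Units scale before the addition, e.g. 2 days:
        System.out.println(TimeUnit.DAYS.toMillis(2)); // 172800000
    }
}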
public Map<String, Parameter> generateMergedWorkflowParams( WorkflowInstance instance, RunRequest request) { Workflow workflow = instance.getRuntimeWorkflow(); Map<String, ParamDefinition> allParamDefs = new LinkedHashMap<>(); Map<String, ParamDefinition> defaultWorkflowParams = defaultParamManager.getDefaultWorkflowParams(); // merge workflow params for start if (request.isFreshRun()) { // merge default workflow params ParamsMergeHelper.mergeParams( allParamDefs, defaultWorkflowParams, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM_DEFAULT, request)); // merge defined workflow params if (workflow.getParams() != null) { ParamsMergeHelper.mergeParams( allParamDefs, workflow.getParams(), ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.DEFINITION, request)); } } // merge workflow params from previous instance for restart if (!request.isFreshRun() && instance.getParams() != null) { Map<String, ParamDefinition> previousParamDefs = instance.getParams().entrySet().stream() .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().toDefinition())); // remove reserved params, which should be injected again by the system. for (String paramName : Constants.RESERVED_PARAM_NAMES) { previousParamDefs.remove(paramName); } ParamsMergeHelper.mergeParams( allParamDefs, previousParamDefs, ParamsMergeHelper.MergeContext.workflowCreate(ParamSource.SYSTEM, false)); } // merge run params if (request.getRunParams() != null) { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, request.getRunParams(), ParamsMergeHelper.MergeContext.workflowCreate(source, request)); } // merge user provided restart run params getUserRestartParam(request) .ifPresent( userRestartParams -> { ParamSource source = getParamSource(request.getInitiator(), request.isFreshRun()); ParamsMergeHelper.mergeParams( allParamDefs, userRestartParams, ParamsMergeHelper.MergeContext.workflowCreate(source, request)); }); // cleanup any placeholder params and convert to params return ParamsMergeHelper.convertToParameters(ParamsMergeHelper.cleanupParams(allParamDefs)); }
@Test public void testWorkflowParamRunParamsStartRestart() { Map<String, ParamDefinition> runParams = singletonMap("p1", ParamDefinition.buildParamDefinition("p1", "d1")); RunRequest request = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.START_FRESH_NEW_RUN) .runParams(runParams) .build(); Map<String, Parameter> workflowParams = paramsManager.generateMergedWorkflowParams(workflowInstance, request); Assert.assertFalse(workflowParams.isEmpty()); Assert.assertEquals("d1", workflowParams.get("p1").asStringParam().getValue()); Assert.assertEquals(ParamSource.LAUNCH, workflowParams.get("p1").getSource()); // restart should propagate to source request = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .runParams(runParams) .build(); Map<String, Parameter> instanceParams = new LinkedHashMap<>(); instanceParams.put("RUN_TS", buildParam("RUN_TS", 123L)); instanceParams.put( "DSL_DEFAULT_TZ", StringParameter.builder() .name("DSL_DEFAULT_TZ") .value("US/Pacific") .evaluatedResult("US/Pacific") .evaluatedTime(123L) .mode(ParamMode.MUTABLE_ON_START) .build()); workflowInstance.setParams(instanceParams); Map<String, Parameter> restartWorkflowParams = paramsManager.generateMergedWorkflowParams(workflowInstance, request); Assert.assertEquals("d1", restartWorkflowParams.get("p1").asStringParam().getValue()); Assert.assertEquals(ParamSource.RESTART, restartWorkflowParams.get("p1").getSource()); Assert.assertEquals( Long.valueOf(123), restartWorkflowParams.get("RUN_TS").asLongParam().getValue()); Assert.assertEquals( "US/Pacific", restartWorkflowParams.get("DSL_DEFAULT_TZ").asStringParam().getValue()); // should fail the restart runParams = singletonMap( "DSL_DEFAULT_TZ", ParamDefinition.buildParamDefinition("DSL_DEFAULT_TZ", "UTC")); RunRequest badRequest = RunRequest.builder() .initiator(new ManualInitiator()) .currentPolicy(RunPolicy.RESTART_FROM_SPECIFIC) .runParams(runParams) .build(); AssertHelper.assertThrows( "Cannot modify param with MUTABLE_ON_START during restart", MaestroValidationException.class, "Cannot modify param with mode [MUTABLE_ON_START] for parameter [DSL_DEFAULT_TZ]", () -> paramsManager.generateMergedWorkflowParams(workflowInstance, badRequest)); }
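The merge order in the focal method is what gives run parameters their precedence: system defaults first, then the workflow definition, then (on restart) the previous instance's params minus reserved names, then run params, then user restart params, each layer overriding the last. A deliberately simplified plain-map illustration of that layering; ParamsMergeHelper itself also enforces param modes such as MUTABLE_ON_START, which this sketch ignores:

import java.util.LinkedHashMap;
import java.util.Map;

public class MergeOrderDemo {
    public static void main(String[] args) {
        Map<String, String> merged = new LinkedHashMap<>();
        merged.put("tz", "UTC");             // system default
        merged.put("p1", "default");         // system default
        merged.put("p1", "from-definition"); // workflow definition overrides
        merged.put("p1", "d1");              // run params override last
        System.out.println(merged);          // {tz=UTC, p1=d1}
    }
}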
@VisibleForTesting BackupReplayFile openOrCreateReplayFile() { try { final Optional<BackupReplayFile> backupReplayFile = latestReplayFile(); if (backupReplayFile.isPresent()) { return backupReplayFile.get(); } return newReplayFile(); } catch (final IOException e) { throw new RuntimeException(e); } }
@Test public void shouldOpenReplayFileAndIgnoreFileWithInvalidTimestamp() throws IOException { // Given: backupLocation.newFile("backup_command_topic_111"); backupLocation.newFile("backup_command_topic_222x"); // When: final BackupReplayFile replayFile = commandTopicBackup.openOrCreateReplayFile(); // Then: assertThat(replayFile.getPath(), is(String.format( "%s/backup_command_topic_111", backupLocation.getRoot().getAbsolutePath() ))); }
private static VerificationResult verifyChecksums(String expectedDigest, String actualDigest, boolean caseSensitive) { if (expectedDigest == null) { return VerificationResult.NOT_PROVIDED; } if (actualDigest == null) { return VerificationResult.NOT_COMPUTED; } if (caseSensitive) { if (MessageDigest.isEqual(expectedDigest.getBytes(StandardCharsets.US_ASCII), actualDigest.getBytes(StandardCharsets.US_ASCII))) { return VerificationResult.PASS; } } else { if (MessageDigest.isEqual(expectedDigest.toLowerCase().getBytes(StandardCharsets.US_ASCII), actualDigest.toLowerCase().getBytes(StandardCharsets.US_ASCII))) { return VerificationResult.PASS; } } return VerificationResult.FAIL; }
@Test public void sha1Match() throws Exception { UpdateCenter.verifyChecksums( new MockDownloadJob(EMPTY_SHA1, null, null), buildEntryWithExpectedChecksums(EMPTY_SHA1, null, null), new File("example")); }
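Note the use of MessageDigest.isEqual rather than String.equals: it compares in constant time, which avoids leaking how many leading characters of a checksum matched. A standalone sketch of the case-insensitive branch, using the well-known SHA-1 of empty input:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;

public class DigestCompareDemo {
    public static void main(String[] args) {
        String expected = "DA39A3EE5E6B4B0D3255BFEF95601890AFD80709"; // SHA-1 of empty input
        String actual   = "da39a3ee5e6b4b0d3255bfef95601890afd80709";

        boolean match = MessageDigest.isEqual(
                expected.toLowerCase().getBytes(StandardCharsets.US_ASCII),
                actual.toLowerCase().getBytes(StandardCharsets.US_ASCII));
        System.out.println(match); // true: constant-time, case-insensitive comparison
    }
}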
@Override public boolean match(Message msg, StreamRule rule) { if (msg.getField(rule.getField()) == null) return rule.getInverted(); try { final Pattern pattern = patternCache.get(rule.getValue()); final CharSequence charSequence = new InterruptibleCharSequence(msg.getField(rule.getField()).toString()); return rule.getInverted() ^ pattern.matcher(charSequence).find(); } catch (ExecutionException e) { LOG.error("Unable to get pattern from regex cache: ", e); } return false; }
@Test public void testNullFieldShouldNotMatch() throws Exception { final String fieldName = "nullfield"; final StreamRule rule = getSampleRule(); rule.setField(fieldName); rule.setValue("^foo"); final Message msg = getSampleMessage(); msg.addField(fieldName, null); final StreamRuleMatcher matcher = getMatcher(rule); assertFalse(matcher.match(msg, rule)); }
public BeaconParser setBeaconLayout(String beaconLayout) { LogManager.d(TAG, "API setBeaconLayout "+beaconLayout); mBeaconLayout = beaconLayout; Log.d(TAG, "Parsing beacon layout: "+beaconLayout); String[] terms = beaconLayout.split(","); mExtraFrame = false; // this is not an extra frame by default for (String term : terms) { boolean found = false; Matcher matcher = I_PATTERN.matcher(term); while (matcher.find()) { found = true; try { int startOffset = Integer.parseInt(matcher.group(1)); int endOffset = Integer.parseInt(matcher.group(2)); Boolean littleEndian = matcher.group(3).contains(LITTLE_ENDIAN_SUFFIX); mIdentifierLittleEndianFlags.add(littleEndian); Boolean variableLength = matcher.group(3).contains(VARIABLE_LENGTH_SUFFIX); mIdentifierVariableLengthFlags.add(variableLength); mIdentifierStartOffsets.add(startOffset); mIdentifierEndOffsets.add(endOffset); } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse integer byte offset in term: " + term); } } matcher = D_PATTERN.matcher(term); while (matcher.find()) { found = true; try { int startOffset = Integer.parseInt(matcher.group(1)); int endOffset = Integer.parseInt(matcher.group(2)); Boolean littleEndian = matcher.group(3).contains("l"); mDataLittleEndianFlags.add(littleEndian); mDataStartOffsets.add(startOffset); mDataEndOffsets.add(endOffset); } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse integer byte offset in term: " + term); } } matcher = P_PATTERN.matcher(term); while (matcher.find()) { found = true; String correctionString = "none"; try { if (matcher.group(1) != null && matcher.group(2) != null) { int startOffset = Integer.parseInt(matcher.group(1)); int endOffset = Integer.parseInt(matcher.group(2)); mPowerStartOffset=startOffset; mPowerEndOffset=endOffset; } int dBmCorrection = 0; if (matcher.group(3) != null) { correctionString = matcher.group(3); dBmCorrection = Integer.parseInt(correctionString); } mDBmCorrection=dBmCorrection; } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse integer power byte offset ("+correctionString+") in term: " + term); } } matcher = M_PATTERN.matcher(term); while (matcher.find()) { found = true; try { int startOffset = Integer.parseInt(matcher.group(1)); int endOffset = Integer.parseInt(matcher.group(2)); mMatchingBeaconTypeCodeStartOffset = startOffset; mMatchingBeaconTypeCodeEndOffset = endOffset; } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse integer byte offset in term: " + term); } String hexString = matcher.group(3); try { mMatchingBeaconTypeCode = Long.decode("0x"+hexString); } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse beacon type code: "+hexString+" in term: " + term); } } matcher = S_PATTERN.matcher(term); while (matcher.find()) { found = true; try { int startOffset = Integer.parseInt(matcher.group(1)); int endOffset = Integer.parseInt(matcher.group(2)); mServiceUuidStartOffset = startOffset; mServiceUuidEndOffset = endOffset; } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse integer byte offset in term: " + term); } String hexString = matcher.group(3); if (mServiceUuidEndOffset - mServiceUuidStartOffset + 1 == 2) { try { mServiceUuid = Long.decode("0x" + hexString); } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse serviceUuid: " + hexString + " in term: " + term); } } else if (mServiceUuidEndOffset - mServiceUuidStartOffset + 1 == 16) { String bytesString = 
hexString.replace("-", ""); if (bytesString.length() != 32) { throw new BeaconLayoutException("128-bit ServiceUuid must be 16 bytes long: "+hexString+" in term: " + term); } mServiceUuid128Bit = new byte[16]; for (int i = 0; i < 16; i++) { String byteString = bytesString.substring(i*2,i*2+2); try { mServiceUuid128Bit[15-i] = (byte) Integer.parseInt(byteString, 16); } catch (NumberFormatException e) { throw new BeaconLayoutException("Cannot parse serviceUuid byte "+byteString+" in term: " + term); } } } else { throw new BeaconLayoutException("Cannot parse serviceUuid -- it must be 2 bytes or 16 bytes long: " + hexString + " in term: " + term); } } matcher = X_PATTERN.matcher(term); while (matcher.find()) { found = true; mExtraFrame = true; } if (!found) { LogManager.d(TAG, "cannot parse term %s", term); throw new BeaconLayoutException("Cannot parse beacon layout term: " + term); } } mLayoutSize = calculateLayoutSize(); return this; }
@Test public void testSetBeaconLayout() { byte[] bytes = hexStringToByteArray("02011a1bffbeac2f234454cf6d4a0fadf2f4911ba9ffa600010002c509000000"); BeaconParser parser = new BeaconParser(); parser.setBeaconLayout("m:2-3=beac,i:4-19,i:20-21,i:22-23,p:24-24,d:25-25"); assertEquals("parser should get beacon type code start offset", new Integer(2), parser.mMatchingBeaconTypeCodeStartOffset); assertEquals("parser should get beacon type code end offset", new Integer(3), parser.mMatchingBeaconTypeCodeEndOffset); assertEquals("parser should get beacon type code", new Long(0xbeac), parser.getMatchingBeaconTypeCode()); assertEquals("parser should get identifier start offset", new Integer(4), parser.mIdentifierStartOffsets.get(0)); assertEquals("parser should get identifier end offset", new Integer(19), parser.mIdentifierEndOffsets.get(0)); assertEquals("parser should get identifier start offset", new Integer(20), parser.mIdentifierStartOffsets.get(1)); assertEquals("parser should get identifier end offset", new Integer(21), parser.mIdentifierEndOffsets.get(1)); assertEquals("parser should get identifier start offset", new Integer(22), parser.mIdentifierStartOffsets.get(2)); assertEquals("parser should get identifier end offset", new Integer(23), parser.mIdentifierEndOffsets.get(2)); assertEquals("parser should get power start offset", new Integer(24), parser.mPowerStartOffset); assertEquals("parser should get power end offset", new Integer(24), parser.mPowerEndOffset); assertEquals("parser should get data start offset", new Integer(25), parser.mDataStartOffsets.get(0)); assertEquals("parser should get data end offset", new Integer(25), parser.mDataEndOffsets.get(0)); }
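Each comma-separated layout term is matched by a small regex family (I_PATTERN, D_PATTERN, and so on). The exact patterns are not shown in the source above, but a plausible hypothetical stand-in for identifier terms like i:4-19 or i:20-21l (an offset pair plus optional little-endian/variable-length suffix flags) looks like this:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class LayoutTermDemo {
    // Hypothetical stand-in for I_PATTERN: start offset, end offset, optional suffix flags.
    private static final Pattern I_TERM = Pattern.compile("i:(\\d+)-(\\d+)([blv]*)");

    public static void main(String[] args) {
        for (String term : "i:4-19,i:20-21l".split(",")) {
            Matcher m = I_TERM.matcher(term);
            if (m.find()) {
                System.out.printf("start=%s end=%s littleEndian=%b%n",
                        m.group(1), m.group(2), m.group(3).contains("l"));
            }
        }
        // start=4 end=19 littleEndian=false
        // start=20 end=21 littleEndian=true
    }
}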
public OpenAPI read(Class<?> cls) { return read(cls, resolveApplicationPath(), null, false, null, null, new LinkedHashSet<String>(), new ArrayList<Parameter>(), new HashSet<Class<?>>()); }
@Test public void test4446CyclicProp() { Reader reader = new Reader(new OpenAPI()); OpenAPI openAPI = reader.read(Ticket4446Resource.class); String yaml = "openapi: 3.0.1\n" + "paths:\n" + " /test/test:\n" + " get:\n" + " operationId: getCart\n" + " responses:\n" + " default:\n" + " description: default response\n" + " content:\n" + " '*/*':\n" + " schema:\n" + " $ref: '#/components/schemas/MyPojo'\n" + "components:\n" + " schemas:\n" + " MyPojo:\n" + " type: object\n" + " properties:\n" + " someStrings:\n" + " type: array\n" + " items:\n" + " type: string\n" + " morePojos:\n" + " type: array\n" + " items:\n" + " $ref: '#/components/schemas/MyPojo'\n"; SerializationMatchers.assertEqualsToYaml(openAPI, yaml); }
public static L3ModificationInstruction modL3Dst(IpAddress addr) { checkNotNull(addr, "Dst l3 IPv4 address cannot be null"); return new ModIPInstruction(L3SubType.IPV4_DST, addr); }
@Test public void testModL3DstMethod() { final Instruction instruction = Instructions.modL3Dst(ip41); final L3ModificationInstruction.ModIPInstruction modIPInstruction = checkAndConvert(instruction, Instruction.Type.L3MODIFICATION, L3ModificationInstruction.ModIPInstruction.class); assertThat(modIPInstruction.ip(), is(equalTo(ip41))); assertThat(modIPInstruction.subtype(), is(equalTo(L3ModificationInstruction.L3SubType.IPV4_DST))); }
@Private @VisibleForTesting static void checkResourceRequestAgainstAvailableResource(Resource reqResource, Resource availableResource) throws InvalidResourceRequestException { for (int i = 0; i < ResourceUtils.getNumberOfCountableResourceTypes(); i++) { final ResourceInformation requestedRI = reqResource.getResourceInformation(i); final String reqResourceName = requestedRI.getName(); if (requestedRI.getValue() < 0) { throwInvalidResourceException(reqResource, availableResource, reqResourceName, InvalidResourceType.LESS_THAN_ZERO); } boolean valid = checkResource(requestedRI, availableResource); if (!valid) { throwInvalidResourceException(reqResource, availableResource, reqResourceName, InvalidResourceType.GREATER_THEN_MAX_ALLOCATION); } } }
@Test public void testCustomResourceRequestedUnitIsSameAsAvailableUnit() { Resource requestedResource = ResourceTypesTestHelper.newResource(1, 1, ImmutableMap.of("custom-resource-1", "11M")); Resource availableResource = ResourceTypesTestHelper.newResource(1, 1, ImmutableMap.of("custom-resource-1", "100M")); try { SchedulerUtils.checkResourceRequestAgainstAvailableResource( requestedResource, availableResource); } catch (InvalidResourceRequestException e) { fail(String.format( "Resource request should be accepted. Requested: %s, available: %s", requestedResource, availableResource)); } }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { if (schema == null && value == null) { return null; } JsonNode jsonValue = config.schemasEnabled() ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
@Test public void longToJson() { JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.INT64_SCHEMA, 4398046511104L)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"int64\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(4398046511104L, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).longValue()); }
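With schemas enabled, the converter wraps every value in a schema/payload envelope, which is what the test's validateEnvelope checks. A small Jackson sketch that parses such an envelope back, assuming Jackson is on the classpath and the standard Kafka Connect envelope field names:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class EnvelopeDemo {
    public static void main(String[] args) throws Exception {
        String json = "{\"schema\":{\"type\":\"int64\",\"optional\":false},\"payload\":4398046511104}";
        JsonNode envelope = new ObjectMapper().readTree(json);
        System.out.println(envelope.get("schema").get("type").asText()); // int64
        System.out.println(envelope.get("payload").longValue());         // 4398046511104
    }
}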
public float[] decodeFloat4Array(final byte[] parameterBytes, final boolean isBinary) { ShardingSpherePreconditions.checkState(!isBinary, () -> new UnsupportedSQLOperationException("binary mode")); String parameterValue = new String(parameterBytes, StandardCharsets.UTF_8); Collection<String> parameterElements = decodeText(parameterValue); float[] result = new float[parameterElements.size()]; int index = 0; for (String each : parameterElements) { result[index++] = Float.parseFloat(each); } return result; }
@Test void assertParseFloat4ArrayNormalTextMode() { float[] actual = DECODER.decodeFloat4Array(FLOAT_ARRAY_STR.getBytes(), false); assertThat(actual.length, is(2)); assertThat(Float.compare(actual[0], 11.1F), is(0)); assertThat(Float.compare(actual[1], 12.1F), is(0)); }
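PostgreSQL's text representation of a float4[] is a brace-wrapped, comma-separated list such as {11.1,12.1}; the decoder splits that form and parses each element. A minimal sketch of the text-mode path under that assumption (it ignores quoting and NULL elements, which the real decodeText handles):

public class Float4ArrayDemo {
    static float[] decode(String text) {
        String body = text.substring(1, text.length() - 1); // strip '{' and '}'
        if (body.isEmpty()) {
            return new float[0];
        }
        String[] parts = body.split(",");
        float[] result = new float[parts.length];
        for (int i = 0; i < parts.length; i++) {
            result[i] = Float.parseFloat(parts[i]);
        }
        return result;
    }

    public static void main(String[] args) {
        float[] decoded = decode("{11.1,12.1}");
        System.out.println(decoded[0] + " " + decoded[1]); // 11.1 12.1
    }
}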
@Override
public AwsProxyResponse handle(Throwable ex) {
    log.error("Called exception handler for:", ex);

    // Adding a print stack trace in case we have no appender or we are running inside SAM local,
    // where we need the output to go to stderr.
    ex.printStackTrace();
    if (ex instanceof InvalidRequestEventException || ex instanceof InternalServerErrorException) {
        return new AwsProxyResponse(500, HEADERS, getErrorJson(INTERNAL_SERVER_ERROR));
    } else {
        return new AwsProxyResponse(502, HEADERS, getErrorJson(GATEWAY_TIMEOUT_ERROR));
    }
}
@Test void typedHandle_InvalidRequestEventException_responseString() throws JsonProcessingException { AwsProxyResponse resp = exceptionHandler.handle(new InvalidRequestEventException(INVALID_REQUEST_MESSAGE, null)); assertNotNull(resp); String body = objectMapper.writeValueAsString(new ErrorModel(AwsProxyExceptionHandler.INTERNAL_SERVER_ERROR)); assertEquals(body, resp.getBody()); }
@Override public boolean remove(final Object value) { return value instanceof Integer i && remove(i.intValue()); }
@Test public void removingAnElementFromAnEmptyListDoesNothing() { assertFalse(set.remove(0)); }
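The remove override uses Java 16+ pattern matching for instanceof, which tests the type and binds the cast variable in a single expression. A tiny standalone illustration of the idiom:

public class PatternMatchDemo {
    static boolean describe(Object value) {
        // Pattern matching: test and bind in one expression (Java 16+).
        return value instanceof Integer i && i.intValue() > 0;
    }

    public static void main(String[] args) {
        System.out.println(describe(42));   // true
        System.out.println(describe(-1));   // false
        System.out.println(describe("42")); // false: not an Integer, so the binding never happens
    }
}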
public static <T> T deserializeByType(Object src, Class<T> clazz) {
    if (src == null) {
        return (T) ClassUtils.getDefaultPrimitiveValue(clazz);
    } else if (src instanceof Boolean) {
        return (T) CompatibleTypeUtils.convert(src, clazz);
    } else if (src instanceof Number) {
        return (T) CompatibleTypeUtils.convert(src, clazz);
    } else if (src instanceof Map) { // Map --> may be a plain map or a custom object
        Map srcMap = (Map) src;
        if (clazz == Object.class) { // needs introspection
            if (srcMap.containsKey(JSON.CLASS_KEY)) {
                return (T) mapToObject(srcMap, Object.class); // custom object
            } else {
                return (T) mapToMap(srcMap, srcMap.getClass());
            }
        } else {
            if (Map.class.isAssignableFrom(clazz)) { // map-to-map conversion
                return (T) mapToMap(srcMap, (Class<? extends Map>) clazz);
            } else {
                return mapToObject(srcMap, clazz); // custom object
            }
        }
    } else if (src instanceof Collection) {
        Collection list = (Collection) src;
        if (clazz == Object.class) {
            return (T) collection2Collection(list, list.getClass(), Object.class);
        } else if (Collection.class.isAssignableFrom(clazz)) {
            return (T) collection2Collection(list, (Class<? extends Collection>) clazz, Object.class);
        } else if (clazz.isArray()) {
            if (clazz.getComponentType().isPrimitive()) {
                return (T) CompatibleTypeUtils.convert(list, clazz);
            } else {
                return (T) collectionToArray(list, clazz.getComponentType());
            }
        } else {
            return (T) list;
        }
    } else if (src.getClass().isArray()) {
        Class componentType = src.getClass().getComponentType();
        if (componentType.isPrimitive()) {
            if (Collection.class.isAssignableFrom(clazz)) {
                return (T) arrayToCollection(src, (Class<? extends Collection>) clazz, Object.class);
            } else {
                return (T) src;
            }
        } else {
            Object[] array = (Object[]) src;
            if (clazz == Object.class) {
                return (T) array2Array(array, array.getClass().getComponentType());
            } else if (clazz.isArray()) {
                return (T) array2Array(array, clazz.getComponentType());
            } else if (Collection.class.isAssignableFrom(clazz)) {
                return (T) arrayToCollection(src, (Class<? extends Collection>) clazz, Object.class);
            } else {
                return (T) src;
            }
        }
    } else if (clazz.isEnum()) { // enum: read from its string name
        if (src instanceof String) {
            return (T) Enum.valueOf((Class<? extends Enum>) clazz, (String) src);
        } else {
            throw new RuntimeException("Enum field must set string!");
        }
    } else if (Date.class.isAssignableFrom(clazz)) { // date: accepts a long or a standard-format string
        if (src instanceof Long) {
            return (T) new Date((Long) src);
        } else if (src instanceof String) {
            try {
                return (T) DateUtils.strToDate((String) src);
            } catch (Exception e) {
                throw new RuntimeException("Date field must set string(yyyy-MM-dd HH:mm:ss)!");
            }
        } else {
            throw new RuntimeException("Date field must set long or string(yyyy-MM-dd HH:mm:ss)!");
        }
    } else if (src instanceof String) { // strings support type conversion
        return (T) CompatibleTypeUtils.convert(src, clazz);
    } else { // otherwise return src as-is
        return (T) src;
    }
}
@Test public void testDeserializeByType() { Assert.assertTrue(0 == BeanSerializer.deserializeByType(null, int.class)); Assert.assertTrue(0 == BeanSerializer.deserializeByType(null, long.class)); Assert.assertFalse(BeanSerializer.deserializeByType(null, boolean.class)); Assert.assertArrayEquals(new int[] { 123 }, BeanSerializer.deserializeByType(Arrays.asList(123), int[].class)); Assert.assertFalse(BeanSerializer.deserializeByType(Arrays.asList(123), String.class) instanceof String); Assert.assertTrue(CommonUtils.listEquals(Arrays.asList(123), BeanSerializer.deserializeByType(new int[] { 123 }, List.class))); Assert.assertTrue(CommonUtils.listEquals(Arrays.asList("xxx"), BeanSerializer.deserializeByType(new String[] { "xxx" }, List.class))); Assert.assertEquals(TestJsonBean.Status.START, BeanSerializer.deserializeByType("START", TestJsonBean.Status.class)); try { BeanSerializer.deserializeByType(new TestJsonBean(), TestJsonBean.Status.class); Assert.fail(); } catch (Exception e) { } Date now = new Date(); Assert.assertEquals(now, BeanSerializer.deserializeByType(now.getTime(), Date.class)); Assert.assertEquals(DateUtils.dateToStr(now), DateUtils.dateToStr( BeanSerializer.deserializeByType(DateUtils.dateToStr(now), Date.class))); try { BeanSerializer.deserializeByType("xxxx", Date.class); Assert.fail(); } catch (Exception e) { } try { BeanSerializer.deserializeByType(new TestJsonBean(), Date.class); Assert.fail(); } catch (Exception e) { } }
@Override public <T> List<T> stores(final String storeName, final QueryableStoreType<T> queryableStoreType) { final List<T> allStores = new ArrayList<>(); for (final StreamThreadStateStoreProvider storeProvider : storeProviders) { final List<T> stores = storeProvider.stores(storeQueryParameters); if (!stores.isEmpty()) { allStores.addAll(stores); if (storeQueryParameters.partition() != null) { break; } } } if (allStores.isEmpty()) { if (storeQueryParameters.partition() != null) { throw new InvalidStateStorePartitionException( String.format("The specified partition %d for store %s does not exist.", storeQueryParameters.partition(), storeName)); } throw new InvalidStateStoreException("The state store, " + storeName + ", may have migrated to another instance."); } return allStores; }
@Test public void shouldFindKeyValueStores() { final List<ReadOnlyKeyValueStore<String, String>> results = wrappingStoreProvider.stores("kv", QueryableStoreTypes.<String, String>keyValueStore()); assertEquals(2, results.size()); }
@SuppressWarnings("unchecked") public static <R> R getField(final Object object, final String fieldName) { try { return traverseClassHierarchy( object.getClass(), NoSuchFieldException.class, traversalClass -> { Field field = traversalClass.getDeclaredField(fieldName); field.setAccessible(true); return (R) field.get(object); }); } catch (Exception e) { throw new RuntimeException(e); } }
@Test public void getFieldReflectively_givesHelpfulExceptions() { ExampleDescendant example = new ExampleDescendant(); try { ReflectionHelpers.getField(example, "nonExistent"); fail("Expected exception not thrown"); } catch (RuntimeException e) { if (!e.getMessage().contains("nonExistent")) { throw new RuntimeException("Incorrect exception thrown", e); } } }
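traverseClassHierarchy presumably retries getDeclaredField up the superclass chain, since getDeclaredField only sees fields declared on the exact class. A JDK-only sketch of that traversal (a simplified re-creation, not Robolectric's actual helper):

import java.lang.reflect.Field;

public class FieldWalkDemo {
    static class Base { private final String secret = "from-base"; }
    static class Child extends Base { }

    static Object readField(Object target, String name) throws NoSuchFieldException, IllegalAccessException {
        for (Class<?> c = target.getClass(); c != null; c = c.getSuperclass()) {
            try {
                Field f = c.getDeclaredField(name); // only sees fields declared on c itself
                f.setAccessible(true);
                return f.get(target);
            } catch (NoSuchFieldException ignored) {
                // keep walking up the hierarchy
            }
        }
        throw new NoSuchFieldException(name);
    }

    public static void main(String[] args) throws Exception {
        System.out.println(readField(new Child(), "secret")); // from-base
    }
}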
@Override public ParsedLine parse(final String line, final int cursor, final ParseContext context) { final ParsedLine parsed = delegate.parse(line, cursor, context); if (context != ParseContext.ACCEPT_LINE) { return parsed; } if (UnclosedQuoteChecker.isUnclosedQuote(line)) { throw new EOFError(-1, -1, "Missing end quote", "end quote char"); } final String bare = CommentStripper.strip(parsed.line()); if (bare.isEmpty()) { return parsed; } if (cliCmdPredicate.test(bare)) { return parsed; } if (!bare.endsWith(TERMINATION_CHAR)) { throw new EOFError(-1, -1, "Missing termination char", "termination char"); } return parsed; }
@Test
public void shouldAcceptIfPredicateReturnsTrue() {
    // Given:
    givenPredicateWillReturnTrue();
    givenDelegateWillReturn(UNTERMINATED_LINE);

    // When:
    final ParsedLine result = parser.parse("what ever", 0, ParseContext.ACCEPT_LINE);

    // Then:
    assertThat(result, is(parsedLine));
}
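`UnclosedQuoteChecker.isUnclosedQuote` is referenced but not defined here. A minimal sketch of an unclosed-quote check — walk the line, toggle on the quote character, and treat SQL-style doubled quotes as escapes — might look like this; the real checker's escaping rules may differ:

static boolean isUnclosedQuote(final String line) {
    boolean inQuote = false;
    for (int i = 0; i < line.length(); i++) {
        if (line.charAt(i) == '\'') {
            if (inQuote && i + 1 < line.length() && line.charAt(i + 1) == '\'') {
                i++; // '' inside a quoted string is an escaped quote, not a terminator
            } else {
                inQuote = !inQuote;
            }
        }
    }
    return inQuote; // still inside a quote at end of line -> unclosed
}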
public static List<String> revertForbid(List<String> forbid, Set<URL> subscribed) {
    if (CollectionUtils.isNotEmpty(forbid)) {
        List<String> newForbid = new ArrayList<>();
        for (String serviceName : forbid) {
            if (StringUtils.isNotContains(serviceName, ':') && StringUtils.isNotContains(serviceName, '/')) {
                for (URL url : subscribed) {
                    if (serviceName.equals(url.getServiceInterface())) {
                        newForbid.add(url.getServiceKey());
                        break;
                    }
                }
            } else {
                newForbid.add(serviceName);
            }
        }
        return newForbid;
    }
    return forbid;
}
@Test
void testRevertForbid() {
    String service = "dubbo.test.api.HelloService";
    List<String> forbid = new ArrayList<String>();
    forbid.add(service);
    Set<URL> subscribed = new HashSet<URL>();
    subscribed.add(URL.valueOf("dubbo://127.0.0.1:20880/" + service + "?group=perf&version=1.0.0"));
    List<String> newForbid = UrlUtils.revertForbid(forbid, subscribed);
    List<String> expectForbid = new ArrayList<String>();
    expectForbid.add("perf/" + service + ":1.0.0");
    assertEquals(expectForbid, newForbid);
}
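As the expected value in the test shows, a Dubbo service key has the shape `group/interface:version`, with the group prefix and version suffix omitted when absent. A minimal sketch of how such a key is assembled — the helper name is hypothetical; `URL.getServiceKey()` performs this internally:

static String buildServiceKey(String group, String iface, String version) {
    StringBuilder key = new StringBuilder();
    if (group != null && !group.isEmpty()) {
        key.append(group).append('/');
    }
    key.append(iface);
    if (version != null && !version.isEmpty()) {
        key.append(':').append(version);
    }
    return key.toString(); // e.g. "perf/dubbo.test.api.HelloService:1.0.0"
}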
@Override
public long stringAppend(String path, Object value) {
    return get(stringAppendAsync(path, value));
}
@Test
public void testStringAppend() {
    RJsonBucket<TestType> al = redisson.getJsonBucket("test", new JacksonCodec<>(TestType.class));
    TestType t = new TestType();
    t.setName("name1");
    al.set(t);

    long s1 = al.stringAppend("name", "23");
    assertThat(s1).isEqualTo(7);
    String n = al.get(new JacksonCodec<>(String.class), "name");
    assertThat(n).isEqualTo("name123");

    List<Long> s2 = al.stringAppendMulti("$.name", "45");
    assertThat(s2).containsExactly(9L);
    String n2 = al.get(new JacksonCodec<>(String.class), "name");
    assertThat(n2).isEqualTo("name12345");
}
@Override
public void filter(ContainerRequestContext requestContext) throws IOException {
    final Optional<String> header = getBearerHeader(requestContext);
    if (header.isEmpty()) {
        // no JWT token, we'll fail immediately
        abortRequest(requestContext);
    } else {
        final String token = header.map(h -> h.replaceFirst(AUTHENTICATION_SCHEME + " ", "")).get();
        try {
            verifyToken(token);
        } catch (TokenVerificationException e) {
            LOG.error("Failed to verify auth token", e);
            abortRequest(requestContext);
        }
    }
}
@Test
void verifyInvalidToken() throws IOException {
    final String generationKey = "gTVfiF6A0pB70A3UP1EahpoR6LId9DdNadIkYNygK5Z8lpeJIpw9vN0jZ6fdsfeuV9KIg9gVLkCHIPj6FHW5Q9AvpOoGZO3h";
    final String verificationKey = "n51wcO3jn8w3JNyGgKc7k1fTCr1FWvGg7ODfQOyBT2fizBrCVsRJg2GsbYGLNejfi3QsKaqJgo3zAWMuAZhJznuizHZpv92S";
    final JwtTokenAuthFilter validator = new JwtTokenAuthFilter(verificationKey);

    final ContainerRequest mockedRequest = mockRequest("Bearer " + generateToken(generationKey));
    validator.filter(mockedRequest);
    Mockito.verify(mockedRequest, atLeastOnce()).abortWith(Mockito.any());
}
@DeleteMapping
@Secured(resource = AuthConstants.CONSOLE_RESOURCE_NAME_PREFIX + "users", action = ActionTypes.WRITE)
public Object deleteUser(@RequestParam String username) {
    List<RoleInfo> roleInfoList = roleService.getRoles(username);
    if (roleInfoList != null) {
        for (RoleInfo roleInfo : roleInfoList) {
            if (AuthConstants.GLOBAL_ADMIN_ROLE.equals(roleInfo.getRole())) {
                throw new IllegalArgumentException("cannot delete admin: " + username);
            }
        }
    }
    userDetailsService.deleteUser(username);
    return RestResultUtils.success("delete user ok!");
}
@Test
void testDeleteUser2() {
    List<RoleInfo> roleInfoList = new ArrayList<>(1);
    RoleInfo testRole = new RoleInfo();
    testRole.setUsername("nacos");
    testRole.setRole("testRole");
    roleInfoList.add(testRole);
    when(roleService.getRoles(anyString())).thenReturn(roleInfoList);

    RestResult<String> result = (RestResult<String>) userController.deleteUser("nacos");
    assertEquals(200, result.getCode());
}
public static boolean isSupported(String charsetName) {
    try {
        if (isSupportedICU != null && (Boolean) isSupportedICU.invoke(null, charsetName)) {
            return true;
        }
        return Charset.isSupported(charsetName);
    } catch (IllegalCharsetNameException e) {
        return false;
    } catch (IllegalArgumentException e) {
        // null, for example
        return false;
    } catch (Exception e) {
        // Unexpected exception, what to do?
        return false;
    }
}
@Test
public void testValidCharset() {
    assertTrue(CharsetUtils.isSupported("UTF-8"));
    assertFalse(CharsetUtils.isSupported("bogus"));
}
@Override
public AttributedList<Path> list(final Path directory, final ListProgressListener listener) throws BackgroundException {
    return this.list(directory, listener, String.valueOf(Path.DELIMITER));
}
@Test
public void testListEncodedCharacterFile() throws Exception {
    final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
    container.attributes().setRegion("us-east-1");
    final Path placeholder = new GoogleStorageTouchFeature(session).touch(
            new Path(container, String.format("^<%%%s", new AlphanumericRandomStringService().random()), EnumSet.of(Path.Type.file)),
            new TransferStatus());
    assertTrue(new GoogleStorageObjectListService(session).list(container, new DisabledListProgressListener()).contains(placeholder));
    new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(placeholder), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
public synchronized TopologyDescription describe() {
    return internalTopologyBuilder.describe();
}
@Test
public void shouldDescribeGlobalStoreTopology() {
    addGlobalStoreToTopologyAndExpectedDescription("globalStore", "source", "globalTopic", "processor", 0);
    assertThat(topology.describe(), equalTo(expectedDescription));
    assertThat(topology.describe().hashCode(), equalTo(expectedDescription.hashCode()));
}
public FunSpec getFunSpec() {
    return methodNeedsInjection()
            ? new FunSpecGenerator(
                    uniqueFunctionName,
                    BeforeAll.class,
                    defaultParameterSpecsForEachUnitTest(),
                    generateStatementBody())
                .generate()
            : new FunSpecGenerator(uniqueFunctionName, generateStatementBody()).generate();
}
@Test
public void testThatNewGreetingMethodWasGenerated() {
    Optional<Method> deployFun = filteredMethods.stream().filter(m -> m.getName().equals("newGreeting")).findAny();
    FunSpec deployFunSpec = new FunParser(deployFun.get(), greeterContractClass, "newGreeting").getFunSpec();

    assertEquals(
            deployFunSpec.toString(),
            "@org.junit.jupiter.api.Test\n"
                    + "public fun newGreeting() {\n"
                    + " val transactionReceiptVar = greeter.newGreeting(\"REPLACE_ME\").send()\n"
                    + " org.junit.jupiter.api.Assertions.assertTrue(transactionReceiptVar.isStatusOK())\n"
                    + "}\n");
}
@SuppressWarnings("unused") public static void main(String[] args) { BarSeries a = buildAndAddData(); System.out.println("a: " + a.getBar(0).getClosePrice().getName()); BaseBarSeriesBuilder.setDefaultNum(DoubleNum::valueOf); a = buildAndAddData(); System.out.println("a: " + a.getBar(0).getClosePrice().getName()); BarSeries b = buildWithDouble(); BarSeries c = buildWithBigDecimal(); BarSeries d = buildManually(); BarSeries e = buildManuallyDoubleNum(); BarSeries f = buildManuallyAndAddBarManually(); BarSeries g = buildAndAddBarsFromList(); // Fix: Reset default function, such that this test case does not influence the // following test cases in a combined test run BaseBarSeriesBuilder.setDefaultNum(DecimalNum::valueOf); }
@Test
public void test() {
    BuildBarSeries.main(null);
}
static SeekableInputStream wrap(FSDataInputStream stream) {
    return new HadoopSeekableInputStream(stream);
}
@Test
void closeShouldThrowIOExceptionWhenInterrupted() throws Exception {
    S3ABlockOutputStream s3ABlockOutputStream = new S3ABlockOutputStream();
    FSDataOutputStream fsDataOutputStream = new FSDataOutputStream(s3ABlockOutputStream, null);
    PositionOutputStream wrap = HadoopStreams.wrap(fsDataOutputStream);

    // interrupt mock upload on close after a delay
    Executors.newSingleThreadExecutor()
        .execute(
            () -> {
                try {
                    Thread.sleep(1000);
                } catch (InterruptedException e) {
                    throw new RuntimeException(e);
                }
                s3ABlockOutputStream.interruptClose();
            });

    assertThatThrownBy(wrap::close)
        .isInstanceOf(IOException.class)
        .hasMessage("S3ABlockOutputStream failed to upload object after stream was closed");
}
@Override
public void onProjectsDeleted(Set<DeletedProject> projects) {
    checkNotNull(projects, "projects can't be null");
    if (projects.isEmpty()) {
        return;
    }
    Arrays.stream(listeners)
        .forEach(safelyCallListener(listener -> listener.onProjectsDeleted(projects)));
}
@Test
public void onProjectsDeleted_throws_NPE_if_set_is_null() {
    assertThatThrownBy(() -> underTestWithListeners.onProjectsDeleted(null))
        .isInstanceOf(NullPointerException.class)
        .hasMessage("projects can't be null");
}
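The `safelyCallListener` wrapper used above is not defined in this excerpt. A plausible minimal sketch — isolate each listener call so one failing listener cannot prevent the others from being notified — under the assumption that it returns a `java.util.function.Consumer` and only logs failures (the listener type name is also assumed):

private Consumer<ProjectLifeCycleListener> safelyCallListener(Consumer<ProjectLifeCycleListener> call) {
    return listener -> {
        try {
            call.accept(listener);
        } catch (Exception e) {
            // one broken listener must not break notification of the others
            LOG.error("Call on listener {} failed", listener, e);
        }
    };
}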
public boolean validate(final Protocol protocol, final LoginOptions options) {
    return protocol.validate(this, options);
}
@Test
public void testLoginReasonable() {
    Credentials credentials = new Credentials("guest", "changeme");
    assertTrue(credentials.validate(new TestProtocol(Scheme.ftp), new LoginOptions()));
}
@Override
public Set<String> getClasses() {
    return new LinkedHashSet<>(new HostPreferences(session.getHost()).getList("s3.storage.class.options"));
}
@Test
public void testGetClasses() {
    assertArrayEquals(Collections.singletonList(S3Object.STORAGE_CLASS_STANDARD).toArray(),
            new S3StorageClassFeature(new S3Session(new Host(new S3Protocol())), new S3AccessControlListFeature(session)).getClasses().toArray());
    assertArrayEquals(Arrays.asList(S3Object.STORAGE_CLASS_STANDARD,
                    "INTELLIGENT_TIERING",
                    S3Object.STORAGE_CLASS_INFREQUENT_ACCESS,
                    "ONEZONE_IA",
                    S3Object.STORAGE_CLASS_REDUCED_REDUNDANCY,
                    S3Object.STORAGE_CLASS_GLACIER,
                    "GLACIER_IR",
                    "DEEP_ARCHIVE").toArray(),
            new S3StorageClassFeature(session, new S3AccessControlListFeature(session)).getClasses().toArray());
}
@Override
public void onDataReceived(@NonNull final BluetoothDevice device, @NonNull final Data data) {
    super.onDataReceived(device, data);

    if (data.size() < 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Read the Op Code
    final int opCode = data.getIntValue(Data.FORMAT_UINT8, 0);

    // Estimate the expected operand size based on the Op Code
    int expectedOperandSize;
    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE ->
            // UINT8
            expectedOperandSize = 1;
        case OP_CODE_CALIBRATION_VALUE_RESPONSE ->
            // Calibration Value
            expectedOperandSize = 10;
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE,
             OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPO_ALERT_LEVEL_RESPONSE,
             OP_CODE_HYPER_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE,
             OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
            // SFLOAT
            expectedOperandSize = 2;
        case OP_CODE_RESPONSE_CODE ->
            // Request Op Code (UINT8), Response Code Value (UINT8)
            expectedOperandSize = 2;
        default -> {
            onInvalidDataReceived(device, data);
            return;
        }
    }

    // Verify packet length
    if (data.size() != 1 + expectedOperandSize && data.size() != 1 + expectedOperandSize + 2) {
        onInvalidDataReceived(device, data);
        return;
    }

    // Verify CRC if present
    final boolean crcPresent = data.size() == 1 + expectedOperandSize + 2; // opCode + expected operand + CRC
    if (crcPresent) {
        final int expectedCrc = data.getIntValue(Data.FORMAT_UINT16_LE, 1 + expectedOperandSize);
        final int actualCrc = CRC16.MCRF4XX(data.getValue(), 0, 1 + expectedOperandSize);
        if (expectedCrc != actualCrc) {
            onCGMSpecificOpsResponseReceivedWithCrcError(device, data);
            return;
        }
    }

    switch (opCode) {
        case OP_CODE_COMMUNICATION_INTERVAL_RESPONSE -> {
            final int interval = data.getIntValue(Data.FORMAT_UINT8, 1);
            onContinuousGlucoseCommunicationIntervalReceived(device, interval, crcPresent);
            return;
        }
        case OP_CODE_CALIBRATION_VALUE_RESPONSE -> {
            final float glucoseConcentrationOfCalibration = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
            final int calibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 3);
            final int calibrationTypeAndSampleLocation = data.getIntValue(Data.FORMAT_UINT8, 5);
            @SuppressLint("WrongConstant")
            final int calibrationType = calibrationTypeAndSampleLocation & 0x0F;
            final int calibrationSampleLocation = calibrationTypeAndSampleLocation >> 4;
            final int nextCalibrationTime = data.getIntValue(Data.FORMAT_UINT16_LE, 6);
            final int calibrationDataRecordNumber = data.getIntValue(Data.FORMAT_UINT16_LE, 8);
            final int calibrationStatus = data.getIntValue(Data.FORMAT_UINT8, 10);
            onContinuousGlucoseCalibrationValueReceived(device, glucoseConcentrationOfCalibration,
                    calibrationTime, nextCalibrationTime, calibrationType, calibrationSampleLocation,
                    calibrationDataRecordNumber, new CGMCalibrationStatus(calibrationStatus), crcPresent);
            return;
        }
        case OP_CODE_RESPONSE_CODE -> {
            final int requestCode = data.getIntValue(Data.FORMAT_UINT8, 1); // ignore
            final int responseCode = data.getIntValue(Data.FORMAT_UINT8, 2);
            if (responseCode == CGM_RESPONSE_SUCCESS) {
                onCGMSpecificOpsOperationCompleted(device, requestCode, crcPresent);
            } else {
                onCGMSpecificOpsOperationError(device, requestCode, responseCode, crcPresent);
            }
            return;
        }
    }

    // Read SFLOAT value
    final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 1);
    switch (opCode) {
        case OP_CODE_PATIENT_HIGH_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucosePatientHighAlertReceived(device, value, crcPresent);
        case OP_CODE_PATIENT_LOW_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucosePatientLowAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPO_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseHypoAlertReceived(device, value, crcPresent);
        case OP_CODE_HYPER_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseHyperAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_DECREASE_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseRateOfDecreaseAlertReceived(device, value, crcPresent);
        case OP_CODE_RATE_OF_INCREASE_ALERT_LEVEL_RESPONSE ->
            onContinuousGlucoseRateOfIncreaseAlertReceived(device, value, crcPresent);
    }
}
@Test
public void onContinuousGlucosePatientHighAlertReceived() {
    final Data data = new Data(new byte[] { 9, 12, -16 });
    callback.onDataReceived(null, data);
    assertEquals("Level", 1.2f, patientHighAlertLevel, 0.01);
    assertFalse(secured);
}
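The CRC branch above delegates to `CRC16.MCRF4XX(...)`. For reference, a minimal stand-alone sketch of the well-known CRC-16/MCRF4XX variant (initial value 0xFFFF, reflected polynomial 0x8408, no final XOR); this shows the standard algorithm and is not necessarily the library's exact implementation:

static int crc16Mcrf4xx(final byte[] data, final int offset, final int length) {
    int crc = 0xFFFF; // initial value
    for (int i = offset; i < offset + length; i++) {
        crc ^= data[i] & 0xFF;
        for (int bit = 0; bit < 8; bit++) {
            // 0x8408 is the bit-reversed form of the CCITT polynomial 0x1021
            crc = (crc & 1) != 0 ? (crc >>> 1) ^ 0x8408 : crc >>> 1;
        }
    }
    return crc & 0xFFFF;
}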
@Override
public Set<NodeFileDescriptorStats> fileDescriptorStats() {
    final List<NodeResponse> result = nodes();
    return result.stream()
        .map(node -> NodeFileDescriptorStats.create(node.name(), node.ip(), node.host(), node.fileDescriptorMax()))
        .collect(Collectors.toSet());
}
@Test
void testFileDescriptorStats() {
    doReturn(List.of(NODE_WITH_CORRECT_INFO, NODE_WITH_MISSING_DISK_STATISTICS)).when(catApi).nodes();

    final Set<NodeFileDescriptorStats> nodeFileDescriptorStats = clusterAdapter.fileDescriptorStats();

    assertThat(nodeFileDescriptorStats)
        .hasSize(1)
        .noneSatisfy(nodeDescr -> assertThat(nodeDescr.name()).isEqualTo("nodeWithMissingDiskStatistics"))
        .first()
        .satisfies(nodeDescr -> {
            assertThat(nodeDescr.name()).isEqualTo("nodeWithCorrectInfo");
            assertThat(nodeDescr.ip()).isEqualTo("182.88.0.2");
            assertThat(nodeDescr.fileDescriptorMax()).isPresent();
            assertThat(nodeDescr.fileDescriptorMax().get()).isEqualTo(1048576L);
        });
}
public RowExpression extract(PlanNode node) {
    return node.accept(new Visitor(domainTranslator, functionAndTypeManager), null);
}
@Test
public void testAggregation() {
    PlanNode node = new AggregationNode(
            Optional.empty(),
            newId(),
            filter(baseTableScan,
                    and(
                            equals(AV, DV),
                            equals(BV, EV),
                            equals(CV, FV),
                            lessThan(DV, bigintLiteral(10)),
                            lessThan(CV, DV),
                            greaterThan(AV, bigintLiteral(2)),
                            equals(EV, FV))),
            ImmutableMap.of(
                    CV, count(metadata.getFunctionAndTypeManager()),
                    DV, count(metadata.getFunctionAndTypeManager())),
            singleGroupingSet(ImmutableList.of(AV, BV, CV)),
            ImmutableList.of(),
            AggregationNode.Step.FINAL,
            Optional.empty(),
            Optional.empty(),
            Optional.empty());

    RowExpression effectivePredicate = effectivePredicateExtractor.extract(node);

    // Rewrite in terms of group by symbols
    assertEquals(normalizeConjuncts(effectivePredicate),
            normalizeConjuncts(
                    lessThan(AV, bigintLiteral(10)),
                    lessThan(BV, AV),
                    greaterThan(AV, bigintLiteral(2)),
                    equals(BV, CV)));
}