Columns: focal_method (string, lengths 13–60.9k) · test_case (string, lengths 25–109k)
@Override public long longValue() { return (long) value; }
@Override @Test void testLongValue() { new LongValueTester().runTests(); }
public void set(String name, String value) { List<String> values = headers.get(name); if (values == null) { values = new ArrayList<>(1); headers.put(name, values); } else { values.clear(); } values.add(value); }
@Test public void testSet() { HttpHeaders headers = new HttpHeaders(2); headers.set("Connection", "close"); headers.set("Connection", "Keep-Alive"); headers.set("Accept-Encoding", "gzip, deflate"); Assert.assertEquals("Keep-Alive", headers.get("Connection")); Assert.assertEquals("gzip, deflate", headers.get("Accept-Encoding")); Assert.assertEquals(Collections.singletonList("Keep-Alive"), headers.getValues("Connection")); Assert.assertEquals(Collections.singletonList("gzip, deflate"), headers.getValues("Accept-Encoding")); }
public static boolean isFalse(Boolean b) { return b != null && !b; }
@Test public void testIsFalse() { Assert.assertTrue(CommonUtils.isFalse("false")); Assert.assertTrue(CommonUtils.isFalse("False")); Assert.assertFalse(CommonUtils.isFalse("null")); Assert.assertFalse(CommonUtils.isFalse("")); Assert.assertFalse(CommonUtils.isFalse("xxx")); Assert.assertFalse(CommonUtils.isFalse((Boolean) null)); Assert.assertFalse(CommonUtils.isFalse(Boolean.TRUE)); Assert.assertTrue(CommonUtils.isFalse(Boolean.FALSE)); }
public static List<RlpType> asRlpValues( RawTransaction rawTransaction, Sign.SignatureData signatureData) { return rawTransaction.getTransaction().asRlpValues(signatureData); }
@Test public void testEtherTransactionAsRlpValues() { List<RlpType> rlpStrings = TransactionEncoder.asRlpValues( createEtherTransaction(), new Sign.SignatureData((byte) 0, new byte[32], new byte[32])); assertEquals(rlpStrings.size(), (9)); assertEquals(rlpStrings.get(3), (RlpString.create(new BigInteger("add5355", 16)))); }
public static SerdeFeatures of(final SerdeFeature... features) { return new SerdeFeatures(ImmutableSet.copyOf(features)); }
@Test public void shouldReturnFeatureFromFindAnyOnAMatch() { assertThat(SerdeFeatures.of(WRAP_SINGLES) .findAny(WRAPPING_FEATURES), is(Optional.of(WRAP_SINGLES))); assertThat(SerdeFeatures.of(UNWRAP_SINGLES) .findAny(WRAPPING_FEATURES), is(Optional.of(UNWRAP_SINGLES))); }
@Nullable public Float getFloatValue(@FloatFormat final int formatType, @IntRange(from = 0) final int offset) { if ((offset + getTypeLen(formatType)) > size()) return null; switch (formatType) { case FORMAT_SFLOAT -> { if (mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFE) return Float.POSITIVE_INFINITY; if ((mValue[offset + 1] == 0x07 && mValue[offset] == (byte) 0xFF) || (mValue[offset + 1] == 0x08 && mValue[offset] == 0x00) || (mValue[offset + 1] == 0x08 && mValue[offset] == 0x01)) return Float.NaN; if (mValue[offset + 1] == 0x08 && mValue[offset] == 0x02) return Float.NEGATIVE_INFINITY; return bytesToFloat(mValue[offset], mValue[offset + 1]); } case FORMAT_FLOAT -> { if (mValue[offset + 3] == 0x00) { if (mValue[offset + 2] == 0x7F && mValue[offset + 1] == (byte) 0xFF) { if (mValue[offset] == (byte) 0xFE) return Float.POSITIVE_INFINITY; if (mValue[offset] == (byte) 0xFF) return Float.NaN; } else if (mValue[offset + 2] == (byte) 0x80 && mValue[offset + 1] == 0x00) { if (mValue[offset] == 0x00 || mValue[offset] == 0x01) return Float.NaN; if (mValue[offset] == 0x02) return Float.NEGATIVE_INFINITY; } } return bytesToFloat(mValue[offset], mValue[offset + 1], mValue[offset + 2], mValue[offset + 3]); } } return null; }
@Test public void setValue_SFLOAT_negativeInfinity() { final MutableData data = new MutableData(new byte[2]); data.setValue(Float.NEGATIVE_INFINITY, Data.FORMAT_SFLOAT, 0); final float value = data.getFloatValue(Data.FORMAT_SFLOAT, 0); assertEquals(Float.NEGATIVE_INFINITY, value, 0.00); }
public UiTopoLayout offsetX(double offsetX) { this.offsetX = offsetX; return this; }
@Test public void setXOff() { mkOtherLayout(); layout.offsetX(23.4); assertEquals("wrong x-offset", 23.4, layout.offsetX(), DELTA); }
public static Path getSegmentPath( String basePath, TieredStoragePartitionId partitionId, int subpartitionId, long segmentId) { String subpartitionPath = getSubpartitionPath(basePath, partitionId, subpartitionId); return new Path(subpartitionPath, SEGMENT_FILE_PREFIX + segmentId); }
@Test void testGetSegmentPath() { TieredStoragePartitionId partitionId = TieredStorageIdMappingUtils.convertId(new ResultPartitionID()); int subpartitionId = 0; int segmentId = 1; String segmentPath = SegmentPartitionFile.getSegmentPath( tempFolder.getPath(), partitionId, subpartitionId, segmentId) .toString(); File partitionFile = new File( tempFolder.getPath(), TieredStorageIdMappingUtils.convertId(partitionId).toString()); File subpartitionFile = new File(partitionFile, String.valueOf(subpartitionId)); assertThat(segmentPath) .isEqualTo(new File(subpartitionFile, SEGMENT_FILE_PREFIX + segmentId).toString()); }
@Override public QueryHeader build(final QueryResultMetaData queryResultMetaData, final ShardingSphereDatabase database, final String columnName, final String columnLabel, final int columnIndex) throws SQLException { String schemaName = null == database ? "" : database.getName(); String actualTableName = queryResultMetaData.getTableName(columnIndex); String tableName; boolean primaryKey; if (null == actualTableName || null == database) { tableName = actualTableName; primaryKey = false; } else { tableName = getLogicTableName(database, actualTableName); ShardingSphereSchema schema = database.getSchema(schemaName); primaryKey = null != schema && Optional.ofNullable(schema.getTable(tableName)).map(optional -> optional.getColumn(columnName)).map(ShardingSphereColumn::isPrimaryKey).orElse(false); } int columnType = queryResultMetaData.getColumnType(columnIndex); String columnTypeName = queryResultMetaData.getColumnTypeName(columnIndex); int columnLength = queryResultMetaData.getColumnLength(columnIndex); int decimals = queryResultMetaData.getDecimals(columnIndex); boolean signed = queryResultMetaData.isSigned(columnIndex); boolean notNull = queryResultMetaData.isNotNull(columnIndex); boolean autoIncrement = queryResultMetaData.isAutoIncrement(columnIndex); return new QueryHeader(schemaName, tableName, columnLabel, columnName, columnType, columnTypeName, columnLength, decimals, signed, primaryKey, notNull, autoIncrement); }
@Test void assertBuildWithoutPrimaryKeyColumn() throws SQLException { QueryResultMetaData queryResultMetaData = createQueryResultMetaData(); assertFalse(new MySQLQueryHeaderBuilder().build(queryResultMetaData, createDatabase(), queryResultMetaData.getColumnName(2), queryResultMetaData.getColumnLabel(2), 2).isPrimaryKey()); }
public static void localRunnerNotification(JobConf conf, JobStatus status) { JobEndStatusInfo notification = createNotification(conf, status); if (notification != null) { do { try { int code = httpNotification(notification.getUri(), notification.getTimeout()); if (code != 200) { throw new IOException("Invalid response status code: " + code); } else { break; } } catch (IOException ioex) { LOG.error("Notification error [" + notification.getUri() + "]", ioex); } catch (Exception ex) { LOG.error("Notification error [" + notification.getUri() + "]", ex); } try { Thread.sleep(notification.getRetryInterval()); } catch (InterruptedException iex) { LOG.error("Notification retry error [" + notification + "]", iex); } } while (notification.configureForRetry()); } }
@Test public void testNotificationTimeout() throws InterruptedException { Configuration conf = new Configuration(); // Reduce the timeout to 1 second conf.setInt("mapreduce.job.end-notification.timeout", 1000); JobStatus jobStatus = createTestJobStatus( "job_20130313155005308_0001", JobStatus.SUCCEEDED); JobConf jobConf = createTestJobConf( conf, 0, baseUrl + "delay"); long startTime = System.currentTimeMillis(); JobEndNotifier.localRunnerNotification(jobConf, jobStatus); long elapsedTime = System.currentTimeMillis() - startTime; // Validate params assertEquals(1, DelayServlet.calledTimes); // Make sure we timed out with time slightly above 1 second // (default timeout is in terms of minutes, so we'll catch the problem) assertTrue(elapsedTime < 2000); }
@Override public void process(Tuple input) { String key = filterMapper.getKeyFromTuple(input); boolean found; JedisCommandsContainer jedisCommand = null; try { jedisCommand = getInstance(); switch (dataType) { case STRING: found = jedisCommand.exists(key); break; case SET: found = jedisCommand.sismember(additionalKey, key); break; case HASH: found = jedisCommand.hexists(additionalKey, key); break; case SORTED_SET: found = jedisCommand.zrank(additionalKey, key) != null; break; case HYPER_LOG_LOG: found = jedisCommand.pfcount(key) > 0; break; case GEO: List<GeoCoordinate> geopos = jedisCommand.geopos(additionalKey, key); if (geopos == null || geopos.isEmpty()) { found = false; } else { // If any entry is NOT null, then we have a match. found = geopos.stream() .anyMatch(Objects::nonNull); } break; default: throw new IllegalArgumentException("Cannot process such data type: " + dataType); } if (found) { collector.emit(input, input.getValues()); } collector.ack(input); } catch (Exception e) { this.collector.reportError(e); this.collector.fail(input); } }
@Test void smokeTest_zrank_isMember() { // Define input key final String setKey = "ThisIsMySetKey"; final String inputKey = "ThisIsMyKey"; // Ensure key does exist in redis jedisHelper.zrank(setKey, 2, inputKey); // Create an input tuple final Map<String, Object> values = new HashMap<>(); values.put("key", inputKey); values.put("value", "ThisIsMyValue"); final Tuple tuple = new StubTuple(values); final JedisPoolConfig config = configBuilder.build(); final TestMapper mapper = new TestMapper(SORTED_SET, setKey); final RedisFilterBolt bolt = new RedisFilterBolt(config, mapper); bolt.prepare(new HashMap<>(), topologyContext, new OutputCollector(outputCollector)); bolt.process(tuple); // Verify Tuple passed through the bolt verifyTuplePassed(tuple); }
public static String inputStreamToString(InputStream inputStream, Charset charset) throws IOException { if (inputStream != null && inputStream.available() != -1) { ByteArrayOutputStream result = new ByteArrayOutputStream(); byte[] buffer = new byte[1024]; int length; while ((length = inputStream.read(buffer)) != -1) { result.write(buffer, 0, length); } if (charset != null) { return result.toString(charset.name()); } return result.toString(StandardCharsets.UTF_8.name()); } return null; }
@Test public void testInputStreamToString_withExpected() throws IOException { String expected = "test data"; InputStream anyInputStream = new ByteArrayInputStream(expected.getBytes(StandardCharsets.UTF_8)); String actual = StringUtils.inputStreamToString(anyInputStream, StandardCharsets.UTF_8); Assert.assertEquals(expected, actual); }
public static String getRecordReaderClassName(String fileFormatStr) { return DEFAULT_RECORD_READER_CLASS_MAP.get(fileFormatStr.toUpperCase()); }
@Test public void testGetRecordReaderClassName() { assertEquals(getRecordReaderClassName("avro"), DEFAULT_AVRO_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("gzipped_avro"), DEFAULT_AVRO_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("csv"), DEFAULT_CSV_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("json"), DEFAULT_JSON_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("thrift"), DEFAULT_THRIFT_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("orc"), DEFAULT_ORC_RECORD_READER_CLASS); assertEquals(getRecordReaderClassName("parquet"), DEFAULT_PARQUET_RECORD_READER_CLASS); }
@Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc, boolean addFieldName, boolean addCr ) { String retval = ""; String fieldname = v.getName(); int length = v.getLength(); int precision = v.getPrecision(); if ( addFieldName ) { retval += fieldname + " "; } int type = v.getType(); switch ( type ) { case ValueMetaInterface.TYPE_TIMESTAMP: case ValueMetaInterface.TYPE_DATE: retval += "DATETIME NULL"; break; case ValueMetaInterface.TYPE_BOOLEAN: retval += "CHAR(1)"; break; case ValueMetaInterface.TYPE_NUMBER: case ValueMetaInterface.TYPE_INTEGER: case ValueMetaInterface.TYPE_BIGNUMBER: if ( fieldname.equalsIgnoreCase( tk ) || // Technical key fieldname.equalsIgnoreCase( pk ) // Primary key ) { retval += "INTEGER NOT NULL"; } else { if ( ( length < 0 && precision < 0 ) || precision > 0 || length > 9 ) { retval += "DOUBLE PRECISION"; } else { // Precision == 0 && length<=9 retval += "INTEGER"; } } break; case ValueMetaInterface.TYPE_STRING: if ( length > 254 || length < 0 ) { retval += "LONG VARCHAR"; } else { retval += "VARCHAR(" + length + ")"; } break; default: retval += " UNKNOWN"; break; } if ( addCr ) { retval += Const.CR; } return retval; }
@Test public void testGetFieldDefinition() { assertEquals( "FOO DATETIME NULL", nativeMeta.getFieldDefinition( new ValueMetaDate( "FOO" ), "", "", false, true, false ) ); assertEquals( "DATETIME NULL", nativeMeta.getFieldDefinition( new ValueMetaTimestamp( "FOO" ), "", "", false, false, false ) ); assertEquals( "INTEGER NOT NULL", nativeMeta.getFieldDefinition( new ValueMetaNumber( "FOO", 10, 0 ), "FOO", "", false, false, false ) ); assertEquals( "INTEGER NOT NULL", nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 8, 0 ), "", "FOO", false, false, false ) ); // Note - ValueMetaInteger returns zero always from the precision - so this avoids the weirdness assertEquals( "INTEGER", nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", -8, -3 ), "", "", false, false, false ) ); // Weird if statement assertEquals( "DOUBLE PRECISION", nativeMeta.getFieldDefinition( new ValueMetaBigNumber( "FOO", -8, -3 ), "", "", false, false, false ) ); // Weird if statement ( length and precision less than zero) assertEquals( "DOUBLE PRECISION", nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 10, 3 ), "", "", false, false, false ) ); // Weird if statement assertEquals( "DOUBLE PRECISION", nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 10, 0 ), "", "", false, false, false ) ); // Weird if statement assertEquals( "INTEGER", nativeMeta.getFieldDefinition( new ValueMetaInteger( "FOO", 9, 0 ), "", "", false, false, false ) ); // Weird if statement assertEquals( "LONG VARCHAR", nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 255, 0 ), "", "", false, false, false ) ); assertEquals( "LONG VARCHAR", nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", -33, 0 ), "", "", false, false, false ) ); assertEquals( "VARCHAR(15)", nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 15, 0 ), "", "", false, false, false ) ); assertEquals( "VARCHAR(0)", nativeMeta.getFieldDefinition( new ValueMetaString( "FOO", 0, 0 ), "", "", false, false, false ) ); assertEquals( " UNKNOWN", nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, false ) ); assertEquals( " UNKNOWN" + System.getProperty( "line.separator" ), nativeMeta.getFieldDefinition( new ValueMetaInternetAddress( "FOO" ), "", "", false, false, true ) ); }
@Override public void doFilter(HttpRequest request, HttpResponse response, FilterChain chain) throws IOException { try { userSession.checkIsSystemAdministrator(); } catch (ForbiddenException e) { AuthenticationError.handleError(request, response, "User needs to be logged in as system administrator to access this page."); return; } String csrfState = oAuthCsrfVerifier.generateState(request, response); try { samlAuthenticator.initLogin(oAuth2ContextFactory.generateCallbackUrl(SamlIdentityProvider.KEY), VALIDATION_RELAY_STATE + "/" + csrfState, request, response); } catch (IllegalArgumentException | IllegalStateException e) { response.sendRedirect("/" + SAML_VALIDATION_CONTROLLER_CONTEXT + "/" + SAML_VALIDATION_KEY + "?CSRFToken=" + csrfState); } }
@Test public void do_filter_as_not_admin() throws IOException { userSession.logIn(); HttpRequest servletRequest = mock(HttpRequest.class); HttpResponse servletResponse = mock(HttpResponse.class); FilterChain filterChain = mock(FilterChain.class); String callbackUrl = "http://localhost:9000/api/validation_test"; when(oAuth2ContextFactory.generateCallbackUrl(anyString())) .thenReturn(callbackUrl); underTest.doFilter(servletRequest, servletResponse, filterChain); verifyNoInteractions(samlAuthenticator); verify(servletResponse).sendRedirect(anyString()); }
@Override @CacheEvict(cacheNames = RedisKeyConstants.DEPT_CHILDREN_ID_LIST, allEntries = true) // allEntries clears all cached entries, because operating on one department affects multiple caches public Long createDept(DeptSaveReqVO createReqVO) { if (createReqVO.getParentId() == null) { createReqVO.setParentId(DeptDO.PARENT_ID_ROOT); } // Validate that the parent department is valid validateParentDept(null, createReqVO.getParentId()); // Validate that the department name is unique validateDeptNameUnique(null, createReqVO.getParentId(), createReqVO.getName()); // Insert the department DeptDO dept = BeanUtils.toBean(createReqVO, DeptDO.class); deptMapper.insert(dept); return dept.getId(); }
@Test public void testCreateDept() { // Prepare parameters DeptSaveReqVO reqVO = randomPojo(DeptSaveReqVO.class, o -> { o.setId(null); // prevent id from being set o.setParentId(DeptDO.PARENT_ID_ROOT); o.setStatus(randomCommonStatus()); }); // Invoke Long deptId = deptService.createDept(reqVO); // Assert assertNotNull(deptId); // Verify that the record's attributes are correct DeptDO deptDO = deptMapper.selectById(deptId); assertPojoEquals(reqVO, deptDO, "id"); }
@VisibleForTesting void checkNoPendingTasks(DbSession dbSession, EntityDto entityDto) { //This check likely can be removed when we remove the column 'private' from components table in SONAR-20126. checkState(countPendingTask(dbSession, entityDto.getKey()) == 0, "Component visibility can't be changed as long as it has background task(s) pending or in progress"); }
@Test void checkNoPendingTasks_whenAnyOtherTaskInQueue_throws() { EntityDto entityDto = mockEntityDto(); when(dbClient.entityDao().selectByKey(dbSession, entityDto.getKey())).thenReturn(Optional.of(entityDto)); mockCeQueueDto("ANYTHING", entityDto.getUuid()); assertThatIllegalStateException() .isThrownBy(() -> visibilityService.checkNoPendingTasks(dbSession, entityDto)) .withMessage("Component visibility can't be changed as long as it has background task(s) pending or in progress"); }
private KsqlScalarFunction createFunction( final Class theClass, final UdfDescription udfDescriptionAnnotation, final Udf udfAnnotation, final Method method, final String path, final String sensorName, final Class<? extends Kudf> udfClass ) { // sanity check FunctionLoaderUtils .instantiateFunctionInstance(method.getDeclaringClass(), udfDescriptionAnnotation.name()); final FunctionInvoker invoker = FunctionLoaderUtils.createFunctionInvoker(method); final String functionName = udfDescriptionAnnotation.name(); LOGGER.info("Adding function " + functionName + " for method " + method); final List<ParameterInfo> parameters = FunctionLoaderUtils .createParameters(method, functionName, typeParser); final ParamType javaReturnSchema = FunctionLoaderUtils .getReturnType(method, udfAnnotation.schema(), typeParser); final SchemaProvider schemaProviderFunction = FunctionLoaderUtils .handleUdfReturnSchema( theClass, javaReturnSchema, udfAnnotation.schema(), typeParser, udfAnnotation.schemaProvider(), udfDescriptionAnnotation.name(), method.isVarArgs() ); return KsqlScalarFunction.create( schemaProviderFunction, javaReturnSchema, parameters, FunctionName.of(functionName.toUpperCase()), udfClass, getUdfFactory(method, udfDescriptionAnnotation, functionName, invoker, sensorName), udfAnnotation.description(), path, method.isVarArgs() ); }
@Test @SuppressWarnings("rawtypes") public void shouldInvokeUdafWhenMethodHasArgs() throws Exception { final UdafFactoryInvoker creator = createUdafLoader().createUdafFactoryInvoker( TestUdaf.class.getMethod( "createSumLengthString", String.class), FunctionName.of("test-udf"), "desc", new String[]{""}, "", ""); final KsqlAggregateFunction instance = creator.createFunction( new AggregateFunctionInitArguments(Collections.singletonList(0), "foo"), Collections.emptyList() ); assertThat(instance, not(nullValue())); assertThat(instance, not(instanceOf(TableAggregationFunction.class))); }
public void findIntersections(Rectangle query, Consumer<T> consumer) { IntArrayList todoNodes = new IntArrayList(levelOffsets.length * degree); IntArrayList todoLevels = new IntArrayList(levelOffsets.length * degree); int rootLevel = levelOffsets.length - 1; int rootIndex = levelOffsets[rootLevel]; if (doesIntersect(query, rootIndex)) { todoNodes.push(rootIndex); todoLevels.push(rootLevel); } while (!todoNodes.isEmpty()) { int nodeIndex = todoNodes.popInt(); int level = todoLevels.popInt(); if (level == 0) { // This is a leaf node consumer.accept(items[nodeIndex / ENVELOPE_SIZE]); } else { int childrenOffset = getChildrenOffset(nodeIndex, level); for (int i = 0; i < degree; i++) { int childIndex = childrenOffset + ENVELOPE_SIZE * i; if (doesIntersect(query, childIndex)) { todoNodes.push(childIndex); todoLevels.push(level - 1); } } } } }
@Test public void testSingletonFlatbushXY() { // Because mixing up x and y is easy to do... List<Rectangle> items = ImmutableList.of(new Rectangle(0, 10, 1, 11)); Flatbush<Rectangle> rtree = new Flatbush<>(items.toArray(new Rectangle[] {})); // hit assertEquals(findIntersections(rtree, new Rectangle(1, 11, 2, 12)), items); // miss assertEquals(findIntersections(rtree, new Rectangle(11, 1, 12, 2)), ImmutableList.of()); }
Future<Boolean> canRollController(int nodeId) { LOGGER.debugCr(reconciliation, "Determining whether controller pod {} can be rolled", nodeId); return describeMetadataQuorum().map(info -> { boolean canRoll = isQuorumHealthyWithoutNode(nodeId, info); if (!canRoll) { LOGGER.debugCr(reconciliation, "Not restarting controller pod {}. Restart would affect the quorum health", nodeId); } return canRoll; }).recover(error -> { LOGGER.warnCr(reconciliation, "Error determining whether it is safe to restart controller pod {}", nodeId, error); return Future.failedFuture(error); }); }
@Test public void cannotRollActiveControllerWith1FollowerBehindEvenSizedCluster(VertxTestContext context) { Map<Integer, OptionalLong> controllers = new HashMap<>(); controllers.put(1, OptionalLong.of(10000L)); controllers.put(2, OptionalLong.of(7000L)); controllers.put(3, OptionalLong.of(8200L)); controllers.put(4, OptionalLong.of(9000L)); Admin admin = setUpMocks(1, controllers); KafkaQuorumCheck quorumCheck = new KafkaQuorumCheck(Reconciliation.DUMMY_RECONCILIATION, admin, vertx, CONTROLLER_QUORUM_FETCH_TIMEOUT_MS); quorumCheck.canRollController(1).onComplete(context.succeeding(result -> { context.verify(() -> assertFalse(result)); context.completeNow(); })); }
public static Document buildNamespaceAwareDocument(File xml) throws SAXException, ParserConfigurationException, IOException { DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); factory.setIgnoringElementContentWhitespace(true); factory.setIgnoringComments(true); try { // disable DOCTYPE declaration: factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true); } catch (ParserConfigurationException e) { } try { // Set secure processing factory.setFeature(XMLConstants.FEATURE_SECURE_PROCESSING, Boolean.TRUE); } catch (ParserConfigurationException e) { } try { // Disable the external-general-entities by default factory.setFeature("http://xml.org/sax/features/external-general-entities", false); } catch (ParserConfigurationException e) { } try { // Disable the external-parameter-entities by default factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false); } catch (ParserConfigurationException e) { } return factory.newDocumentBuilder().parse(xml); }
@Test public void testBuildNamespaceAwareDocument() throws Exception { assertNotNull(XmlHelper.buildNamespaceAwareDocument(ResourceUtils.getResourceAsFile("xmls/empty.xml"))); }
@Override public String requestMessageForCheckout(SCMPropertyConfiguration scmConfiguration, String destinationFolder, SCMRevision revision) { Map configuredValues = new LinkedHashMap(); configuredValues.put("scm-configuration", jsonResultMessageHandler.configurationToMap(scmConfiguration)); configuredValues.put("destination-folder", destinationFolder); configuredValues.put("revision", scmRevisionToMap(revision)); return GSON.toJson(configuredValues); }
@Test public void shouldBuildRequestBodyForCheckoutRequest() throws Exception { Date timestamp = new SimpleDateFormat(DATE_FORMAT).parse("2011-07-13T19:43:37.100Z"); Map data = new LinkedHashMap(); data.put("dataKeyOne", "data-value-one"); data.put("dataKeyTwo", "data-value-two"); SCMRevision revision = new SCMRevision("abc.rpm", timestamp, "someuser", "comment", data, null); String requestBody = messageHandler.requestMessageForCheckout(scmPropertyConfiguration, "destination", revision); String expectedValue = "{\"scm-configuration\":{\"key-one\":{\"value\":\"value-one\"},\"key-two\":{\"value\":\"value-two\"}},\"destination-folder\":\"destination\"," + "\"revision\":{\"revision\":\"abc.rpm\",\"timestamp\":\"2011-07-13T19:43:37.100Z\",\"data\":{\"dataKeyOne\":\"data-value-one\",\"dataKeyTwo\":\"data-value-two\"}}}"; assertThat(requestBody, is(expectedValue)); }
public static boolean isIpAddress(String ipAddress) { return isMatch(IP_REGEX, ipAddress); }
@Test public void testIp() { Assert.assertEquals(true, PatternKit.isIpAddress("192.168.1.1")); Assert.assertEquals(false, PatternKit.isIpAddress("256.255.255.0")); }
@SuppressFBWarnings("NP_NONNULL_PARAM_VIOLATION") // Not a bug synchronized CompletableFuture<Void> getFutureForSequenceNumber(final long seqNum) { if (seqNum <= lastCompletedSequenceNumber) { return CompletableFuture.completedFuture(null); } return sequenceNumberFutures.computeIfAbsent(seqNum, k -> new CompletableFuture<>()); }
@Test public void shouldReturnFutureForNewSequenceNumber() { // When: final CompletableFuture<Void> future = futureStore.getFutureForSequenceNumber(2); // Then: assertFutureIsNotCompleted(future); }
@Override public String getPipelineStatusMessage() { return String.format("Preparing to schedule (%s/%s)", 0, numberOfStages()); }
@Test public void shouldUnderstandPipelineStatusMessage() { StageInstanceModels stages = new StageInstanceModels(); stages.addFutureStage("unit1", false); stages.addFutureStage("unit2", false); PipelineInstanceModel pipeline = PipelineInstanceModel.createPreparingToSchedule("pipeline-name", stages); assertThat(pipeline.getPipelineStatusMessage(), Matchers.is("Preparing to schedule (0/2)")); }
@Override public BasicTypeDefine reconvert(Column column) { BasicTypeDefine.BasicTypeDefineBuilder builder = BasicTypeDefine.builder() .name(column.getName()) .nullable(column.isNullable()) .comment(column.getComment()) .defaultValue(column.getDefaultValue()); switch (column.getDataType().getSqlType()) { case BOOLEAN: builder.columnType(String.format("%s(%s)", ORACLE_NUMBER, 1)); builder.dataType(ORACLE_NUMBER); builder.length(1L); break; case TINYINT: case SMALLINT: case INT: case BIGINT: builder.columnType(ORACLE_INTEGER); builder.dataType(ORACLE_INTEGER); break; case FLOAT: builder.columnType(ORACLE_BINARY_FLOAT); builder.dataType(ORACLE_BINARY_FLOAT); break; case DOUBLE: builder.columnType(ORACLE_BINARY_DOUBLE); builder.dataType(ORACLE_BINARY_DOUBLE); break; case DECIMAL: DecimalType decimalType = (DecimalType) column.getDataType(); long precision = decimalType.getPrecision(); int scale = decimalType.getScale(); if (precision <= 0) { precision = DEFAULT_PRECISION; scale = DEFAULT_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is precision less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (precision > MAX_PRECISION) { scale = (int) Math.max(0, scale - (precision - MAX_PRECISION)); precision = MAX_PRECISION; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum precision of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_PRECISION, precision, scale); } if (scale < 0) { scale = 0; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which is scale less than 0, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), precision, scale); } else if (scale > MAX_SCALE) { scale = MAX_SCALE; log.warn( "The decimal column {} type decimal({},{}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to decimal({},{})", column.getName(), decimalType.getPrecision(), decimalType.getScale(), MAX_SCALE, precision, scale); } builder.columnType(String.format("%s(%s,%s)", ORACLE_NUMBER, precision, scale)); builder.dataType(ORACLE_NUMBER); builder.precision(precision); builder.scale(scale); break; case BYTES: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType(ORACLE_BLOB); builder.dataType(ORACLE_BLOB); } else if (column.getColumnLength() <= MAX_RAW_LENGTH) { builder.columnType( String.format("%s(%s)", ORACLE_RAW, column.getColumnLength())); builder.dataType(ORACLE_RAW); } else { builder.columnType(ORACLE_BLOB); builder.dataType(ORACLE_BLOB); } break; case STRING: if (column.getColumnLength() == null || column.getColumnLength() <= 0) { builder.columnType( String.format("%s(%s)", ORACLE_VARCHAR2, MAX_VARCHAR_LENGTH)); builder.dataType(ORACLE_VARCHAR2); } else if (column.getColumnLength() <= MAX_VARCHAR_LENGTH) { builder.columnType( String.format("%s(%s)", ORACLE_VARCHAR2, column.getColumnLength())); builder.dataType(ORACLE_VARCHAR2); } else { builder.columnType(ORACLE_CLOB); builder.dataType(ORACLE_CLOB); } break; case DATE: builder.columnType(ORACLE_DATE); builder.dataType(ORACLE_DATE); break; case TIMESTAMP: if (column.getScale() == null || column.getScale() <= 0) { builder.columnType(ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE); } else { int timestampScale = column.getScale(); if (column.getScale() > MAX_TIMESTAMP_SCALE) { timestampScale = MAX_TIMESTAMP_SCALE; log.warn( "The timestamp column {} type timestamp({}) is out of range, " + "which exceeds the maximum scale of {}, " + "it will be converted to timestamp({})", column.getName(), column.getScale(), MAX_TIMESTAMP_SCALE, timestampScale); } builder.columnType( String.format("TIMESTAMP(%s) WITH LOCAL TIME ZONE", timestampScale)); builder.scale(timestampScale); } builder.dataType(ORACLE_TIMESTAMP_WITH_LOCAL_TIME_ZONE); break; default: throw CommonError.convertToConnectorTypeError( DatabaseIdentifier.ORACLE, column.getDataType().getSqlType().name(), column.getName()); } return builder.build(); }
@Test public void testReconvertDecimal() { Column column = PhysicalColumn.builder().name("test").dataType(new DecimalType(0, 0)).build(); BasicTypeDefine typeDefine = OracleTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals( String.format( "%s(%s,%s)", OracleTypeConverter.ORACLE_NUMBER, OracleTypeConverter.DEFAULT_PRECISION, OracleTypeConverter.DEFAULT_SCALE), typeDefine.getColumnType()); Assertions.assertEquals(OracleTypeConverter.ORACLE_NUMBER, typeDefine.getDataType()); column = PhysicalColumn.builder().name("test").dataType(new DecimalType(10, 2)).build(); typeDefine = OracleTypeConverter.INSTANCE.reconvert(column); Assertions.assertEquals(column.getName(), typeDefine.getName()); Assertions.assertEquals( String.format("%s(%s,%s)", OracleTypeConverter.ORACLE_NUMBER, 10, 2), typeDefine.getColumnType()); Assertions.assertEquals(OracleTypeConverter.ORACLE_NUMBER, typeDefine.getDataType()); }
@Override public String getSignature(String baseString, String apiSecret, String tokenSecret) { try { final Signature signature = Signature.getInstance(RSA_SHA1); signature.initSign(privateKey); signature.update(baseString.getBytes(UTF8)); return Base64.encode(signature.sign()); } catch (NoSuchAlgorithmException | InvalidKeyException | SignatureException | UnsupportedEncodingException | RuntimeException e) { throw new OAuthSignatureException(baseString, e); } }
@Test public void shouldReturnSignature() { final String apiSecret = "api secret"; final String tokenSecret = "token secret"; final String baseString = "base string"; final String signature = "LUNRzQAlpdNyM9mLXm96Va6g/qVNnEAb7p7K1KM0g8IopOFQJPoOO7cvppgt7w3QyhijWJnCmvqXaaIAGrqvd" + "yr3fIzBULh8D/iZQUNLMi08GCOA34P81XBvsc7A5uJjPDsGhJg2MzoVJ8nWJhU/lMMk4c92S1WGskeoDofRwpo="; assertEquals(signature, service.getSignature(baseString, apiSecret, tokenSecret)); }
public long getTotalBatchMaxBytes() { if (this.produceAccumulator == null) { return 0; } return produceAccumulator.getTotalBatchMaxBytes(); }
@Test public void assertTotalBatchMaxBytes() throws NoSuchFieldException, IllegalAccessException { setProduceAccumulator(true); assertEquals(0L, producer.getTotalBatchMaxBytes()); }
@Override public GetContainerReportResponse getContainerReport( GetContainerReportRequest request) throws YarnException, IOException { ContainerId containerId = request.getContainerId(); try { GetContainerReportResponse response = GetContainerReportResponse.newInstance(history .getContainer(containerId)); return response; } catch (IOException e) { LOG.error(e.getMessage(), e); throw e; } }
@Test void testContainerReport() throws IOException, YarnException { ApplicationId appId = ApplicationId.newInstance(0, 1); ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 1); ContainerId containerId = ContainerId.newContainerId(appAttemptId, 1); GetContainerReportRequest request = GetContainerReportRequest.newInstance(containerId); GetContainerReportResponse response = clientService.getContainerReport(request); ContainerReport container = response.getContainerReport(); assertNotNull(container); assertEquals(containerId, container.getContainerId()); assertEquals("http://0.0.0.0:8188/applicationhistory/logs/" + "test host:100/container_0_0001_01_000001/" + "container_0_0001_01_000001/user1", container.getLogUrl()); }
public static <T> ThrowingConsumer<StreamRecord<T>, Exception> getRecordProcessor( Input<T> input) { boolean canOmitSetKeyContext; if (input instanceof AbstractStreamOperator) { canOmitSetKeyContext = canOmitSetKeyContext((AbstractStreamOperator<?>) input, 0); } else { canOmitSetKeyContext = input instanceof KeyContextHandler && !((KeyContextHandler) input).hasKeyContext(); } if (canOmitSetKeyContext) { return input::processElement; } else if (input instanceof AsyncStateProcessing && ((AsyncStateProcessing) input).isAsyncStateProcessingEnabled()) { return ((AsyncStateProcessing) input).getRecordProcessor(1); } else { return record -> { input.setKeyContextElement(record); input.processElement(record); }; } }
@Test void testOverrideSetKeyContextElementForOneInputStreamOperator() throws Exception { // test no override NoOverrideOneInputStreamOperator noOverride = new NoOverrideOneInputStreamOperator(); RecordProcessorUtils.getRecordProcessor(noOverride).accept(new StreamRecord<>("test")); assertThat(noOverride.setCurrentKeyCalled).isFalse(); // test override "SetKeyContextElement" OverrideSetKeyContextOneInputStreamOperator overrideSetKeyContext = new OverrideSetKeyContextOneInputStreamOperator(); RecordProcessorUtils.getRecordProcessor(overrideSetKeyContext) .accept(new StreamRecord<>("test")); assertThat(overrideSetKeyContext.setKeyContextElementCalled).isTrue(); // test override "SetKeyContextElement1" OverrideSetKeyContext1OneInputStreamOperator overrideSetKeyContext1 = new OverrideSetKeyContext1OneInputStreamOperator(); RecordProcessorUtils.getRecordProcessor(overrideSetKeyContext1) .accept(new StreamRecord<>("test")); assertThat(overrideSetKeyContext1.setKeyContextElement1Called).isTrue(); }
public FEELFnResult<Boolean> invoke(@ParameterName("range1") Range range1, @ParameterName("range2") Range range2) { if (range1 == null) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be null")); } if (range2 == null) { return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range2", "cannot be null")); } try { boolean result = (range1.getHighEndPoint().compareTo(range2.getLowEndPoint()) > 0 || (range1.getHighEndPoint().compareTo(range2.getLowEndPoint()) == 0 && range1.getHighBoundary() == RangeBoundary.CLOSED && range2.getLowBoundary() == RangeBoundary.CLOSED)) && (range1.getLowEndPoint().compareTo(range2.getHighEndPoint()) < 0 || (range1.getLowEndPoint().compareTo(range2.getHighEndPoint()) == 0 && range1.getLowBoundary() == RangeBoundary.CLOSED && range2.getHighBoundary() == RangeBoundary.CLOSED)); return FEELFnResult.ofResult(result); } catch (Exception e) { // points are not comparable return FEELFnResult.ofError(new InvalidParametersEvent(Severity.ERROR, "range1", "cannot be compared to range2")); } }
@Test void invokeParamIsNull() { FunctionTestUtil.assertResultError(overlapsFunction.invoke(null, new RangeImpl()), InvalidParametersEvent.class); FunctionTestUtil.assertResultError(overlapsFunction.invoke(new RangeImpl(), null), InvalidParametersEvent.class); }
public static DataflowRunner fromOptions(PipelineOptions options) { DataflowPipelineOptions dataflowOptions = PipelineOptionsValidator.validate(DataflowPipelineOptions.class, options); ArrayList<String> missing = new ArrayList<>(); if (dataflowOptions.getAppName() == null) { missing.add("appName"); } if (Strings.isNullOrEmpty(dataflowOptions.getRegion()) && isServiceEndpoint(dataflowOptions.getDataflowEndpoint())) { missing.add("region"); } if (missing.size() > 0) { throw new IllegalArgumentException( "Missing required pipeline options: " + Joiner.on(',').join(missing)); } validateWorkerSettings( PipelineOptionsValidator.validate(DataflowPipelineWorkerPoolOptions.class, options)); PathValidator validator = dataflowOptions.getPathValidator(); String gcpTempLocation; try { gcpTempLocation = dataflowOptions.getGcpTempLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires gcpTempLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(gcpTempLocation); String stagingLocation; try { stagingLocation = dataflowOptions.getStagingLocation(); } catch (Exception e) { throw new IllegalArgumentException( "DataflowRunner requires stagingLocation, " + "but failed to retrieve a value from PipelineOptions", e); } validator.validateOutputFilePrefixSupported(stagingLocation); if (!isNullOrEmpty(dataflowOptions.getSaveProfilesToGcs())) { validator.validateOutputFilePrefixSupported(dataflowOptions.getSaveProfilesToGcs()); } if (dataflowOptions.getFilesToStage() != null) { // The user specifically requested these files, so fail now if they do not exist. // (automatically detected classpath elements are permitted to not exist, so later // staging will not fail on nonexistent files) dataflowOptions.getFilesToStage().stream() .forEach( stagedFileSpec -> { File localFile; if (stagedFileSpec.contains("=")) { String[] components = stagedFileSpec.split("=", 2); localFile = new File(components[1]); } else { localFile = new File(stagedFileSpec); } if (!localFile.exists()) { // should be FileNotFoundException, but for build-time backwards compatibility // cannot add checked exception throw new RuntimeException( String.format("Non-existent files specified in filesToStage: %s", localFile)); } }); } else { dataflowOptions.setFilesToStage( detectClassPathResourcesToStage(DataflowRunner.class.getClassLoader(), options)); if (dataflowOptions.getFilesToStage().isEmpty()) { throw new IllegalArgumentException("No files to stage has been found."); } else { LOG.info( "PipelineOptions.filesToStage was not specified. " + "Defaulting to files from the classpath: will stage {} files. " + "Enable logging at DEBUG level to see which files will be staged.", dataflowOptions.getFilesToStage().size()); LOG.debug("Classpath elements: {}", dataflowOptions.getFilesToStage()); } } // Verify jobName according to service requirements, truncating converting to lowercase if // necessary. String jobName = dataflowOptions.getJobName().toLowerCase(); checkArgument( jobName.matches("[a-z]([-a-z0-9]*[a-z0-9])?"), "JobName invalid; the name must consist of only the characters " + "[-a-z0-9], starting with a letter and ending with a letter " + "or number"); if (!jobName.equals(dataflowOptions.getJobName())) { LOG.info( "PipelineOptions.jobName did not match the service requirements. " + "Using {} instead of {}.", jobName, dataflowOptions.getJobName()); } dataflowOptions.setJobName(jobName); // Verify project String project = dataflowOptions.getProject(); if (project.matches("[0-9]*")) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project number."); } else if (!project.matches(PROJECT_ID_REGEXP)) { throw new IllegalArgumentException( "Project ID '" + project + "' invalid. Please make sure you specified the Project ID, not project" + " description."); } DataflowPipelineDebugOptions debugOptions = dataflowOptions.as(DataflowPipelineDebugOptions.class); // Verify the number of worker threads is a valid value if (debugOptions.getNumberOfWorkerHarnessThreads() < 0) { throw new IllegalArgumentException( "Number of worker harness threads '" + debugOptions.getNumberOfWorkerHarnessThreads() + "' invalid. Please make sure the value is non-negative."); } // Verify that if recordJfrOnGcThrashing is set, the pipeline is at least on java 11 if (dataflowOptions.getRecordJfrOnGcThrashing() && Environments.getJavaVersion() == Environments.JavaVersion.java8) { throw new IllegalArgumentException( "recordJfrOnGcThrashing is only supported on java 9 and up."); } if (dataflowOptions.isStreaming() && dataflowOptions.getGcsUploadBufferSizeBytes() == null) { dataflowOptions.setGcsUploadBufferSizeBytes(GCS_UPLOAD_BUFFER_SIZE_BYTES_DEFAULT); } // Adding the Java version to the SDK name for user's and support convenience. String agentJavaVer = "(JRE 8 environment)"; if (Environments.getJavaVersion() != Environments.JavaVersion.java8) { agentJavaVer = String.format("(JRE %s environment)", Environments.getJavaVersion().specification()); } DataflowRunnerInfo dataflowRunnerInfo = DataflowRunnerInfo.getDataflowRunnerInfo(); String userAgentName = dataflowRunnerInfo.getName(); Preconditions.checkArgument( !userAgentName.equals(""), "Dataflow runner's `name` property cannot be empty."); String userAgentVersion = dataflowRunnerInfo.getVersion(); Preconditions.checkArgument( !userAgentVersion.equals(""), "Dataflow runner's `version` property cannot be empty."); String userAgent = String.format("%s/%s%s", userAgentName, userAgentVersion, agentJavaVer).replace(" ", "_"); dataflowOptions.setUserAgent(userAgent); return new DataflowRunner(dataflowOptions); }
@Test public void testInvalidNumberOfWorkerHarnessThreads() throws IOException { DataflowPipelineOptions options = buildPipelineOptions(); options.as(DataflowPipelineDebugOptions.class).setNumberOfWorkerHarnessThreads(-1); thrown.expect(IllegalArgumentException.class); thrown.expectMessage("Number of worker harness threads"); thrown.expectMessage("Please make sure the value is non-negative."); DataflowRunner.fromOptions(options); }
IdBatchAndWaitTime newIdBaseLocal(int batchSize) { return newIdBaseLocal(Clock.currentTimeMillis(), getNodeId(), batchSize); }
@Test public void test_idsOrdered() { long lastId = -1; for (long now = DEFAULT_EPOCH_START; now < DEFAULT_EPOCH_START + (1L << DEFAULT_BITS_TIMESTAMP); now += 365L * 24L * 60L * 60L * 1000L) { long base = gen.newIdBaseLocal(now, 1234, 1).idBatch.base(); LOG.info("at " + new Date(now) + ", id=" + base); assertTrue("lastId=" + lastId + ", newId=" + base, lastId < base); lastId = base; } }
public SqlType getExpressionSqlType(final Expression expression) { return getExpressionSqlType(expression, Collections.emptyMap()); }
@Test public void shouldHandleNestedLambdas() { // Given: givenUdfWithNameAndReturnType("TRANSFORM", SqlTypes.INTEGER); when(function.parameters()).thenReturn( ImmutableList.of( ArrayType.of(LongType.INSTANCE), IntegerType.INSTANCE, LambdaType.of( ImmutableList.of( DoubleType.INSTANCE, DoubleType.INSTANCE ), StringType.INSTANCE), LambdaType.of( ImmutableList.of( DoubleType.INSTANCE, DoubleType.INSTANCE ), StringType.INSTANCE ) )); final Expression expression = new ArithmeticBinaryExpression( Operator.ADD, new FunctionCall( FunctionName.of("TRANSFORM"), ImmutableList.of( ARRAYCOL, new IntegerLiteral(0), new LambdaFunctionCall( ImmutableList.of("A", "B"), new ArithmeticBinaryExpression( Operator.ADD, new FunctionCall( FunctionName.of("TRANSFORM"), ImmutableList.of( ARRAYCOL, new IntegerLiteral(0), new LambdaFunctionCall( ImmutableList.of("Q", "V"), new ArithmeticBinaryExpression( Operator.ADD, new LambdaVariable("Q"), new LambdaVariable("V")) ))), new LambdaVariable("B")) ))), new IntegerLiteral(5) ); // When: final SqlType result = expressionTypeManager.getExpressionSqlType(expression); assertThat(result, is(SqlTypes.INTEGER)); }
public static <T> T getBean(Class<T> interfaceClass, Class typeClass) { Object object = serviceMap.get(interfaceClass.getName() + "<" + typeClass.getName() + ">"); if(object == null) return null; if(object instanceof Object[]) { return (T)Array.get(object, 0); } else { return (T)object; } }
@Test public void testSingleFromArray() { // get the first object from an array of implementations in service.yml E e = SingletonServiceFactory.getBean(E.class); Assert.assertEquals("e1", e.e()); }
@Override @SuppressWarnings("deprecation") public HttpClientOperations addHandler(ChannelHandler handler) { super.addHandler(handler); return this; }
@Test void addNamedEncoderReplaysLastHttp() { ByteBuf buf = Unpooled.copiedBuffer("{\"foo\":1}", CharsetUtil.UTF_8); EmbeddedChannel channel = new EmbeddedChannel(); new HttpClientOperations(() -> channel, ConnectionObserver.emptyListener(), ClientCookieEncoder.STRICT, ClientCookieDecoder.STRICT, ReactorNettyHttpMessageLogFactory.INSTANCE) .addHandler("json", new JsonObjectDecoder()); channel.writeInbound(new DefaultLastHttpContent(buf)); assertThat(channel.pipeline().names()).first().isEqualTo("json$extractor"); Object content = channel.readInbound(); assertThat(content).isInstanceOf(ByteBuf.class); ((ByteBuf) content).release(); content = channel.readInbound(); assertThat(content).isInstanceOf(LastHttpContent.class); ((LastHttpContent) content).release(); content = channel.readInbound(); assertThat(content).isNull(); }
void pruneMissingPeers() { prunePeersTimer.record(() -> { final Set<String> peerIds = presenceCluster.withCluster( connection -> connection.sync().smembers(MANAGER_SET_KEY)); peerIds.remove(managerId); for (final String peerId : peerIds) { final boolean peerMissing = presenceCluster.withCluster( connection -> connection.sync().publish(getManagerPresenceChannel(peerId), "ping") == 0); if (peerMissing) { log.debug("Presence manager {} did not respond to ping", peerId); final String connectedClientsKey = getConnectedClientSetKey(peerId); String presenceKey; while ((presenceKey = presenceCluster.withCluster(connection -> connection.sync().spop(connectedClientsKey))) != null) { clearPresenceScript.execute(List.of(presenceKey), List.of(peerId)); pruneClientMeter.increment(); } presenceCluster.useCluster(connection -> { connection.sync().del(connectedClientsKey); connection.sync().srem(MANAGER_SET_KEY, peerId); }); } } }); }
@Test void testPruneMissingPeers() { final String presentPeerId = UUID.randomUUID().toString(); final String missingPeerId = UUID.randomUUID().toString(); REDIS_CLUSTER_EXTENSION.getRedisCluster().useCluster(connection -> { connection.sync().sadd(ClientPresenceManager.MANAGER_SET_KEY, presentPeerId); connection.sync().sadd(ClientPresenceManager.MANAGER_SET_KEY, missingPeerId); }); for (int i = 0; i < 10; i++) { addClientPresence(presentPeerId); addClientPresence(missingPeerId); } clientPresenceManager.getPubSubConnection().usePubSubConnection( connection -> connection.sync().upstream().commands() .subscribe(ClientPresenceManager.getManagerPresenceChannel(presentPeerId))); clientPresenceManager.pruneMissingPeers(); assertEquals(1, (long) REDIS_CLUSTER_EXTENSION.getRedisCluster().withCluster( connection -> connection.sync().exists(ClientPresenceManager.getConnectedClientSetKey(presentPeerId)))); assertTrue(REDIS_CLUSTER_EXTENSION.getRedisCluster().withCluster( (Function<StatefulRedisClusterConnection<String, String>, Boolean>) connection -> connection.sync() .sismember(ClientPresenceManager.MANAGER_SET_KEY, presentPeerId))); assertEquals(0, (long) REDIS_CLUSTER_EXTENSION.getRedisCluster().withCluster( connection -> connection.sync().exists(ClientPresenceManager.getConnectedClientSetKey(missingPeerId)))); assertFalse(REDIS_CLUSTER_EXTENSION.getRedisCluster().withCluster( (Function<StatefulRedisClusterConnection<String, String>, Boolean>) connection -> connection.sync() .sismember(ClientPresenceManager.MANAGER_SET_KEY, missingPeerId))); }
public static int AUG_CCITT(@NonNull final byte[] data, final int offset, final int length) { return CRC(0x1021, 0x1D0F, data, offset, length, false, false, 0x0000); }
@Test public void AUG_CCITT_empty() { final byte[] data = new byte[0]; assertEquals(0x1D0F, CRC16.AUG_CCITT(data, 0, 0)); }
@Override public void validate(final Analysis analysis) { failPersistentQueryOnWindowedTable(analysis); QueryValidatorUtil.validateNoUserColumnsWithSameNameAsPseudoColumns(analysis); }
@Test public void shouldNotThrowOnPersistentPushQueryOnWindowedStream() { // Given: givenPersistentQuery(); givenSourceStream(); givenWindowedSource(); // When/Then: validator.validate(analysis); }
public static ExistsSubqueryExpression bind(final ExistsSubqueryExpression segment, final SQLStatementBinderContext binderContext, final Map<String, TableSegmentBinderContext> tableBinderContexts) { SubquerySegment boundSubquery = SubquerySegmentBinder.bind(segment.getSubquery(), binderContext, tableBinderContexts); ExistsSubqueryExpression result = new ExistsSubqueryExpression(segment.getStartIndex(), segment.getStopIndex(), boundSubquery); result.setNot(segment.isNot()); return result; }
@Test void assertBindExistsSubqueryExpression() { MySQLSelectStatement selectStatement = new MySQLSelectStatement(); selectStatement.setProjections(new ProjectionsSegment(0, 0)); ExistsSubqueryExpression existsSubqueryExpression = new ExistsSubqueryExpression(0, 0, new SubquerySegment(0, 0, selectStatement, "t_test")); SQLStatementBinderContext binderContext = new SQLStatementBinderContext(mock(ShardingSphereMetaData.class), DefaultDatabase.LOGIC_NAME, new MockedDatabaseType(), Collections.emptyList()); ExistsSubqueryExpression actual = ExistsSubqueryExpressionBinder.bind(existsSubqueryExpression, binderContext, Collections.emptyMap()); assertThat(actual.getStartIndex(), is(existsSubqueryExpression.getStartIndex())); assertThat(actual.getStopIndex(), is(existsSubqueryExpression.getStopIndex())); assertThat(actual.getText(), is("t_test")); assertThat(actual.getSubquery().getStartIndex(), is(existsSubqueryExpression.getSubquery().getStartIndex())); assertThat(actual.getSubquery().getStopIndex(), is(existsSubqueryExpression.getSubquery().getStopIndex())); assertThat(actual.getSubquery().getSubqueryType(), is(existsSubqueryExpression.getSubquery().getSubqueryType())); assertThat(actual.getSubquery().getText(), is("t_test")); assertThat(actual.getSubquery().getSelect().getDatabaseType(), is(existsSubqueryExpression.getSubquery().getSelect().getDatabaseType())); }
@Override public DataSerializableFactory createFactory() { return typeId -> switch (typeId) { case MAP_REPLICATION_UPDATE -> new WanMapAddOrUpdateEvent(); case MAP_REPLICATION_REMOVE -> new WanMapRemoveEvent(); case WAN_MAP_ENTRY_VIEW -> new WanMapEntryView<>(); case WAN_CACHE_ENTRY_VIEW -> new WanCacheEntryView<>(); case WAN_EVENT_CONTAINER_REPLICATION_OPERATION -> new WanEventContainerReplicationOperation(); default -> throw new IllegalArgumentException("Unknown type-id: " + typeId); }; }
@Test(expected = IllegalArgumentException.class) public void testInvalidType() { WanDataSerializerHook hook = new WanDataSerializerHook(); hook.createFactory().create(999); }
public static List<Import> getImportList(final List<String> importCells) { final List<Import> importList = new ArrayList<>(); if ( importCells == null ) { return importList; } for( String importCell: importCells ){ final StringTokenizer tokens = new StringTokenizer( importCell, "," ); while ( tokens.hasMoreTokens() ) { final Import imp = new Import(); imp.setClassName( tokens.nextToken().trim() ); importList.add( imp ); } } return importList; }
@Test public void getImportList_maniValues() { List<Import> list = getImportList(List.of("", "com.something.Yeah, com.something.No,com.something.yeah.*")); assertThat(list).hasSize(3).extracting(x -> x.getClassName()).containsExactly("com.something.Yeah", "com.something.No", "com.something.yeah.*"); }
public AdditionalServletWithClassLoader load( AdditionalServletMetadata metadata, String narExtractionDirectory) throws IOException { final File narFile = metadata.getArchivePath().toAbsolutePath().toFile(); NarClassLoader ncl = NarClassLoaderBuilder.builder() .narFile(narFile) .parentClassLoader(AdditionalServlet.class.getClassLoader()) .extractionDirectory(narExtractionDirectory) .build(); AdditionalServletDefinition def = getAdditionalServletDefinition(ncl); if (StringUtils.isBlank(def.getAdditionalServletClass())) { throw new IOException("Additional servlets `" + def.getName() + "` does NOT provide an " + "additional servlets implementation"); } try { Class additionalServletClass = ncl.loadClass(def.getAdditionalServletClass()); Object additionalServlet = additionalServletClass.getDeclaredConstructor().newInstance(); if (!(additionalServlet instanceof AdditionalServlet)) { throw new IOException("Class " + def.getAdditionalServletClass() + " does not implement additional servlet interface"); } AdditionalServlet servlet = (AdditionalServlet) additionalServlet; return new AdditionalServletWithClassLoader(servlet, ncl); } catch (Throwable t) { rethrowIOException(t); return null; } }
@Test public void testLoadEventListener() throws Exception { AdditionalServletDefinition def = new AdditionalServletDefinition(); def.setAdditionalServletClass(MockAdditionalServlet.class.getName()); def.setDescription("test-proxy-listener"); String archivePath = "/path/to/proxy/listener/nar"; AdditionalServletMetadata metadata = new AdditionalServletMetadata(); metadata.setDefinition(def); metadata.setArchivePath(Paths.get(archivePath)); NarClassLoader mockLoader = mock(NarClassLoader.class); when(mockLoader.getServiceDefinition(eq(AdditionalServletUtils.ADDITIONAL_SERVLET_FILE))) .thenReturn(ObjectMapperFactory.getYamlMapper().writer().writeValueAsString(def)); Class listenerClass = MockAdditionalServlet.class; when(mockLoader.loadClass(eq(MockAdditionalServlet.class.getName()))) .thenReturn(listenerClass); final NarClassLoaderBuilder mockedBuilder = mock(NarClassLoaderBuilder.class, RETURNS_SELF); when(mockedBuilder.build()).thenReturn(mockLoader); try (MockedStatic<NarClassLoaderBuilder> builder = Mockito.mockStatic(NarClassLoaderBuilder.class)) { builder.when(() -> NarClassLoaderBuilder.builder()).thenReturn(mockedBuilder); AdditionalServletWithClassLoader returnedPhWithCL = AdditionalServletUtils.load(metadata, ""); AdditionalServlet returnedPh = returnedPhWithCL.getServlet(); assertSame(mockLoader, returnedPhWithCL.getClassLoader()); assertTrue(returnedPh instanceof MockAdditionalServlet); } }
@Override public byte[] fromConnectHeader(String topic, String headerKey, Schema schema, Object value) { return fromConnectData(topic, schema, value); }
@Test public void testNullHeaderValueToBytes() { assertNull(converter.fromConnectHeader(TOPIC, "hdr", Schema.OPTIONAL_STRING_SCHEMA, null)); }
@Override protected void doStart() throws Exception { super.doStart(); LOG.debug("Creating connection to Azure ServiceBus"); client = getEndpoint().getServiceBusClientFactory().createServiceBusProcessorClient(getConfiguration(), this::processMessage, this::processError); client.start(); }
@Test void consumerPropagatesApplicationPropertiesToMessageHeaders() throws Exception { try (ServiceBusConsumer consumer = new ServiceBusConsumer(endpoint, processor)) { consumer.doStart(); verify(client).start(); verify(clientFactory).createServiceBusProcessorClient(any(), any(), any()); when(messageContext.getMessage()).thenReturn(message); configureMockMessage(); message.getApplicationProperties().put(PROPAGATED_HEADER_KEY, PROPAGATED_HEADER_VALUE); when(headerFilterStrategy.applyFilterToExternalHeaders(anyString(), any(), any())).thenReturn(false); processMessageCaptor.getValue().accept(messageContext); verify(headerFilterStrategy, atLeastOnce()).applyFilterToExternalHeaders(anyString(), any(), any(Exchange.class)); verifyNoMoreInteractions(headerFilterStrategy); verify(processor).process(any(Exchange.class), any(AsyncCallback.class)); Exchange exchange = exchangeCaptor.getValue(); assertThat(exchange).isNotNull(); Message inMessage = exchange.getIn(); assertThat(inMessage).isNotNull(); assertThat(inMessage.getHeaders()).containsEntry(PROPAGATED_HEADER_KEY, PROPAGATED_HEADER_VALUE); } }
@Override public void prepareContainer(ContainerRuntimeContext ctx) throws ContainerExecutionException { @SuppressWarnings("unchecked") List<String> localDirs = ctx.getExecutionAttribute(CONTAINER_LOCAL_DIRS); @SuppressWarnings("unchecked") Map<org.apache.hadoop.fs.Path, List<String>> resources = ctx.getExecutionAttribute(LOCALIZED_RESOURCES); @SuppressWarnings("unchecked") List<String> commands = ctx.getExecutionAttribute(CONTAINER_RUN_CMDS); Map<String, String> env = ctx.getContainer().getLaunchContext().getEnvironment(); String username = ctx.getExecutionAttribute(USER); if(!isSandboxContainerWhitelisted(username, commands)) { String tmpDirBase = configuration.get("hadoop.tmp.dir"); if (tmpDirBase == null) { throw new ContainerExecutionException("hadoop.tmp.dir not set!"); } try { String containerID = ctx.getExecutionAttribute(CONTAINER_ID_STR); initializePolicyDir(); List<String> groupPolicyFiles = getGroupPolicyFiles(configuration, ctx.getExecutionAttribute(USER)); Path policyFilePath = Files.createFile( Paths.get(policyFileDir.toString(), containerID + "-" + NMContainerPolicyUtils.POLICY_FILE), POLICY_ATTR); try(OutputStream policyOutputStream = Files.newOutputStream(policyFilePath)) { containerPolicies.put(containerID, policyFilePath); NMContainerPolicyUtils.generatePolicyFile(policyOutputStream, localDirs, groupPolicyFiles, resources, configuration); NMContainerPolicyUtils.appendSecurityFlags( commands, env, policyFilePath, sandboxMode); } } catch (IOException e) { throw new ContainerExecutionException(e); } } }
@Test public void testDeniedWhitelistGroup() throws ContainerExecutionException { String[] inputCommand = { "$JAVA_HOME/bin/java jar MyJob.jar" }; List<String> commands = Arrays.asList(inputCommand); conf.set(YarnConfiguration.YARN_CONTAINER_SANDBOX_WHITELIST_GROUP, WHITELIST_GROUP); runtimeContextBuilder.setExecutionAttribute(USER, NORMAL_USER); runtimeContextBuilder.setExecutionAttribute(CONTAINER_RUN_CMDS, commands); runtime.prepareContainer(runtimeContextBuilder.build()); Assert.assertTrue("Java security manager must be enabled for " + "unauthorized users", commands.get(0).contains(SECURITY_FLAG)); }
@VisibleForTesting static Map<Severity, List<String>> checkNoticeFile( Map<String, Set<Dependency>> modulesWithShadedDependencies, String moduleName, @Nullable NoticeContents noticeContents) { final Map<Severity, List<String>> problemsBySeverity = new HashMap<>(); if (noticeContents == null) { addProblem(problemsBySeverity, Severity.CRITICAL, "The NOTICE file was empty."); } else { // first line must be the module name. if (!noticeContents.getNoticeModuleName().equals(moduleName)) { addProblem( problemsBySeverity, Severity.TOLERATED, String.format( "First line does not start with module name. firstLine=%s", noticeContents.getNoticeModuleName())); } // collect all declared dependencies from NOTICE file Set<Dependency> declaredDependencies = new HashSet<>(); for (Dependency declaredDependency : noticeContents.getDeclaredDependencies()) { if (!declaredDependencies.add(declaredDependency)) { addProblem( problemsBySeverity, Severity.CRITICAL, String.format("Dependency %s is declared twice.", declaredDependency)); } } // find all dependencies missing from NOTICE file Collection<Dependency> expectedDependencies = modulesWithShadedDependencies.getOrDefault(moduleName, Collections.emptySet()) .stream() .filter( dependency -> !dependency.getGroupId().equals("org.apache.flink")) .collect(Collectors.toList()); for (Dependency expectedDependency : expectedDependencies) { if (!declaredDependencies.contains(expectedDependency)) { addProblem( problemsBySeverity, Severity.CRITICAL, String.format("Dependency %s is not listed.", expectedDependency)); } } boolean moduleDefinesExcessDependencies = MODULES_DEFINING_EXCESS_DEPENDENCIES.contains(moduleName); // find all dependencies defined in NOTICE file, which were not expected for (Dependency declaredDependency : declaredDependencies) { if (!expectedDependencies.contains(declaredDependency)) { final Severity severity = moduleDefinesExcessDependencies ? Severity.SUPPRESSED : Severity.TOLERATED; addProblem( problemsBySeverity, severity, String.format( "Dependency %s is not bundled, but listed.", declaredDependency)); } } } return problemsBySeverity; }
@Test void testCheckNoticeFileRejectsEmptyFile() { assertThat(NoticeFileChecker.checkNoticeFile(Collections.emptyMap(), "test", null)) .containsOnlyKeys(NoticeFileChecker.Severity.CRITICAL); }
@Override @Nullable public V put(@Nullable K key, @Nullable V value) { return put(key, value, true); }
@Test void shouldGetValues() { this.map.put(123, "123"); this.map.put(456, null); this.map.put(null, "789"); List<String> actual = new ArrayList<>(this.map.values()); List<String> expected = new ArrayList<>(); expected.add("123"); expected.add(null); expected.add("789"); actual.sort(NULL_SAFE_STRING_SORT); expected.sort(NULL_SAFE_STRING_SORT); assertThat(actual, equalTo(expected)); }
@Override public Map<Errors, Integer> errorCounts() { HashMap<Errors, Integer> counts = new HashMap<>(); updateErrorCounts(counts, Errors.forCode(data.errorCode())); return counts; }
@Test public void testErrorCountsReturnsOneError() { GetTelemetrySubscriptionsResponseData data = new GetTelemetrySubscriptionsResponseData() .setErrorCode(Errors.CLUSTER_AUTHORIZATION_FAILED.code()); data.setErrorCode(Errors.INVALID_CONFIG.code()); GetTelemetrySubscriptionsResponse response = new GetTelemetrySubscriptionsResponse(data); assertEquals(Collections.singletonMap(Errors.INVALID_CONFIG, 1), response.errorCounts()); }
public static List<Date> matchedDates(String patternStr, Date start, int count, boolean isMatchSecond) { return matchedDates(patternStr, start, DateUtil.endOfYear(start), count, isMatchSecond); }
@Test public void matchedDatesTest3() { // test the last day of the month List<Date> matchedDates = CronPatternUtil.matchedDates("0 0 */1 L * *", DateUtil.parse("2018-10-30 23:33:22"), 5, true); assertEquals(5, matchedDates.size()); assertEquals("2018-10-31 00:00:00", matchedDates.get(0).toString()); assertEquals("2018-10-31 01:00:00", matchedDates.get(1).toString()); assertEquals("2018-10-31 02:00:00", matchedDates.get(2).toString()); assertEquals("2018-10-31 03:00:00", matchedDates.get(3).toString()); assertEquals("2018-10-31 04:00:00", matchedDates.get(4).toString()); }
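A minimal usage sketch of the overload above, assuming Hutool's CronPatternUtil and DateUtil (both already referenced in this test): because the overload bounds the search at DateUtil.endOfYear(start), matches are only produced within the start date's calendar year.

import cn.hutool.core.date.DateUtil;
import cn.hutool.cron.pattern.CronPatternUtil;

import java.util.Date;
import java.util.List;

public class MatchedDatesSketch {
    public static void main(String[] args) {
        Date start = DateUtil.parse("2018-10-30 23:33:22");
        // Searches forward from 'start', but never past 2018-12-31 because of the endOfYear bound.
        List<Date> dates = CronPatternUtil.matchedDates("0 0 */1 L * *", start, 3, true);
        dates.forEach(System.out::println);
    }
}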
public static void updateKeyForBlobStore(Map<String, Object> conf, BlobStore blobStore, CuratorFramework zkClient, String key, NimbusInfo nimbusDetails) { try { // Most of clojure tests currently try to access the blobs using getBlob. Since, updateKeyForBlobStore // checks for updating the correct version of the blob as a part of nimbus ha before performing any // operation on it, there is a necessity to stub several test cases to ignore this method. It is a valid // trade off to return if nimbusDetails which include the details of the current nimbus host port data are // not initialized as a part of the test. Moreover, this applies to only local blobstore when used along with // nimbus ha. if (nimbusDetails == null) { return; } boolean isListContainsCurrentNimbusInfo = false; List<String> stateInfo; if (zkClient.checkExists().forPath(BLOBSTORE_SUBTREE + "/" + key) == null) { return; } stateInfo = zkClient.getChildren().forPath(BLOBSTORE_SUBTREE + "/" + key); if (stateInfo == null || stateInfo.isEmpty()) { return; } LOG.debug("StateInfo for update {}", stateInfo); Set<NimbusInfo> nimbusInfoList = getNimbodesWithLatestSequenceNumberOfBlob(zkClient, key); for (NimbusInfo nimbusInfo : nimbusInfoList) { if (nimbusInfo.getHost().equals(nimbusDetails.getHost())) { isListContainsCurrentNimbusInfo = true; break; } } if (!isListContainsCurrentNimbusInfo && downloadUpdatedBlob(conf, blobStore, key, nimbusInfoList)) { LOG.debug("Updating state inside zookeeper for an update"); createStateInZookeeper(conf, key, nimbusDetails); } } catch (KeeperException.NoNodeException | KeyNotFoundException e) { //race condition with a delete return; } catch (Exception exp) { throw new RuntimeException(exp); } }
@Test public void testUpdateKeyForBlobStore_nodeWithNullChildren() { zkClientBuilder.withExists(BLOBSTORE_KEY, true); zkClientBuilder.withGetChildren(BLOBSTORE_KEY, (List<String>) null); BlobStoreUtils.updateKeyForBlobStore(conf, blobStore, zkClientBuilder.build(), KEY, nimbusDetails); zkClientBuilder.verifyExists(true); zkClientBuilder.verifyGetChildren(); verify(nimbusDetails, never()).getHost(); verify(conf, never()).get(anyString()); }
public static DeleteAclsRequest parse(ByteBuffer buffer, short version) { return new DeleteAclsRequest(new DeleteAclsRequestData(new ByteBufferAccessor(buffer), version), version); }
@Test public void shouldRoundTripV1() { final DeleteAclsRequest original = new DeleteAclsRequest.Builder( requestData(LITERAL_FILTER, PREFIXED_FILTER, ANY_FILTER) ).build(V1); final ByteBuffer buffer = original.serialize(); final DeleteAclsRequest result = DeleteAclsRequest.parse(buffer, V1); assertRequestEquals(original, result); }
@Override public String convert(final BroadcastRuleConfiguration ruleConfig) { if (ruleConfig.getTables().isEmpty()) { return ""; } return String.format(CREATE_BROADCAST_TABLE_RULE, Joiner.on(",").join(ruleConfig.getTables())); }
@Test void assertConvert() { BroadcastRuleConfiguration ruleConfig = mock(BroadcastRuleConfiguration.class); when(ruleConfig.getTables()).thenReturn(Arrays.asList("t_province", "t_city")); String actual = new BroadcastRuleConfigurationToDistSQLConverter().convert(ruleConfig); assertThat(actual, is("CREATE BROADCAST TABLE RULE t_province,t_city;")); }
@Override protected void handleEndTxnOnPartition(CommandEndTxnOnPartition command) { checkArgument(state == State.Connected); final long requestId = command.getRequestId(); final String topic = command.getTopic(); final int txnAction = command.getTxnAction().getValue(); final TxnID txnID = new TxnID(command.getTxnidMostBits(), command.getTxnidLeastBits()); final long lowWaterMark = command.getTxnidLeastBitsOfLowWatermark(); if (log.isDebugEnabled()) { log.debug("[{}] handleEndTxnOnPartition txnId: [{}], txnAction: [{}]", topic, txnID, txnAction); } CompletableFuture<Optional<Topic>> topicFuture = service.getTopicIfExists(TopicName.get(topic).toString()); topicFuture.thenAcceptAsync(optionalTopic -> { if (optionalTopic.isPresent()) { // we only accept superuser because this endpoint is reserved for tc to broker communication isSuperUser() .thenCompose(isOwner -> { if (!isOwner) { return failedFutureTxnTcNotAllowed(txnID); } return optionalTopic.get().endTxn(txnID, txnAction, lowWaterMark); }) .whenComplete((ignored, throwable) -> { if (throwable != null) { throwable = FutureUtil.unwrapCompletionException(throwable); log.error("handleEndTxnOnPartition fail!, topic {}, txnId: [{}], " + "txnAction: [{}]", topic, txnID, TxnAction.valueOf(txnAction), throwable); writeAndFlush(Commands.newEndTxnOnPartitionResponse( requestId, BrokerServiceException.getClientErrorCode(throwable), throwable.getMessage(), txnID.getLeastSigBits(), txnID.getMostSigBits())); return; } writeAndFlush(Commands.newEndTxnOnPartitionResponse(requestId, txnID.getLeastSigBits(), txnID.getMostSigBits())); }); } else { getBrokerService().getManagedLedgerFactory() .asyncExists(TopicName.get(topic).getPersistenceNamingEncoding()) .thenAccept((b) -> { if (b) { log.error("handleEndTxnOnPartition fail ! The topic {} does not exist in broker, " + "txnId: [{}], txnAction: [{}]", topic, txnID, TxnAction.valueOf(txnAction)); writeAndFlush(Commands.newEndTxnOnPartitionResponse(requestId, ServerError.ServiceNotReady, "The topic " + topic + " does not exist in broker.", txnID.getLeastSigBits(), txnID.getMostSigBits())); } else { log.warn("handleEndTxnOnPartition fail ! The topic {} has not been created, " + "txnId: [{}], txnAction: [{}]", topic, txnID, TxnAction.valueOf(txnAction)); writeAndFlush(Commands.newEndTxnOnPartitionResponse(requestId, txnID.getLeastSigBits(), txnID.getMostSigBits())); } }).exceptionally(e -> { log.error("handleEndTxnOnPartition fail ! topic {}, " + "txnId: [{}], txnAction: [{}]", topic, txnID, TxnAction.valueOf(txnAction), e.getCause()); writeAndFlush(Commands.newEndTxnOnPartitionResponse( requestId, ServerError.ServiceNotReady, e.getMessage(), txnID.getLeastSigBits(), txnID.getMostSigBits())); return null; }); } }, ctx.executor()).exceptionally(e -> { log.error("handleEndTxnOnPartition fail ! topic {}, " + "txnId: [{}], txnAction: [{}]", topic, txnID, TxnAction.valueOf(txnAction), e.getCause()); writeAndFlush(Commands.newEndTxnOnPartitionResponse( requestId, ServerError.ServiceNotReady, e.getMessage(), txnID.getLeastSigBits(), txnID.getMostSigBits())); return null; }); }
@Test(expectedExceptions = IllegalArgumentException.class) public void shouldFailHandleEndTxnOnPartition() throws Exception { ServerCnx serverCnx = mock(ServerCnx.class, CALLS_REAL_METHODS); Field stateUpdater = ServerCnx.class.getDeclaredField("state"); stateUpdater.setAccessible(true); stateUpdater.set(serverCnx, ServerCnx.State.Failed); serverCnx.handleEndTxnOnPartition(any()); }
protected Query<E> namedTypedQuery(String queryName) throws HibernateException { return currentSession().createNamedQuery(queryName, getEntityClass()); }
@Test void getsNamedTypedQueries() throws Exception { assertThat(dao.namedTypedQuery("query-name")) .isEqualTo(query); verify(session).createNamedQuery("query-name", String.class); }
@Override public void publish(ScannerReportWriter writer) { for (final DefaultInputFile inputFile : componentCache.allChangedFilesToPublish()) { File iofile = writer.getSourceFile(inputFile.scannerId()); try (OutputStream output = new BufferedOutputStream(new FileOutputStream(iofile)); InputStream in = inputFile.inputStream(); BufferedReader reader = new BufferedReader(new InputStreamReader(in, inputFile.charset()))) { writeSource(reader, output, inputFile.lines()); } catch (IOException e) { throw new IllegalStateException("Unable to store file source in the report", e); } } }
@Test public void cleanLineEnds() throws Exception { FileUtils.write(sourceFile, "\n2\r\n3\n4\r5", StandardCharsets.ISO_8859_1); publisher.publish(writer); File out = writer.getSourceFile(inputFile.scannerId()); assertThat(FileUtils.readFileToString(out, StandardCharsets.UTF_8)).isEqualTo("\n2\n3\n4\n5"); }
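The test above implies that writeSource normalizes line endings while copying the source; a minimal sketch of that normalization, under the assumption that CRLF and lone CR both become LF (the helper name cleanLineEnds is hypothetical):

public class LineEndSketch {
    static String cleanLineEnds(String raw) {
        // Collapse Windows (\r\n) and old-Mac (\r) endings to a single \n.
        return raw.replace("\r\n", "\n").replace('\r', '\n');
    }

    public static void main(String[] args) {
        String raw = "\n2\r\n3\n4\r5";
        System.out.println(cleanLineEnds(raw).equals("\n2\n3\n4\n5")); // true
    }
}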
@ConstantFunction(name = "bitand", argTypes = {TINYINT, TINYINT}, returnType = TINYINT) public static ConstantOperator bitandTinyInt(ConstantOperator first, ConstantOperator second) { return ConstantOperator.createTinyInt((byte) (first.getTinyInt() & second.getTinyInt())); }
@Test public void bitandTinyInt() { assertEquals(10, ScalarOperatorFunctions.bitandTinyInt(O_TI_10, O_TI_10).getTinyInt()); }
@Override public Optional<BufferOrEvent> getNext() throws IOException, InterruptedException { return getNextBufferOrEvent(true); }
@Test void testAvailability() throws IOException, InterruptedException { final SingleInputGate inputGate1 = createInputGate(1); TestInputChannel inputChannel1 = new TestInputChannel(inputGate1, 0, false, true); inputGate1.setInputChannels(inputChannel1); final SingleInputGate inputGate2 = createInputGate(1); TestInputChannel inputChannel2 = new TestInputChannel(inputGate2, 0, false, true); inputGate2.setInputChannels(inputChannel2); UnionInputGate inputGate = new UnionInputGate(inputGate1, inputGate2); inputChannel1.read(BufferBuilderTestUtils.buildSomeBuffer(1)); assertThat(inputGate.getAvailableFuture()).isDone(); inputChannel1.read(BufferBuilderTestUtils.buildSomeBuffer(2)); assertThat(inputGate.getAvailableFuture()).isDone(); assertThat(inputGate.getNext().get().getBuffer().getSize()).isOne(); assertThat(inputGate.getAvailableFuture()).isDone(); }
@Override public PulsarClient build() throws PulsarClientException { checkArgument(StringUtils.isNotBlank(conf.getServiceUrl()) || conf.getServiceUrlProvider() != null, "service URL or service URL provider needs to be specified on the ClientBuilder object."); checkArgument(StringUtils.isBlank(conf.getServiceUrl()) || conf.getServiceUrlProvider() == null, "Can only chose one way service URL or service URL provider."); if (conf.getServiceUrlProvider() != null) { checkArgument(StringUtils.isNotBlank(conf.getServiceUrlProvider().getServiceUrl()), "Cannot get service url from service url provider."); conf.setServiceUrl(conf.getServiceUrlProvider().getServiceUrl()); } PulsarClient client = new PulsarClientImpl(conf); if (conf.getServiceUrlProvider() != null) { conf.getServiceUrlProvider().initialize(client); } return client; }
@Test(expectedExceptions = IllegalArgumentException.class) public void testClientBuilderWithServiceUrlAndServiceUrlProviderNotSet() throws PulsarClientException { PulsarClient.builder().build(); }
@Override public List<byte[]> mGet(byte[]... keys) { if (isQueueing() || isPipelined()) { for (byte[] key : keys) { read(key, ByteArrayCodec.INSTANCE, RedisCommands.GET, key); } return null; } CommandBatchService es = new CommandBatchService(executorService); for (byte[] key: keys) { es.readAsync(key, ByteArrayCodec.INSTANCE, RedisCommands.GET, key); } BatchResult<byte[]> r = (BatchResult<byte[]>) es.execute(); return r.getResponses(); }
@Test public void testMGet() { Map<byte[], byte[]> map = new HashMap<>(); for (int i = 0; i < 10; i++) { map.put(("test" + i).getBytes(), ("test" + i*100).getBytes()); } connection.mSet(map); List<byte[]> r = connection.mGet(map.keySet().toArray(new byte[0][])); assertThat(r).containsExactly(map.values().toArray(new byte[0][])); }
@Override public TableEntryByTypeTransformer tableEntryByTypeTransformer() { return transformer; }
@Test void transforms_empties_with_correct_method() throws Throwable { Map<String, String> fromValue = singletonMap("key", "[empty]"); Method method = JavaDefaultDataTableEntryTransformerDefinitionTest.class.getMethod("correct_method", Map.class, Type.class); JavaDefaultDataTableEntryTransformerDefinition definition = new JavaDefaultDataTableEntryTransformerDefinition( method, lookup, false, new String[] { "[empty]" }); assertThat(definition.tableEntryByTypeTransformer() .transform(fromValue, String.class, cellTransformer), is("key=")); }
@Override public List<Instance> selectInstances(String serviceName, boolean healthy) throws NacosException { return selectInstances(serviceName, new ArrayList<>(), healthy); }
@Test void testSelectInstances8() throws NacosException { //given String serviceName = "service1"; String groupName = "group1"; List<String> clusterList = Arrays.asList("cluster1", "cluster2"); //when client.selectInstances(serviceName, groupName, clusterList, true, false); //then verify(proxy, times(1)).queryInstancesOfService(serviceName, groupName, "cluster1,cluster2", false); }
@Override public synchronized String getUri() { return String.format( "jdbc:%s://%s:%d%s%s", getJDBCPrefix(), this.getHost(), this.getPort(getJDBCPort()), initialized ? ";DatabaseName=" + databaseName : "", ";encrypt=true;trustServerCertificate=true;"); }
@Test public void testGetUriShouldReturnCorrectValue() { when(container.getHost()).thenReturn(HOST); when(container.getMappedPort( MSSQLResourceManager.DefaultMSSQLServerContainer.MS_SQL_SERVER_PORT)) .thenReturn(MAPPED_PORT); assertThat(testManager.getUri()) .matches( "jdbc:sqlserver://" + HOST + ":" + MAPPED_PORT + ";DatabaseName=" + DATABASE_NAME + ";encrypt=true;trustServerCertificate=true;"); }
public void setOuterJoinType(OuterJoinType outerJoinType) { this.outerJoinType = outerJoinType; }
@Test void testFullOuterJoinBuildingCorrectCrossProducts() throws Exception { final List<String> leftInput = Arrays.asList("foo", "foo", "foo", "bar", "bar", "foobar", "foobar"); final List<String> rightInput = Arrays.asList("foo", "foo", "bar", "bar", "bar", "barfoo", "barfoo"); baseOperator.setOuterJoinType(OuterJoinOperatorBase.OuterJoinType.FULL); List<String> expected = Arrays.asList( "bar,bar", "bar,bar", "bar,bar", "bar,bar", "bar,bar", "bar,bar", "null,barfoo", "null,barfoo", "foo,foo", "foo,foo", "foo,foo", "foo,foo", "foo,foo", "foo,foo", "foobar,null", "foobar,null"); testOuterJoin(leftInput, rightInput, expected); }
public void poll(RequestFuture<?> future) { while (!future.isDone()) poll(time.timer(Long.MAX_VALUE), future); }
@Test public void blockWhenPollConditionNotSatisfied() { long timeout = 4000L; NetworkClient mockNetworkClient = mock(NetworkClient.class); ConsumerNetworkClient consumerClient = new ConsumerNetworkClient(new LogContext(), mockNetworkClient, metadata, time, 100, 1000, Integer.MAX_VALUE); when(mockNetworkClient.inFlightRequestCount()).thenReturn(1); consumerClient.poll(time.timer(timeout), () -> true); verify(mockNetworkClient).poll(eq(timeout), anyLong()); }
public static List<Event> computeEventDiff(final Params params) { final List<Event> events = new ArrayList<>(); emitPerNodeDiffEvents(createBaselineParams(params), events); emitWholeClusterDiffEvent(createBaselineParams(params), events); emitDerivedBucketSpaceStatesDiffEvents(params, events); return events; }
@Test void derived_bucket_space_state_events_are_not_emitted_if_similar_to_baseline() { EventFixture f = EventFixture.createForNodes(3) .clusterStateBefore("distributor:3 storage:3") .derivedClusterStateBefore("default", "distributor:3 storage:3") .derivedClusterStateBefore("global", "distributor:3 storage:3") .clusterStateAfter("distributor:3 storage:3 .0.s:m") .derivedClusterStateAfter("default", "distributor:3 storage:3 .0.s:m") .derivedClusterStateAfter("global", "distributor:3 storage:3 .0.s:m"); List<Event> events = f.computeEventDiff(); assertThat(events.size(), equalTo(1)); assertThat(events, hasItem(allOf( eventForNode(storageNode(0)), nodeEventForBaseline(), nodeEventWithDescription("Altered node state in cluster state from 'U' to 'M'")))); }
@Override protected void doStart() throws Exception { super.doStart(); LOG.debug("Creating connection to Azure ServiceBus"); client = getEndpoint().getServiceBusClientFactory().createServiceBusProcessorClient(getConfiguration(), this::processMessage, this::processError); client.start(); }
@Test void synchronizationDoesNotDeadLetterMessageWhenReceiveModeIsReceiveAndDelete() throws Exception { try (ServiceBusConsumer consumer = new ServiceBusConsumer(endpoint, processor)) { when(configuration.getServiceBusReceiveMode()).thenReturn(ServiceBusReceiveMode.RECEIVE_AND_DELETE); when(configuration.isEnableDeadLettering()).thenReturn(true); consumer.doStart(); verify(client).start(); verify(clientFactory).createServiceBusProcessorClient(any(), any(), any()); when(messageContext.getMessage()).thenReturn(message); processMessageCaptor.getValue().accept(messageContext); verify(messageContext).getMessage(); verify(processor).process(any(Exchange.class), any(AsyncCallback.class)); Exchange exchange = exchangeCaptor.getValue(); assertThat(exchange).isNotNull(); Synchronization synchronization = exchange.getExchangeExtension().handoverCompletions().get(0); synchronization.onFailure(exchange); verifyNoMoreInteractions(messageContext); } }
@Override public Photo get(Long key) { return _db.getData().get(key); }
@Test public void testResourceGet() { // because the test function will take arbitrary order // always create a photo and operate on that photo for a test function final Long id = createPhoto(); // validate all data are correct final Photo p = _res.get(id); Assert.assertNotNull(p); Assert.assertEquals(p.getId(), id); Assert.assertTrue(p.hasExif()); final EXIF e = p.getExif(); Assert.assertTrue(e.hasLocation()); final LatLong l = e.getLocation(); Assert.assertEquals(l.getLatitude(), 7.0f); Assert.assertEquals(l.getLongitude(), 27.0f); }
public void onFragment(final DirectBuffer buffer, final int offset, final int length, final Header header) { final byte flags = header.flags(); if ((flags & UNFRAGMENTED) == UNFRAGMENTED) { delegate.onFragment(buffer, offset, length, header); } else { handleFragment(buffer, offset, length, header, flags); } }
@Test void shouldDoNothingIfMidArrivesWithoutBegin() { when(header.flags()) .thenReturn((byte) 0) .thenReturn(FrameDescriptor.END_FRAG_FLAG); final UnsafeBuffer srcBuffer = new UnsafeBuffer(new byte[1024]); final int offset = 0; final int length = srcBuffer.capacity() / 2; assembler.onFragment(srcBuffer, offset, length, header); assembler.onFragment(srcBuffer, offset, length, header); verify(delegateFragmentHandler, never()).onFragment(any(), anyInt(), anyInt(), any()); }
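A standalone sketch of the unfragmented check in onFragment above. The flag bit values follow Aeron's FrameDescriptor convention (begin fragment in the high bit, end fragment in the next bit), which is assumed here; a frame bypasses reassembly only when both bits are set.

public class FlagCheckSketch {
    static final byte BEGIN_FRAG_FLAG = (byte) 0b1000_0000;
    static final byte END_FRAG_FLAG = (byte) 0b0100_0000;
    static final byte UNFRAGMENTED = (byte) (BEGIN_FRAG_FLAG | END_FRAG_FLAG);

    static boolean isUnfragmented(byte flags) {
        return (flags & UNFRAGMENTED) == UNFRAGMENTED;
    }

    public static void main(String[] args) {
        System.out.println(isUnfragmented((byte) 0));       // false: mid fragment
        System.out.println(isUnfragmented(END_FRAG_FLAG));  // false: end only
        System.out.println(isUnfragmented(UNFRAGMENTED));   // true: complete in one frame
    }
}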
public static PageNumber page(int i) { return new PageNumber(i); }
@Test public void shouldUnderstandPageNumberAndLabel() { assertThat(Pagination.PageNumber.DOTS.getLabel(), is("...")); assertThat(Pagination.PageNumber.DOTS.getNumber(), is(-1)); assertThat(Pagination.page(5).getLabel(), is("5")); assertThat(Pagination.page(5).getNumber(), is(5)); assertThat(Pagination.page(10, "foo").getLabel(), is("foo")); assertThat(Pagination.page(10, "foo").getNumber(), is(10)); }
@Override public NSImage folderIcon(final Integer size) { NSImage folder = this.iconNamed("NSFolder", size); if(null == folder) { return this.iconNamed("NSFolder", size); } return folder; }
@Test public void testFolderIconAllSizes() { final NSImage icon = new NSImageIconCache().folderIcon(null); assertNotNull(icon); assertTrue(icon.isValid()); assertFalse(icon.isTemplate()); assertTrue(icon.representations().count().intValue() >= 1); }
@Override public ApiResult<TopicPartition, DeletedRecords> handleResponse( Node broker, Set<TopicPartition> keys, AbstractResponse abstractResponse ) { DeleteRecordsResponse response = (DeleteRecordsResponse) abstractResponse; Map<TopicPartition, DeletedRecords> completed = new HashMap<>(); Map<TopicPartition, Throwable> failed = new HashMap<>(); List<TopicPartition> unmapped = new ArrayList<>(); Set<TopicPartition> retriable = new HashSet<>(); for (DeleteRecordsResponseData.DeleteRecordsTopicResult topicResult: response.data().topics()) { for (DeleteRecordsResponseData.DeleteRecordsPartitionResult partitionResult : topicResult.partitions()) { Errors error = Errors.forCode(partitionResult.errorCode()); TopicPartition topicPartition = new TopicPartition(topicResult.name(), partitionResult.partitionIndex()); if (error == Errors.NONE) { completed.put(topicPartition, new DeletedRecords(partitionResult.lowWatermark())); } else { handlePartitionError(topicPartition, error, failed, unmapped, retriable); } } } // Sanity-check if the current leader for these partitions returned results for all of them for (TopicPartition topicPartition : keys) { if (unmapped.isEmpty() && !completed.containsKey(topicPartition) && !failed.containsKey(topicPartition) && !retriable.contains(topicPartition) ) { ApiException sanityCheckException = new ApiException( "The response from broker " + broker.id() + " did not contain a result for topic partition " + topicPartition); log.error( "DeleteRecords request for topic partition {} failed sanity check", topicPartition, sanityCheckException); failed.put(topicPartition, sanityCheckException); } } return new ApiResult<>(completed, failed, unmapped); }
@Test public void testHandlePartitionErrorResponse() { TopicPartition errorPartition = t0p0; Errors error = Errors.TOPIC_AUTHORIZATION_FAILED; Map<TopicPartition, Short> errorsByPartition = new HashMap<>(); errorsByPartition.put(errorPartition, error.code()); AdminApiHandler.ApiResult<TopicPartition, DeletedRecords> result = handleResponse(createResponse(errorsByPartition)); Map<TopicPartition, Throwable> failed = new HashMap<>(); failed.put(errorPartition, error.exception()); Set<TopicPartition> completed = new HashSet<>(recordsToDelete.keySet()); completed.removeAll(failed.keySet()); assertResult(result, completed, failed, emptyList(), emptySet()); }
@SuppressWarnings("MethodMayBeStatic") // Non-static to support DI. public long parse(final String text) { final String date; final String time; final String timezone; if (text.contains("T")) { date = text.substring(0, text.indexOf('T')); final String withTimezone = text.substring(text.indexOf('T') + 1); timezone = getTimezone(withTimezone); time = completeTime(withTimezone.substring(0, withTimezone.length() - timezone.length()) .replaceAll("Z$","")); } else { date = completeDate(text); time = completeTime(""); timezone = ""; } try { final ZoneId zoneId = parseTimezone(timezone); return PARSER.parse(date + "T" + time, zoneId); } catch (final RuntimeException e) { throw new KsqlException("Failed to parse timestamp '" + text + "': " + e.getMessage() + HELP_MESSAGE, e ); } }
@Test public void shouldParseDateWithHourMinuteSecond() { // When: assertThat(parser.parse("2020-12-02T13:59:58"), is(fullParse("2020-12-02T13:59:58.000+0000"))); assertThat(parser.parse("2020-12-02T13:59:58Z"), is(fullParse("2020-12-02T13:59:58.000+0000"))); }
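A reduced sketch of the date/time split that parse performs for a fully specified input, using only java.time; the real method additionally completes partial dates and times and resolves explicit timezones via helpers not shown here.

import java.time.LocalDateTime;
import java.time.ZoneOffset;

public class PartialTimestampSketch {
    public static void main(String[] args) {
        String text = "2020-12-02T13:59:58Z";
        String date = text.substring(0, text.indexOf('T'));
        String time = text.substring(text.indexOf('T') + 1).replaceAll("Z$", "");
        long epochMs = LocalDateTime.parse(date + "T" + time)
                .toInstant(ZoneOffset.UTC)
                .toEpochMilli();
        System.out.println(epochMs); // epoch millis, interpreting the trailing Z as UTC
    }
}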
@Override public Column convert(BasicTypeDefine typeDefine) { PhysicalColumn.PhysicalColumnBuilder builder = PhysicalColumn.builder() .name(typeDefine.getName()) .sourceType(typeDefine.getColumnType()) .nullable(typeDefine.isNullable()) .defaultValue(typeDefine.getDefaultValue()) .comment(typeDefine.getComment()); String db2Type = typeDefine.getDataType().toUpperCase(); switch (db2Type) { case DB2_BOOLEAN: builder.sourceType(DB2_BOOLEAN); builder.dataType(BasicType.BOOLEAN_TYPE); break; case DB2_SMALLINT: builder.sourceType(DB2_SMALLINT); builder.dataType(BasicType.SHORT_TYPE); break; case DB2_INT: case DB2_INTEGER: builder.sourceType(DB2_INT); builder.dataType(BasicType.INT_TYPE); break; case DB2_BIGINT: builder.sourceType(DB2_BIGINT); builder.dataType(BasicType.LONG_TYPE); break; case DB2_REAL: builder.sourceType(DB2_REAL); builder.dataType(BasicType.FLOAT_TYPE); break; case DB2_DOUBLE: builder.sourceType(DB2_DOUBLE); builder.dataType(BasicType.DOUBLE_TYPE); break; case DB2_DECFLOAT: builder.sourceType(DB2_DECFLOAT); builder.dataType(BasicType.DOUBLE_TYPE); break; case DB2_DECIMAL: builder.sourceType( String.format( "%s(%s,%s)", DB2_DECIMAL, typeDefine.getPrecision(), typeDefine.getScale())); builder.dataType( new DecimalType( Math.toIntExact(typeDefine.getPrecision()), typeDefine.getScale())); builder.columnLength(typeDefine.getPrecision()); builder.scale(typeDefine.getScale()); break; case DB2_CHARACTER: case DB2_CHAR: builder.sourceType(String.format("%s(%d)", DB2_CHAR, typeDefine.getLength())); // For char/varchar this length is in bytes builder.columnLength(typeDefine.getLength()); builder.dataType(BasicType.STRING_TYPE); break; case DB2_VARCHAR: builder.sourceType(String.format("%s(%d)", DB2_VARCHAR, typeDefine.getLength())); builder.columnLength(typeDefine.getLength()); builder.dataType(BasicType.STRING_TYPE); break; case DB2_LONG_VARCHAR: builder.sourceType(DB2_LONG_VARCHAR); // default length is 32700 builder.columnLength(typeDefine.getLength()); builder.dataType(BasicType.STRING_TYPE); break; case DB2_CLOB: builder.sourceType(String.format("%s(%d)", DB2_CLOB, typeDefine.getLength())); builder.columnLength(typeDefine.getLength()); builder.dataType(BasicType.STRING_TYPE); break; case DB2_GRAPHIC: builder.sourceType(String.format("%s(%d)", DB2_GRAPHIC, typeDefine.getLength())); builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength())); builder.dataType(BasicType.STRING_TYPE); break; case DB2_VARGRAPHIC: builder.sourceType(String.format("%s(%d)", DB2_VARGRAPHIC, typeDefine.getLength())); builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength())); builder.dataType(BasicType.STRING_TYPE); break; case DB2_DBCLOB: builder.sourceType(String.format("%s(%d)", DB2_DBCLOB, typeDefine.getLength())); builder.columnLength(TypeDefineUtils.charTo4ByteLength(typeDefine.getLength())); builder.dataType(BasicType.STRING_TYPE); break; case DB2_XML: builder.sourceType(DB2_XML); builder.columnLength((long) Integer.MAX_VALUE); builder.dataType(BasicType.STRING_TYPE); break; case DB2_BINARY: builder.sourceType(String.format("%s(%d)", DB2_BINARY, typeDefine.getLength())); builder.columnLength(typeDefine.getLength()); builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case DB2_VARBINARY: builder.sourceType(String.format("%s(%d)", DB2_VARBINARY, typeDefine.getLength())); builder.columnLength(typeDefine.getLength()); builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case DB2_BLOB: builder.sourceType(String.format("%s(%d)", DB2_BLOB, typeDefine.getLength())); builder.columnLength(typeDefine.getLength()); builder.dataType(PrimitiveByteArrayType.INSTANCE); break; case DB2_DATE: builder.sourceType(DB2_DATE); builder.dataType(LocalTimeType.LOCAL_DATE_TYPE); break; case DB2_TIME: builder.sourceType(DB2_TIME); builder.dataType(LocalTimeType.LOCAL_TIME_TYPE); break; case DB2_TIMESTAMP: builder.sourceType(String.format("%s(%d)", DB2_TIMESTAMP, typeDefine.getScale())); builder.dataType(LocalTimeType.LOCAL_DATE_TIME_TYPE); builder.scale(typeDefine.getScale()); break; default: throw CommonError.convertToSeaTunnelTypeError( DatabaseIdentifier.DB_2, db2Type, typeDefine.getName()); } return builder.build(); }
@Test public void testConvertChar() { BasicTypeDefine<Object> typeDefine = BasicTypeDefine.builder() .name("test") .columnType("CHARACTER") .dataType("CHARACTER") .length(1L) .build(); Column column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getLength(), column.getColumnLength()); Assertions.assertEquals("CHAR(1)", column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("VARCHAR") .dataType("VARCHAR") .length(1L) .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getLength(), column.getColumnLength()); Assertions.assertEquals("VARCHAR(1)", column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("LONG VARCHAR") .dataType("LONG VARCHAR") .length(1L) .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getLength(), column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("CLOB") .dataType("CLOB") .length(1L) .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getLength(), column.getColumnLength()); Assertions.assertEquals( String.format("%s(%s)", DB2TypeConverter.DB2_CLOB, typeDefine.getLength()), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("GRAPHIC") .dataType("GRAPHIC") .length(1L) .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(4, column.getColumnLength()); Assertions.assertEquals( String.format("%s(%s)", DB2TypeConverter.DB2_GRAPHIC, typeDefine.getLength()), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("VARGRAPHIC") .dataType("VARGRAPHIC") .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(typeDefine.getLength(), column.getColumnLength()); Assertions.assertEquals( String.format("%s(%s)", DB2TypeConverter.DB2_VARGRAPHIC, typeDefine.getLength()), column.getSourceType()); typeDefine = BasicTypeDefine.builder() .name("test") .columnType("DBCLOB") .dataType("DBCLOB") .length(1L) .build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(4, column.getColumnLength()); Assertions.assertEquals( String.format("%s(%s)", DB2TypeConverter.DB2_DBCLOB, typeDefine.getLength()), column.getSourceType()); typeDefine = BasicTypeDefine.builder().name("test").columnType("XML").dataType("XML").build(); column = DB2TypeConverter.INSTANCE.convert(typeDefine); Assertions.assertEquals(typeDefine.getName(), column.getName()); Assertions.assertEquals(BasicType.STRING_TYPE, column.getDataType()); Assertions.assertEquals(Integer.MAX_VALUE, column.getColumnLength()); Assertions.assertEquals(typeDefine.getColumnType(), column.getSourceType()); }
public IssueQuery create(SearchRequest request) { try (DbSession dbSession = dbClient.openSession(false)) { final ZoneId timeZone = parseTimeZone(request.getTimeZone()).orElse(clock.getZone()); Collection<RuleDto> ruleDtos = ruleKeysToRuleId(dbSession, request.getRules()); Collection<String> ruleUuids = ruleDtos.stream().map(RuleDto::getUuid).collect(Collectors.toSet()); Collection<String> issueKeys = collectIssueKeys(dbSession, request); if (request.getRules() != null && request.getRules().stream().collect(Collectors.toSet()).size() != ruleDtos.size()) { ruleUuids.add("non-existing-uuid"); } IssueQuery.Builder builder = IssueQuery.builder() .issueKeys(issueKeys) .severities(request.getSeverities()) .cleanCodeAttributesCategories(request.getCleanCodeAttributesCategories()) .impactSoftwareQualities(request.getImpactSoftwareQualities()) .impactSeverities(request.getImpactSeverities()) .statuses(request.getStatuses()) .resolutions(request.getResolutions()) .issueStatuses(request.getIssueStatuses()) .resolved(request.getResolved()) .prioritizedRule(request.getPrioritizedRule()) .rules(ruleDtos) .ruleUuids(ruleUuids) .assigneeUuids(request.getAssigneeUuids()) .authors(request.getAuthors()) .scopes(request.getScopes()) .languages(request.getLanguages()) .tags(request.getTags()) .types(request.getTypes()) .pciDss32(request.getPciDss32()) .pciDss40(request.getPciDss40()) .owaspAsvs40(request.getOwaspAsvs40()) .owaspAsvsLevel(request.getOwaspAsvsLevel()) .owaspTop10(request.getOwaspTop10()) .owaspTop10For2021(request.getOwaspTop10For2021()) .stigAsdR5V3(request.getStigAsdV5R3()) .casa(request.getCasa()) .sansTop25(request.getSansTop25()) .cwe(request.getCwe()) .sonarsourceSecurity(request.getSonarsourceSecurity()) .assigned(request.getAssigned()) .createdAt(parseStartingDateOrDateTime(request.getCreatedAt(), timeZone)) .createdBefore(parseEndingDateOrDateTime(request.getCreatedBefore(), timeZone)) .facetMode(request.getFacetMode()) .timeZone(timeZone) .codeVariants(request.getCodeVariants()); List<ComponentDto> allComponents = new ArrayList<>(); boolean effectiveOnComponentOnly = mergeDeprecatedComponentParameters(dbSession, request, allComponents); addComponentParameters(builder, dbSession, effectiveOnComponentOnly, allComponents, request); setCreatedAfterFromRequest(dbSession, builder, request, allComponents, timeZone); String sort = request.getSort(); if (!isNullOrEmpty(sort)) { builder.sort(sort); builder.asc(request.getAsc()); } return builder.build(); } }
@Test public void return_empty_results_if_not_allowed_to_search_for_subview() { ComponentDto view = db.components().insertPrivatePortfolio(); ComponentDto subView = db.components().insertComponent(newSubPortfolio(view)); SearchRequest request = new SearchRequest() .setComponentUuids(singletonList(subView.uuid())); IssueQuery query = underTest.create(request); assertThat(query.viewUuids()).containsOnly("<UNKNOWN>"); }
public String getName() { return name; }
@Test public void testGetName() { // Test the getName method assertEquals("EventName", event.getName()); }
@Override void execute() { String[] loc = getCl().getUpddateLocationParams(); Path newPath = new Path(loc[0]); Path oldPath = new Path(loc[1]); URI oldURI = oldPath.toUri(); URI newURI = newPath.toUri(); /* * validate input - Both new and old URI should contain valid host names and valid schemes. * port is optional in both the URIs since HDFS HA NN URI doesn't have a port. */ if (oldURI.getHost() == null || newURI.getHost() == null) { throw new IllegalStateException("HiveMetaTool:A valid host is required in both old-loc and new-loc"); } else if (oldURI.getScheme() == null || newURI.getScheme() == null) { throw new IllegalStateException("HiveMetaTool:A valid scheme is required in both old-loc and new-loc"); } updateFSRootLocation(oldURI, newURI, getCl().getSerdePropKey(), getCl().getTablePropKey(), getCl().isDryRun()); }
@Test public void testNoScheme() throws Exception { exception.expect(IllegalStateException.class); exception.expectMessage("HiveMetaTool:A valid scheme is required in both old-loc and new-loc"); MetaToolTaskUpdateLocation t = new MetaToolTaskUpdateLocation(); t.setCommandLine(new HiveMetaToolCommandLine(new String[] {"-updateLocation", "//old.host", "//new.host"})); t.execute(); }
protected String getHttpURL(Exchange exchange, Endpoint endpoint) { Object url = exchange.getIn().getHeader(Exchange.HTTP_URL); if (url instanceof String) { return (String) url; } else { Object uri = exchange.getIn().getHeader(Exchange.HTTP_URI); if (uri instanceof String) { return (String) uri; } else { // Try to obtain from endpoint int index = endpoint.getEndpointUri().lastIndexOf("http:"); if (index != -1) { return endpoint.getEndpointUri().substring(index); } } } return null; }
@Test public void testGetHttpURLFromEndpointUriWithAdditionalScheme() { Endpoint endpoint = Mockito.mock(Endpoint.class); Exchange exchange = Mockito.mock(Exchange.class); Message message = Mockito.mock(Message.class); Mockito.when(endpoint.getEndpointUri()).thenReturn("netty-http:" + TEST_URI); Mockito.when(exchange.getIn()).thenReturn(message); AbstractHttpSpanDecorator decorator = new AbstractHttpSpanDecorator() { @Override public String getComponent() { return null; } @Override public String getComponentClassName() { return null; } }; assertEquals(TEST_URI, decorator.getHttpURL(exchange, endpoint)); }
public SearchSourceBuilder create(SearchesConfig config) { return create(SearchCommand.from(config)); }
@Test void searchIncludesProperSourceFields() { final SearchSourceBuilder search = this.searchRequestFactory.create(ChunkCommand.builder() .indices(Collections.singleton("graylog_0")) .range(RANGE) .fields(List.of("foo", "bar")) .build()); assertJsonPath(search, request -> { request.jsonPathAsListOf("$._source.includes", String.class) .containsExactly("foo", "bar"); request.jsonPathAsListOf("$._source.excludes", String.class) .isEmpty(); }); }
public void unset(PropertyKey key) { Preconditions.checkNotNull(key, "key"); mProperties.remove(key); }
@Test public void unset() { assertFalse(mConfiguration.isSet(PropertyKey.SECURITY_LOGIN_USERNAME)); mConfiguration.set(PropertyKey.SECURITY_LOGIN_USERNAME, "test"); assertTrue(mConfiguration.isSet(PropertyKey.SECURITY_LOGIN_USERNAME)); mConfiguration.unset(PropertyKey.SECURITY_LOGIN_USERNAME); assertFalse(mConfiguration.isSet(PropertyKey.SECURITY_LOGIN_USERNAME)); }
public static HttpRequest newJDiscRequest(CurrentContainer container, HttpServletRequest servletRequest) { try { var jettyRequest = (Request) servletRequest; var jdiscHttpReq = HttpRequest.newServerRequest( container, getUri(servletRequest), getMethod(servletRequest), HttpRequest.Version.fromString(servletRequest.getProtocol()), new InetSocketAddress(servletRequest.getRemoteAddr(), servletRequest.getRemotePort()), getConnection(jettyRequest).getCreatedTimeStamp(), jettyRequest.getTimeStamp()); jdiscHttpReq.context().put(RequestUtils.JDISC_REQUEST_X509CERT, getCertChain(servletRequest)); jdiscHttpReq.context().put(RequestUtils.JDICS_REQUEST_PORT, servletRequest.getLocalPort()); SSLSession sslSession = (SSLSession) servletRequest.getAttribute(RequestUtils.JETTY_REQUEST_SSLSESSION); jdiscHttpReq.context().put(RequestUtils.JDISC_REQUEST_SSLSESSION, sslSession); servletRequest.setAttribute(HttpRequest.class.getName(), jdiscHttpReq); return jdiscHttpReq; } catch (Utf8Appendable.NotUtf8Exception e) { throw createBadQueryException(e); } }
@Test final void illegal_host_throws_requestexception1() { try { HttpRequestFactory.newJDiscRequest( new MockContainer(), createMockRequest("http", "?", "/foo", "")); fail("Above statement should throw"); } catch (RequestException e) { assertThat(e.getResponseStatus(), is(Response.Status.BAD_REQUEST)); } }
@SuppressWarnings("unchecked") static Object extractFromRecordValue(Object recordValue, String fieldName) { List<String> fields = Splitter.on('.').splitToList(fieldName); if (recordValue instanceof Struct) { return valueFromStruct((Struct) recordValue, fields); } else if (recordValue instanceof Map) { return valueFromMap((Map<String, ?>) recordValue, fields); } else { throw new UnsupportedOperationException( "Cannot extract value from type: " + recordValue.getClass().getName()); } }
@Test public void testExtractFromRecordValueMapNested() { Map<String, Object> id = ImmutableMap.of("key", 123L); Map<String, Object> data = ImmutableMap.of("id", id); Map<String, Object> val = ImmutableMap.of("data", data); Object result = RecordUtils.extractFromRecordValue(val, "data.id.key"); assertThat(result).isEqualTo(123L); }
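A self-contained sketch of the Map branch used by extractFromRecordValue above: the dotted field name is split on '.' and each segment descends one level into the nested maps (the Struct branch is omitted, and returning null on a missing path is an assumption of this sketch).

import java.util.Map;

public class DottedPathSketch {
    @SuppressWarnings("unchecked")
    static Object valueFromMap(Map<String, ?> map, String fieldName) {
        Object current = map;
        for (String part : fieldName.split("\\.")) {
            if (!(current instanceof Map)) {
                return null; // the path runs past a leaf value
            }
            current = ((Map<String, ?>) current).get(part);
        }
        return current;
    }

    public static void main(String[] args) {
        Map<String, Object> val = Map.of("data", Map.of("id", Map.of("key", 123L)));
        System.out.println(valueFromMap(val, "data.id.key")); // 123
    }
}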
@Override public Object deserialize(Asn1ObjectInputStream in, Class<? extends Object> type, Asn1ObjectMapper mapper) { final Asn1Entity entity = type.getAnnotation(Asn1Entity.class); final Object instance = ObjectUtils.newInstance(type); final Map<String, List<Asn1Field>> fields = fieldsMap(mapper.getFields(type)); while (!in.atEnd()) { try (final Asn1ObjectInputStream seq = in.next()) { if (seq.tagNo != getNestedTagNo(type)) { throw new Asn1Exception("Expected tag %x, got %x", getNestedTagNo(type), seq.tagNo); } final String id = readIdentifier(seq); final List<Asn1Field> fieldsOfId = fields.remove(id); if (fieldsOfId == null) { if (!entity.partial()) throw new Asn1Exception("Found unknown identifier " + id); seq.advanceToEnd(); continue; } final FieldSequence fieldsSeq = new FieldSequence(false, fieldsOfId); readFields(mapper, seq, fieldsSeq, instance); while (!seq.atEnd()) { try (final Asn1ObjectInputStream obj = seq.next()) { final Asn1Field field = fieldsSeq.get(obj.tagNo); final Object attr = mapper.readValue(obj, field.converter(), field.type()); ObjectUtils.setProperty(field.pd, instance, attr); } } if (!fieldsSeq.done()) { throw new Asn1Exception("At end of data, but still non optional fields"); } } } return instance; }
@Test public void shouldDeserializeWithOptional() { assertEquals(new Set(1, 2, 3, 4), deserialize( new SetOfIdentifiedConverter(), Set.class, new byte[] { 0x30, 6, 0x06, 1, 44, 0x02, 1, 4, 0x30, 6, 0x06, 1, 83, 0x02, 1, 3, 0x30, 9, 0x06, 1, 84, 0x02, 1, 1, 0x02, 1, 2, } )); }
public Optional<Table> getTable(TableName tableName) { return Optional.ofNullable(getTable(tableName.getCatalog(), tableName.getDb(), tableName.getTbl())); }
@Test public void testGetMetadataTable() throws Exception { new MockUp<IcebergHiveCatalog>() { @Mock boolean tableExists(String dbName, String tableName) { return true; } }; String createIcebergCatalogStmt = "create external catalog iceberg_catalog properties (\"type\"=\"iceberg\", " + "\"hive.metastore.uris\"=\"thrift://hms:9083\", \"iceberg.catalog.type\"=\"hive\")"; AnalyzeTestUtil.getStarRocksAssert().withCatalog(createIcebergCatalogStmt); MetadataMgr metadataMgr = AnalyzeTestUtil.getConnectContext().getGlobalStateMgr().getMetadataMgr(); com.starrocks.catalog.Table table = metadataMgr.getTable("iceberg_catalog", "iceberg_db", "t1$logical_iceberg_metadata"); Assert.assertTrue(table instanceof LogicalIcebergMetadataTable); LogicalIcebergMetadataTable metadataTable = (LogicalIcebergMetadataTable) table; Assert.assertEquals("iceberg_db", metadataTable.getOriginDb()); Assert.assertEquals("t1", metadataTable.getOriginTable()); Assert.assertEquals(LOGICAL_ICEBERG_METADATA, metadataTable.getMetadataTableType()); Assert.assertTrue(metadataTable.isSupported()); AnalyzeTestUtil.getStarRocksAssert().dropCatalog("iceberg_catalog"); }
public static void toLowerCase(StringValue string) { final char[] chars = string.getCharArray(); final int len = string.length(); for (int i = 0; i < len; i++) { chars[i] = Character.toLowerCase(chars[i]); } }
@Test void testToLowerCaseConverting() { StringValue testString = new StringValue("TEST"); StringValueUtils.toLowerCase(testString); assertThat((Object) testString).isEqualTo(new StringValue("test")); }
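Note that toLowerCase above mutates the StringValue's backing char array in place instead of allocating a new string; the same pattern on a plain char[]:

public class InPlaceLowerSketch {
    static void toLowerCase(char[] chars, int len) {
        for (int i = 0; i < len; i++) {
            chars[i] = Character.toLowerCase(chars[i]);
        }
    }

    public static void main(String[] args) {
        char[] buf = "TEST".toCharArray();
        toLowerCase(buf, buf.length);
        System.out.println(new String(buf)); // test
    }
}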
public String decode(byte[] val) { return codecs[0].decode(val, 0, val.length); }
@Test public void testDecodeKoreanLongText() { assertEquals(KOREAN_LONG_TEXT, ksx1001().decode(KOREAN_LONG_TEXT_NO_EXPLICIT_ESCSEQ_BYTES)); }
public static long heapMemoryFree() { return Math.subtractExact(heapMemoryMax(), heapMemoryUsed()); }
@Test public void heapMemoryFree() { long memoryFree = MemoryUtil.heapMemoryFree(); Assert.assertNotEquals(0, memoryFree); }
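For reference, the same quantity can be computed directly from the JDK's MemoryMXBean; this sketch guards against the case where the heap max is undefined (reported as -1), in which a free-bytes figure is meaningless.

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;

public class HeapFreeSketch {
    public static void main(String[] args) {
        MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
        long max = heap.getMax(); // -1 when the max is undefined
        if (max >= 0) {
            System.out.println("heap free bytes: " + Math.subtractExact(max, heap.getUsed()));
        }
    }
}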
@Override public void pluginLoaded(GoPluginDescriptor pluginDescriptor) { if (notificationExtension.canHandlePlugin(pluginDescriptor.id())) { try { notificationPluginRegistry.registerPlugin(pluginDescriptor.id()); List<String> notificationsInterestedIn = notificationExtension.getNotificationsOfInterestFor(pluginDescriptor.id()); if (notificationsInterestedIn != null && !notificationsInterestedIn.isEmpty()) { checkNotificationTypes(pluginDescriptor, notificationsInterestedIn); notificationPluginRegistry.registerPluginInterests(pluginDescriptor.id(), notificationsInterestedIn); } } catch (Exception e) { LOGGER.warn("Error occurred during plugin notification interest registration.", e); } } }
@Test public void shouldNotRegisterPluginInterestsOnPluginLoadIfPluginIsNotOfNotificationType() { NotificationPluginRegistrar notificationPluginRegistrar = new NotificationPluginRegistrar(pluginManager, notificationExtension, notificationPluginRegistry); notificationPluginRegistrar.pluginLoaded(GoPluginDescriptor.builder().id(PLUGIN_ID_4).isBundledPlugin(true).build()); verify(notificationPluginRegistry, never()).registerPluginInterests(anyString(), anyList()); }
public Optional<Measure> toMeasure(@Nullable MeasureDto measureDto, Metric metric) { requireNonNull(metric); if (measureDto == null) { return Optional.empty(); } Double value = measureDto.getValue(); String data = measureDto.getData(); switch (metric.getType().getValueType()) { case INT: return toIntegerMeasure(measureDto, value, data); case LONG: return toLongMeasure(measureDto, value, data); case DOUBLE: return toDoubleMeasure(measureDto, value, data); case BOOLEAN: return toBooleanMeasure(measureDto, value, data); case STRING: return toStringMeasure(measureDto, data); case LEVEL: return toLevelMeasure(measureDto, data); case NO_VALUE: return toNoValueMeasure(measureDto); default: throw new IllegalArgumentException("Unsupported Measure.ValueType " + metric.getType().getValueType()); } }
@Test public void toMeasure_returns_no_value_if_dto_has_no_value_for_Boolean_metric() { Optional<Measure> measure = underTest.toMeasure(EMPTY_MEASURE_DTO, SOME_BOOLEAN_METRIC); assertThat(measure).isPresent(); assertThat(measure.get().getValueType()).isEqualTo(Measure.ValueType.NO_VALUE); }
public Response listLogFiles(String user, Integer port, String topologyId, String callback, String origin) throws IOException { List<Path> fileResults = null; if (topologyId == null) { if (port == null) { fileResults = workerLogs.getAllLogsForRootDir(); } else { fileResults = new ArrayList<>(); File[] logRootFiles = logRoot.toFile().listFiles(); if (logRootFiles != null) { for (File topoDir : logRootFiles) { File[] topoDirFiles = topoDir.listFiles(); if (topoDirFiles != null) { for (File portDir : topoDirFiles) { if (portDir.getName().equals(port.toString())) { fileResults.addAll(directoryCleaner.getFilesForDir(portDir.toPath())); } } } } } } } else { if (port == null) { fileResults = new ArrayList<>(); Path topoDir = logRoot.resolve(topologyId).toAbsolutePath().normalize(); if (!topoDir.startsWith(logRoot)) { return LogviewerResponseBuilder.buildSuccessJsonResponse(Collections.emptyList(), callback, origin); } if (topoDir.toFile().exists()) { File[] topoDirFiles = topoDir.toFile().listFiles(); if (topoDirFiles != null) { for (File portDir : topoDirFiles) { fileResults.addAll(directoryCleaner.getFilesForDir(portDir.toPath())); } } } } else { File portDir = ConfigUtils.getWorkerDirFromRoot(logRoot.toString(), topologyId, port).getCanonicalFile(); if (!portDir.getPath().startsWith(logRoot.toString())) { return LogviewerResponseBuilder.buildSuccessJsonResponse(Collections.emptyList(), callback, origin); } if (portDir.exists()) { fileResults = directoryCleaner.getFilesForDir(portDir.toPath()); } } } List<String> files; if (fileResults != null) { files = fileResults.stream() .map(WorkerLogs::getTopologyPortWorkerLog) .sorted().collect(toList()); } else { files = new ArrayList<>(); } return LogviewerResponseBuilder.buildSuccessJsonResponse(files, callback, origin); }
@Test public void testListLogFiles() throws IOException { String rootPath = Files.createTempDirectory("workers-artifacts").toFile().getCanonicalPath(); File file1 = new File(String.join(File.separator, rootPath, "topoA", "1111"), "worker.log"); File file2 = new File(String.join(File.separator, rootPath, "topoA", "2222"), "worker.log"); File file3 = new File(String.join(File.separator, rootPath, "topoB", "1111"), "worker.log"); file1.getParentFile().mkdirs(); file2.getParentFile().mkdirs(); file3.getParentFile().mkdirs(); file1.createNewFile(); file2.createNewFile(); file3.createNewFile(); String origin = "www.origin.server.net"; Map<String, Object> stormConf = Utils.readStormConfig(); StormMetricsRegistry metricsRegistry = new StormMetricsRegistry(); LogviewerLogPageHandler handler = new LogviewerLogPageHandler(rootPath, rootPath, new WorkerLogs(stormConf, Paths.get(rootPath), metricsRegistry), new ResourceAuthorizer(stormConf), metricsRegistry); final Response expectedAll = LogviewerResponseBuilder.buildSuccessJsonResponse( List.of("topoA/1111/worker.log", "topoA/2222/worker.log", "topoB/1111/worker.log"), null, origin ); final Response expectedFilterPort = LogviewerResponseBuilder.buildSuccessJsonResponse( List.of("topoA/1111/worker.log", "topoB/1111/worker.log"), null, origin ); final Response expectedFilterTopoId = LogviewerResponseBuilder.buildSuccessJsonResponse( List.of("topoB/1111/worker.log"), null, origin ); final Response returnedAll = handler.listLogFiles("user", null, null, null, origin); final Response returnedFilterPort = handler.listLogFiles("user", 1111, null, null, origin); final Response returnedFilterTopoId = handler.listLogFiles("user", null, "topoB", null, origin); Utils.forceDelete(rootPath); assertEqualsJsonResponse(expectedAll, returnedAll, List.class); assertEqualsJsonResponse(expectedFilterPort, returnedFilterPort, List.class); assertEqualsJsonResponse(expectedFilterTopoId, returnedFilterTopoId, List.class); }
@WorkerThread @Override public Unit call() throws IOException, StreamNotFoundException, ShellNotRunningException, IllegalArgumentException { OutputStream outputStream; File destFile = null; switch (fileAbstraction.scheme) { case CONTENT: Objects.requireNonNull(fileAbstraction.uri); if (fileAbstraction.uri.getAuthority().equals(context.get().getPackageName())) { DocumentFile documentFile = DocumentFile.fromSingleUri(AppConfig.getInstance(), fileAbstraction.uri); if (documentFile != null && documentFile.exists() && documentFile.canWrite()) { outputStream = contentResolver.openOutputStream(fileAbstraction.uri, "wt"); } else { destFile = FileUtils.fromContentUri(fileAbstraction.uri); outputStream = openFile(destFile, context.get()); } } else { outputStream = contentResolver.openOutputStream(fileAbstraction.uri, "wt"); } break; case FILE: final HybridFileParcelable hybridFileParcelable = fileAbstraction.hybridFileParcelable; Objects.requireNonNull(hybridFileParcelable); Context context = this.context.get(); if (context == null) { return null; } outputStream = openFile(hybridFileParcelable.getFile(), context); destFile = fileAbstraction.hybridFileParcelable.getFile(); break; default: throw new IllegalArgumentException( "The scheme for '" + fileAbstraction.scheme + "' cannot be processed!"); } Objects.requireNonNull(outputStream); outputStream.write(dataToSave.getBytes()); outputStream.close(); if (cachedFile != null && cachedFile.exists() && destFile != null) { // cat cache content to original file and delete cache file ConcatenateFileCommand.INSTANCE.concatenateFile(cachedFile.getPath(), destFile.getPath()); cachedFile.delete(); } return Unit.INSTANCE; }
@Test @Config(shadows = {ShellNotRunningRootUtils.class}) public void testWriteFileRootShellNotRunning() throws IOException, StreamNotFoundException, ShellNotRunningException { File file = new File(Environment.getExternalStorageDirectory(), "test.txt"); Uri uri = Uri.fromFile(file); File cacheFile = File.createTempFile("test.txt", "cache"); cacheFile.deleteOnExit(); Context ctx = ApplicationProvider.getApplicationContext(); ContentResolver cr = ctx.getContentResolver(); try { WriteTextFileCallable task = new WriteTextFileCallable( ctx, cr, new EditableFileAbstraction(ctx, uri), contents, cacheFile, true); task.call(); } catch (ShellNotRunningException e) { return; } fail(); }
public List<R> scanForResourcesPath(Path resourcePath) {
    requireNonNull(resourcePath, "resourcePath must not be null");
    List<R> resources = new ArrayList<>();
    pathScanner.findResourcesForPath(
            resourcePath,
            canLoad,
            processResource(DEFAULT_PACKAGE_NAME, NULL_FILTER, createUriResource(), resources::add));
    return resources;
}
@Test
@DisabledOnOs(value = OS.WINDOWS,
        disabledReason = "Only works if the repository was explicitly cloned with symlinks activated "
                + "and developer mode in Windows is enabled")
void scanForResourcesPathSymlink() {
    File file = new File("src/test/resource-symlink/test/resource.txt");
    List<URI> resources = resourceScanner.scanForResourcesPath(file.toPath());
    assertThat(resources, contains(file.toURI()));
}
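// Hypothetical fixture sketch for the symlink this test walks through: a
// directory link created with java.nio. The paths are made up; on Windows,
// Files.createSymbolicLink additionally needs developer mode or elevated
// rights, which is why the test is disabled there.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

class SymlinkFixtureSketch {
    public static void main(String[] args) throws IOException {
        Path target = Path.of("src/test/resources/test").toAbsolutePath(); // assumed real directory
        Path link = Path.of("src/test/resource-symlink/test");             // link the test resolves
        Files.createSymbolicLink(link, target);
    }
}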
@Override
public void onSuccess(T result) {
    markTimings();
    _callback.onSuccess(result);
}
@Test(dataProvider = "timingImportanceThreshold")
public void testBuilder(TimingImportance timingImportanceThreshold) {
    final RequestContext requestContext = new RequestContext();
    if (timingImportanceThreshold != null) {
        requestContext.putLocalAttr(TimingContextUtil.TIMING_IMPORTANCE_THRESHOLD_KEY_NAME, timingImportanceThreshold);
    }

    final Callback<Long> callback = new Callback<Long>() {
        @Override
        public void onSuccess(Long result) {
            Map<TimingKey, TimingContextUtil.TimingContext> timings = TimingContextUtil.getTimingsMap(requestContext);
            // Ensure that keys have been filtered out correctly
            if (timingImportanceThreshold == null || TimingImportance.LOW.isAtLeast(timingImportanceThreshold)) {
                Assert.assertTrue(timings.containsKey(KEY_L));
                Assert.assertTrue(timings.containsKey(KEY_M));
            } else if (TimingImportance.MEDIUM.isAtLeast(timingImportanceThreshold)) {
                Assert.assertFalse(timings.containsKey(KEY_L));
                Assert.assertTrue(timings.containsKey(KEY_M));
            } else {
                Assert.assertFalse(timings.containsKey(KEY_L));
                Assert.assertFalse(timings.containsKey(KEY_M));
            }
        }

        @Override
        public void onError(Throwable e) {}
    };

    final Callback<Long> timingCallback = new TimingCallback.Builder<>(callback, requestContext)
            .addBeginTimingKey(KEY_L)
            .addBeginTimingKey(KEY_M)
            .addEndTimingKey(KEY_L)
            .addEndTimingKey(KEY_M)
            .build();

    // Ensure that the builder can correctly determine when to return the original callback
    if (timingImportanceThreshold == null || !timingImportanceThreshold.equals(TimingImportance.HIGH)) {
        Assert.assertTrue(timingCallback instanceof TimingCallback);
    } else {
        Assert.assertFalse(timingCallback instanceof TimingCallback);
    }

    timingCallback.onSuccess(1L);
}
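// Sketch of the threshold filtering asserted above, assuming an ordinal-based
// isAtLeast(..) comparable to TimingImportance's. The enum here is illustrative,
// not the real type.
enum ImportanceSketch {
    LOW, MEDIUM, HIGH;

    boolean isAtLeast(ImportanceSketch other) {
        return ordinal() >= other.ordinal();
    }

    public static void main(String[] args) {
        // With a MEDIUM threshold: LOW keys are dropped, MEDIUM keys survive.
        System.out.println(LOW.isAtLeast(MEDIUM));    // false -> KEY_L filtered out
        System.out.println(MEDIUM.isAtLeast(MEDIUM)); // true  -> KEY_M kept
    }
}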
@Override
public UserIdentity login(String username, Object credentials, ServletRequest request) {
    if (!(credentials instanceof SignedJWT)) {
        return null;
    }
    if (!(request instanceof HttpServletRequest)) {
        return null;
    }
    SignedJWT jwtToken = (SignedJWT) credentials;
    JWTClaimsSet claimsSet;
    boolean valid;
    try {
        claimsSet = jwtToken.getJWTClaimsSet();
        valid = validateToken(jwtToken, claimsSet, username);
    } catch (ParseException e) {
        JWT_LOGGER.warn(String.format("%s: Couldn't parse a JWT token", username), e);
        return null;
    }
    if (!valid) {
        return null;
    }
    String serializedToken = (String) request.getAttribute(JwtAuthenticator.JWT_TOKEN_REQUEST_ATTRIBUTE);
    UserIdentity rolesDelegate = _authorizationService.getUserIdentity((HttpServletRequest) request, username);
    if (rolesDelegate == null) {
        return null;
    }
    return getUserIdentity(jwtToken, claimsSet, serializedToken, username, rolesDelegate);
}
@Test
public void testFailSignatureValidation() throws Exception {
    UserStore testUserStore = new UserStore();
    testUserStore.addUser(TEST_USER, SecurityUtils.NO_CREDENTIAL, new String[] {"USER"});
    TokenGenerator.TokenAndKeys tokenAndKeys = TokenGenerator.generateToken(TEST_USER);
    // This will be signed with a different key
    TokenGenerator.TokenAndKeys tokenAndKeys2 = TokenGenerator.generateToken(TEST_USER);
    JwtLoginService loginService = new JwtLoginService(
            new UserStoreAuthorizationService(testUserStore), tokenAndKeys2.publicKey(), null);

    SignedJWT jwtToken = SignedJWT.parse(tokenAndKeys.token());
    HttpServletRequest request = mock(HttpServletRequest.class);
    UserIdentity identity = loginService.login(TEST_USER, jwtToken, request);
    assertNull(identity);
}
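// Hedged illustration of the failure mode exercised above, using the Nimbus
// JOSE+JWT API directly: a token signed with one RSA key pair does not verify
// against a different public key. Key size and claims are illustrative only.
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.interfaces.RSAPublicKey;
import com.nimbusds.jose.JWSAlgorithm;
import com.nimbusds.jose.JWSHeader;
import com.nimbusds.jose.crypto.RSASSASigner;
import com.nimbusds.jose.crypto.RSASSAVerifier;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.SignedJWT;

class SignatureMismatchDemo {
    public static void main(String[] args) throws Exception {
        KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA");
        generator.initialize(2048);
        KeyPair signingPair = generator.generateKeyPair();
        KeyPair otherPair = generator.generateKeyPair();

        SignedJWT jwt = new SignedJWT(
                new JWSHeader(JWSAlgorithm.RS256),
                new JWTClaimsSet.Builder().subject("testUser").build());
        jwt.sign(new RSASSASigner(signingPair.getPrivate()));

        // Verifying against the wrong public key fails, just like the login above.
        boolean valid = jwt.verify(new RSASSAVerifier((RSAPublicKey) otherPair.getPublic()));
        System.out.println("verifies with wrong key? " + valid); // false
    }
}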
@PublicAPI(usage = ACCESS)
public static PackageMatcher of(String packageIdentifier) {
    return new PackageMatcher(packageIdentifier);
}
@Test
public void should_reject_more_than_two_dots_in_a_row() {
    assertThatThrownBy(() -> PackageMatcher.of("some...pkg"))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage("Package Identifier may not contain more than two '.' in a row");
}
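// Usage sketch for the happy path, assuming ArchUnit's documented matches(..)
// API: '*' stands for a single package part, '..' for any number of parts.
// The package names below are made up.
import com.tngtech.archunit.base.PackageMatcher;

class PackageMatcherDemo {
    public static void main(String[] args) {
        PackageMatcher matcher = PackageMatcher.of("..service.*");
        System.out.println(matcher.matches("com.example.service.impl"));      // expected: true
        System.out.println(matcher.matches("com.example.service.impl.deep")); // expected: false, '*' is one part
    }
}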
@Override
public boolean evaluate(Map<String, Object> values) {
    Boolean toReturn = null;
    for (KiePMMLPredicate kiePMMLPredicate : kiePMMLPredicates) {
        Boolean evaluation = kiePMMLPredicate.evaluate(values);
        switch (booleanOperator) {
            case OR:
                toReturn = orOperator(toReturn, evaluation);
                break;
            case AND:
                toReturn = andOperator(toReturn, evaluation);
                break;
            case XOR:
                toReturn = xorOperator(toReturn, evaluation);
                break;
            case SURROGATE:
                toReturn = surrogateOperator(toReturn, evaluation);
                break;
            default:
                throw new KiePMMLException("Unknown BOOLEAN_OPERATOR " + booleanOperator);
        }
    }
    return toReturn != null && toReturn;
}
@Test
void evaluateCompoundPredicateOr() {
    ARRAY_TYPE arrayType = ARRAY_TYPE.STRING;
    List<Object> stringValues = getObjects(arrayType, 4);
    KiePMMLSimpleSetPredicate kiePMMLSimpleSetPredicateString = getKiePMMLSimpleSetPredicate(
            SIMPLE_SET_PREDICATE_STRING_NAME, stringValues, arrayType, IN_NOTIN.IN);
    arrayType = ARRAY_TYPE.INT;
    List<Object> intValues = getObjects(arrayType, 4);
    KiePMMLSimpleSetPredicate kiePMMLSimpleSetPredicateInt = getKiePMMLSimpleSetPredicate(
            SIMPLE_SET_PREDICATE_INT_NAME, intValues, arrayType, IN_NOTIN.NOT_IN);
    KiePMMLCompoundPredicate kiePMMLCompoundPredicate = getKiePMMLCompoundPredicate(
            BOOLEAN_OPERATOR.OR,
            Arrays.asList(kiePMMLSimpleSetPredicateString, kiePMMLSimpleSetPredicateInt));

    // false OR false -> false: the string is not IN its set, the int is IN the NOT_IN set
    Map<String, Object> inputData = new HashMap<>();
    inputData.put(SIMPLE_SET_PREDICATE_STRING_NAME, "NOT");
    inputData.put(SIMPLE_SET_PREDICATE_INT_NAME, intValues.get(0));
    assertThat(kiePMMLCompoundPredicate.evaluate(inputData)).isFalse();

    // true OR false -> true
    inputData.put(SIMPLE_SET_PREDICATE_STRING_NAME, stringValues.get(0));
    inputData.put(SIMPLE_SET_PREDICATE_INT_NAME, intValues.get(0));
    assertThat(kiePMMLCompoundPredicate.evaluate(inputData)).isTrue();

    // false OR true -> true
    inputData = new HashMap<>();
    inputData.put(SIMPLE_SET_PREDICATE_STRING_NAME, "NOT");
    inputData.put(SIMPLE_SET_PREDICATE_INT_NAME, "234");
    assertThat(kiePMMLCompoundPredicate.evaluate(inputData)).isTrue();

    // true OR true -> true
    inputData = new HashMap<>();
    inputData.put(SIMPLE_SET_PREDICATE_STRING_NAME, stringValues.get(0));
    inputData.put(SIMPLE_SET_PREDICATE_INT_NAME, "234");
    assertThat(kiePMMLCompoundPredicate.evaluate(inputData)).isTrue();
}
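// A minimal sketch of the three-valued combination that evaluate(..) above
// relies on: a null accumulator means "no predicate seen yet", so the first
// evaluation seeds the result. These helpers are illustrative, not the actual
// orOperator/andOperator implementations.
class NullAwareBooleansSketch {
    static Boolean or(Boolean acc, Boolean next) {
        return acc == null ? next : acc || next;
    }

    static Boolean and(Boolean acc, Boolean next) {
        return acc == null ? next : acc && next;
    }

    public static void main(String[] args) {
        System.out.println(or(null, false));             // false: first result seeds the accumulator
        System.out.println(or(false, true));             // true
        System.out.println(and(or(null, true), false));  // false
    }
}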
public static String removeModifierSuffix(String fullName) {
    int indexOfFirstOpeningToken = fullName.indexOf(MODIFIER_OPENING_TOKEN);
    if (indexOfFirstOpeningToken == -1) {
        return fullName;
    }
    int indexOfSecondOpeningToken = fullName.lastIndexOf(MODIFIER_OPENING_TOKEN);
    if (indexOfSecondOpeningToken != indexOfFirstOpeningToken) {
        throw new IllegalArgumentException("Attribute name '" + fullName
                + "' is not valid as it contains more than one " + MODIFIER_OPENING_TOKEN);
    }
    int indexOfFirstClosingToken = fullName.indexOf(MODIFIER_CLOSING_TOKEN);
    if (indexOfFirstClosingToken != fullName.length() - 1) {
        throw new IllegalArgumentException("Attribute name '" + fullName
                + "' is not valid as the last character is not " + MODIFIER_CLOSING_TOKEN);
    }
    return fullName.substring(0, indexOfFirstOpeningToken);
}
@Test
public void removeModifierSuffix_whenSuffixIsPresent_thenReturnStringWithoutSuffix() {
    String attributeWithModifier = "foo[*]";
    String result = SuffixModifierUtils.removeModifierSuffix(attributeWithModifier);
    assertEquals("foo", result);
}
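// Quick usage sketch for the focal method above, assuming '[' and ']' are the
// modifier tokens (as the "foo[*]" test input suggests):
class SuffixModifierDemo {
    public static void main(String[] args) {
        System.out.println(SuffixModifierUtils.removeModifierSuffix("foo"));    // "foo": no suffix, returned unchanged
        System.out.println(SuffixModifierUtils.removeModifierSuffix("foo[*]")); // "foo"
        try {
            SuffixModifierUtils.removeModifierSuffix("foo[*]bar"); // ']' is not last -> rejected
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}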
public ActionPermissionResolver getActionPermissionResolver() { return actionPermissionResolver; }
@Test
public void testDefaults() {
    ActionPermissionResolver resolver = filter.getActionPermissionResolver();
    assertNotNull(resolver);
    assertTrue(resolver instanceof DestinationActionPermissionResolver);
}
public static <T extends NamedConfig> T getConfig(ConfigPatternMatcher configPatternMatcher,
                                                  Map<String, T> configs, String name, Class clazz) {
    return getConfig(configPatternMatcher, configs, name, clazz, (BiConsumer<T, String>) DEFAULT_NAME_SETTER);
}
@Test
public void getExistingConfig() {
    QueueConfig aDefault = new QueueConfig("newConfig");
    aDefault.setBackupCount(5);
    queueConfigs.put(aDefault.getName(), aDefault);

    QueueConfig newConfig = ConfigUtils.getConfig(configPatternMatcher, queueConfigs, "newConfig", QueueConfig.class);

    assertEquals("newConfig", newConfig.getName());
    assertEquals(5, newConfig.getBackupCount());
    assertEquals(1, queueConfigs.size());
    assertTrue(queueConfigs.containsKey("newConfig"));
}
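// Conceptual sketch (not Hazelcast's actual code) of the lookup the test
// exercises: an exact name hit is returned directly; otherwise a pattern
// matcher picks a wildcard entry. The matcher is simplified to a BiPredicate.
import java.util.Map;
import java.util.function.BiPredicate;

class ConfigLookupSketch {
    static <T> T lookup(Map<String, T> configs, String name, BiPredicate<String, String> patternMatches) {
        T exact = configs.get(name);
        if (exact != null) {
            return exact; // the "getExistingConfig" case above
        }
        return configs.entrySet().stream()
                .filter(e -> patternMatches.test(e.getKey(), name))
                .map(Map.Entry::getValue)
                .findFirst()
                .orElse(null);
    }

    public static void main(String[] args) {
        Map<String, String> configs = Map.of("newConfig", "exact", "queue*", "wildcard");
        BiPredicate<String, String> prefixMatch = (pattern, n) ->
                pattern.endsWith("*") && n.startsWith(pattern.substring(0, pattern.length() - 1));
        System.out.println(lookup(configs, "newConfig", prefixMatch)); // "exact"
        System.out.println(lookup(configs, "queueA", prefixMatch));    // "wildcard"
    }
}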
@Override
public Mono<Authentication> convert(ServerWebExchange exchange) {
    return super.convert(exchange)
            // validate the password
            .<Authentication>flatMap(token -> {
                var credentials = (String) token.getCredentials();
                byte[] credentialsBytes;
                try {
                    credentialsBytes = Base64.getDecoder().decode(credentials);
                } catch (IllegalArgumentException e) {
                    // the credentials are not in valid Base64 scheme
                    return Mono.error(new BadCredentialsException("Invalid Base64 scheme."));
                }
                return cryptoService.decrypt(credentialsBytes)
                        .onErrorMap(InvalidEncryptedMessageException.class,
                                error -> new BadCredentialsException("Invalid credential.", error))
                        .map(decryptedCredentials -> new UsernamePasswordAuthenticationToken(
                                token.getPrincipal(), new String(decryptedCredentials, UTF_8)));
            })
            .transformDeferred(createIpBasedRateLimiter(exchange))
            .onErrorMap(RequestNotPermitted.class, RateLimitExceededException::new);
}
@Test
void applyUsernameAndInvalidPasswordThenBadCredentialsException() {
    var username = "username";
    var password = "password";
    formData.add("username", username);
    formData.add("password", Base64.getEncoder().encodeToString(password.getBytes()));
    when(cryptoService.decrypt(password.getBytes()))
            .thenReturn(Mono.error(() -> new InvalidEncryptedMessageException("invalid message")));

    StepVerifier.create(converter.convert(exchange))
            .verifyError(BadCredentialsException.class);
    verify(cryptoService).decrypt(password.getBytes());
}
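// Small illustration of the first guard in convert(..) above: credentials that
// are not valid Base64 make Base64.getDecoder().decode(..) throw
// IllegalArgumentException, which the converter maps to BadCredentialsException.
import java.util.Base64;

class Base64GuardDemo {
    public static void main(String[] args) {
        try {
            Base64.getDecoder().decode("not-base64!!"); // '!' is outside the Base64 alphabet
        } catch (IllegalArgumentException e) {
            // the converter surfaces this as BadCredentialsException("Invalid Base64 scheme.")
            System.out.println("rejected: " + e.getMessage());
        }
    }
}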