focal_method | test_case
---|---
@Override
public int handshake(final ChannelHandlerContext context) {
int result = ConnectionIdGenerator.getInstance().nextId();
connectionPhase = MySQLConnectionPhase.AUTH_PHASE_FAST_PATH;
boolean sslEnabled = ProxySSLContext.getInstance().isSSLEnabled();
if (sslEnabled) {
context.pipeline().addFirst(MySQLSSLRequestHandler.class.getSimpleName(), new MySQLSSLRequestHandler());
}
context.writeAndFlush(new MySQLHandshakePacket(result, sslEnabled, authPluginData));
MySQLStatementIdGenerator.getInstance().registerConnection(result);
return result;
}
|
@Test
void assertHandshakeWithSSLEnabled() {
when(ProxySSLContext.getInstance().isSSLEnabled()).thenReturn(true);
ChannelHandlerContext context = mockChannelHandlerContext();
when(context.pipeline()).thenReturn(mock(ChannelPipeline.class));
assertTrue(authenticationEngine.handshake(context) > 0);
verify(context.pipeline()).addFirst(eq(MySQLSSLRequestHandler.class.getSimpleName()), any(MySQLSSLRequestHandler.class));
verify(context).writeAndFlush(any(MySQLHandshakePacket.class));
}
|
@GET
@Produces({ MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
MediaType.APPLICATION_XML + "; " + JettyUtils.UTF_8 })
public AppInfo get() {
return getAppInfo();
}
|
@Test
public void testInvalidUri2() throws JSONException, Exception {
WebResource r = resource();
String responseStr = "";
try {
responseStr = r.path("ws").path("v1").path("invalid")
.accept(MediaType.APPLICATION_JSON).get(String.class);
fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertResponseStatusCode(Status.NOT_FOUND, response.getStatusInfo());
WebServicesTestUtils.checkStringMatch(
"error string exists and shouldn't", "", responseStr);
}
}
|
public static void parseParams(Map<String, String> params) {
if (params != null && params.size() > 0) {
for (Map.Entry<String, String> entry : UTM_LINK_MAP.entrySet()) {
String utmKey = entry.getValue();
String value = params.get(utmKey);
if (!TextUtils.isEmpty(value)) {
sUtmProperties.put(UTM_MAP.get(entry.getKey()), value);
sLatestUtmProperties.put(LATEST_UTM_MAP.get(entry.getKey()), value);
}
}
for (String sourceKey : sChannelSourceKeySet) {
try {
// Validate the key; skip any key that is not well-formed.
if (!SADataHelper.assertPropertyKey(sourceKey)) {
continue;
}
String value = params.get(sourceKey);
if (!TextUtils.isEmpty(value)) {
sUtmProperties.put(sourceKey, value);
sLatestUtmProperties.put("_latest_" + sourceKey, value);
}
} catch (Exception e) {
SALog.printStackTrace(e);
}
}
}
}
|
@Test
public void parseParams() {
Map<String, String> params = new HashMap<>();
params.put("utm_source", "source_value");
params.put("utm_medium", "medium_value");
ChannelUtils.parseParams(params);
JSONObject jsonObject = ChannelUtils.getUtmProperties();
Assert.assertTrue(jsonObject.has("$utm_source"));
Assert.assertTrue(jsonObject.has("$utm_medium"));
}
|
@Override
public int removeRangeByRank(int startIndex, int endIndex) {
return get(removeRangeByRankAsync(startIndex, endIndex));
}
|
@Test
public void testRemoveRangeByRank() {
RScoredSortedSet<String> set = redisson.getScoredSortedSet("simple");
set.add(0.1, "a");
set.add(0.2, "b");
set.add(0.3, "c");
set.add(0.4, "d");
set.add(0.5, "e");
set.add(0.6, "f");
set.add(0.7, "g");
Assertions.assertEquals(2, set.removeRangeByRank(0, 1));
assertThat(set).containsExactly("c", "d", "e", "f", "g");
}
|
public static int checkLessThanOrEqual(int n, long expected, String name)
{
if (n > expected)
{
throw new IllegalArgumentException(name + ": " + n + " (expected: <= " + expected + ')');
}
return n;
}
|
@Test
public void checkLessThanOrEqualMustPassIfArgumentIsLessThanExpected()
{
final int n = 0;
final int actual = RangeUtil.checkLessThanOrEqual(n, 1, "var");
assertThat(actual, is(equalTo(n)));
}
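A hedged companion test for the throwing branch of the focal method (assuming JUnit's assertThrows is available alongside the Hamcrest matchers used above); illustrative only, not part of the original suite.
@Test
public void checkLessThanOrEqualMustThrowIfArgumentIsGreaterThanExpected()
{
// Hypothetical companion test: n > expected must raise IllegalArgumentException.
assertThrows(IllegalArgumentException.class, () -> RangeUtil.checkLessThanOrEqual(2, 1, "var"));
}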
|
@Override
public HttpResponseOutputStream<StorageObject> write(final Path file, final TransferStatus status, final ConnectionCallback callback) throws BackgroundException {
final DelayedHttpEntityCallable<StorageObject> command = new DelayedHttpEntityCallable<StorageObject>(file) {
@Override
public StorageObject call(final HttpEntity entity) throws BackgroundException {
try {
// POST /upload/storage/v1/b/myBucket/o
final StringBuilder uri = new StringBuilder(String.format("%supload/storage/v1/b/%s/o?uploadType=resumable",
session.getClient().getRootUrl(), containerService.getContainer(file).getName()));
if(containerService.getContainer(file).attributes().getCustom().containsKey(GoogleStorageAttributesFinderFeature.KEY_REQUESTER_PAYS)) {
uri.append(String.format("&userProject=%s", session.getHost().getCredentials().getUsername()));
}
if(!Acl.EMPTY.equals(status.getAcl())) {
if(status.getAcl().isCanned()) {
uri.append("&predefinedAcl=");
if(Acl.CANNED_PRIVATE.equals(status.getAcl())) {
uri.append("private");
}
else if(Acl.CANNED_PUBLIC_READ.equals(status.getAcl())) {
uri.append("publicRead");
}
else if(Acl.CANNED_PUBLIC_READ_WRITE.equals(status.getAcl())) {
uri.append("publicReadWrite");
}
else if(Acl.CANNED_AUTHENTICATED_READ.equals(status.getAcl())) {
uri.append("authenticatedRead");
}
else if(Acl.CANNED_BUCKET_OWNER_FULLCONTROL.equals(status.getAcl())) {
uri.append("bucketOwnerFullControl");
}
else if(Acl.CANNED_BUCKET_OWNER_READ.equals(status.getAcl())) {
uri.append("bucketOwnerRead");
}
// Reset the ACL in the transfer status so the upload filter skips setting it again; the canned ACL has already been applied above
status.setAcl(Acl.EMPTY);
}
}
final HttpEntityEnclosingRequestBase request = new HttpPost(uri.toString());
final StringBuilder metadata = new StringBuilder();
metadata.append(String.format("{\"name\": \"%s\"", containerService.getKey(file)));
metadata.append(",\"metadata\": {");
for(Iterator<Map.Entry<String, String>> iter = status.getMetadata().entrySet().iterator(); iter.hasNext(); ) {
final Map.Entry<String, String> item = iter.next();
metadata.append(String.format("\"%s\": \"%s\"", item.getKey(), item.getValue()));
if(iter.hasNext()) {
metadata.append(",");
}
}
metadata.append("}");
if(StringUtils.isNotBlank(status.getMime())) {
metadata.append(String.format(", \"contentType\": \"%s\"", status.getMime()));
}
if(StringUtils.isNotBlank(status.getStorageClass())) {
metadata.append(String.format(", \"storageClass\": \"%s\"", status.getStorageClass()));
}
if(null != status.getModified()) {
metadata.append(String.format(", \"customTime\": \"%s\"",
new ISO8601DateFormatter().format(status.getModified(), TimeZone.getTimeZone("UTC"))));
}
metadata.append("}");
request.setEntity(new StringEntity(metadata.toString(),
ContentType.create("application/json", StandardCharsets.UTF_8.name())));
if(StringUtils.isNotBlank(status.getMime())) {
// Set to the media MIME type of the upload data to be transferred in subsequent requests.
request.addHeader("X-Upload-Content-Type", status.getMime());
}
request.addHeader(HTTP.CONTENT_TYPE, MEDIA_TYPE);
final HttpClient client = session.getHttpClient();
final HttpResponse response = client.execute(request);
try {
switch(response.getStatusLine().getStatusCode()) {
case HttpStatus.SC_OK:
break;
default:
throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
new HttpResponseException(response.getStatusLine().getStatusCode(),
GoogleStorageExceptionMappingService.parse(response)), file);
}
}
finally {
EntityUtils.consume(response.getEntity());
}
if(response.containsHeader(HttpHeaders.LOCATION)) {
final String putTarget = response.getFirstHeader(HttpHeaders.LOCATION).getValue();
// Upload the file
final HttpPut put = new HttpPut(putTarget);
put.setEntity(entity);
final HttpResponse putResponse = client.execute(put);
try {
switch(putResponse.getStatusLine().getStatusCode()) {
case HttpStatus.SC_OK:
case HttpStatus.SC_CREATED:
return session.getClient().getObjectParser().parseAndClose(new InputStreamReader(
putResponse.getEntity().getContent(), StandardCharsets.UTF_8), StorageObject.class);
default:
throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
new HttpResponseException(putResponse.getStatusLine().getStatusCode(),
GoogleStorageExceptionMappingService.parse(putResponse)), file);
}
}
finally {
EntityUtils.consume(putResponse.getEntity());
}
}
else {
throw new DefaultHttpResponseExceptionMappingService().map("Upload {0} failed",
new HttpResponseException(response.getStatusLine().getStatusCode(),
GoogleStorageExceptionMappingService.parse(response)), file);
}
}
catch(IOException e) {
throw new GoogleStorageExceptionMappingService().map("Upload {0} failed", e, file);
}
}
@Override
public long getContentLength() {
return status.getLength();
}
};
return this.write(file, status, command);
}
|
@Test
public void testWriteCustomMetadata() throws Exception {
final Path container = new Path("cyberduck-test-eu", EnumSet.of(Path.Type.directory, Path.Type.volume));
final Path test = new Path(container, new AlphanumericRandomStringService().random(), EnumSet.of(Path.Type.file));
final TransferStatus status = new TransferStatus();
final HashMap<String, String> metadata = new HashMap<>();
metadata.put("k1", "v1");
status.setMetadata(metadata);
new GoogleStorageWriteFeature(session).write(test, status, new DisabledConnectionCallback()).close();
assertEquals(metadata, new GoogleStorageMetadataFeature(session).getMetadata(test));
metadata.put("k2", "v2");
new GoogleStorageWriteFeature(session).write(test, status, new DisabledConnectionCallback()).close();
assertEquals(metadata, new GoogleStorageMetadataFeature(session).getMetadata(test));
new GoogleStorageDeleteFeature(session).delete(Collections.singletonList(test), new DisabledLoginCallback(), new Delete.DisabledCallback());
}
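For orientation, a hedged sketch of the two-step resumable upload flow the focal method implements: an initiating POST carrying the object metadata returns an upload session URI in the Location header, and the payload is then PUT to that URI. The endpoint, bucket and object names below are illustrative placeholders, and client stands for an already-authenticated Apache HttpClient; this is a sketch, not the project's API.
static HttpResponse resumableUploadSketch(final HttpClient client, final byte[] content) throws IOException {
// 1) Initiate the resumable session with the object metadata (placeholder bucket/object names).
final HttpPost initiate = new HttpPost(
"https://storage.googleapis.com/upload/storage/v1/b/my-bucket/o?uploadType=resumable");
initiate.setEntity(new StringEntity("{\"name\": \"my-object\"}",
ContentType.create("application/json", StandardCharsets.UTF_8.name())));
final HttpResponse initiated = client.execute(initiate); // expect 200 OK
// 2) PUT the actual bytes to the session URI returned in the Location header.
final String sessionUri = initiated.getFirstHeader(HttpHeaders.LOCATION).getValue();
final HttpPut upload = new HttpPut(sessionUri);
upload.setEntity(new ByteArrayEntity(content));
return client.execute(upload); // expect 200 or 201 with the object resource as JSON
}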
|
@Override
public String getName() {
return ANALYZER_NAME;
}
|
@Test
public void testGetName() {
assertEquals("Composer.lock analyzer", analyzer.getName());
}
|
@Override
public SeekableByteChannel getChannel() {
return new RedissonByteChannel();
}
|
@Test
public void testChannelOverwrite() throws IOException {
RBinaryStream stream = redisson.getBinaryStream("test");
SeekableByteChannel c = stream.getChannel();
assertThat(c.write(ByteBuffer.wrap(new byte[]{1, 2, 3, 4, 5, 6, 7}))).isEqualTo(7);
c.position(3);
assertThat(c.write(ByteBuffer.wrap(new byte[]{0, 9, 10}))).isEqualTo(3);
assertThat(c.position()).isEqualTo(6);
ByteBuffer b = ByteBuffer.allocate(3);
int r = c.read(b);
assertThat(c.position()).isEqualTo(7);
assertThat(r).isEqualTo(1);
b.flip();
byte[] bb = new byte[b.remaining()];
b.get(bb);
assertThat(bb).isEqualTo(new byte[]{7});
c.position(0);
ByteBuffer state = ByteBuffer.allocate(7);
c.read(state);
byte[] bb1 = new byte[7];
state.flip();
state.get(bb1);
assertThat(bb1).isEqualTo(new byte[]{1, 2, 3, 0, 9, 10, 7});
}
|
@Override
public String put(String key, String value) {
if (value == null) throw new IllegalArgumentException("Null value not allowed as an environment variable: " + key);
return super.put(key, value);
}
|
@Test
public void overrideOrderCalculatorSelfReference() {
EnvVars env = new EnvVars();
EnvVars overrides = new EnvVars();
overrides.put("PATH", "some;${PATH}");
OverrideOrderCalculator calc = new OverrideOrderCalculator(env, overrides);
List<String> order = calc.getOrderedVariableNames();
assertEquals(List.of("PATH"), order);
}
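The test above exercises OverrideOrderCalculator rather than the null guard in the focal put method itself; a minimal companion test for that guard might look like the following (assuming JUnit's assertThrows is available in the test class). Hypothetical, for illustration only.
@Test
public void putRejectsNullValue() {
// Hypothetical companion test covering the focal method's null-value guard.
EnvVars env = new EnvVars();
assertThrows(IllegalArgumentException.class, () -> env.put("PATH", null));
}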
|
public static Object parseType(String name, Object value, Type type) {
try {
if (value == null) return null;
String trimmed = null;
if (value instanceof String)
trimmed = ((String) value).trim();
switch (type) {
case BOOLEAN:
if (value instanceof String) {
if (trimmed.equalsIgnoreCase("true"))
return true;
else if (trimmed.equalsIgnoreCase("false"))
return false;
else
throw new ConfigException(name, value, "Expected value to be either true or false");
} else if (value instanceof Boolean)
return value;
else
throw new ConfigException(name, value, "Expected value to be either true or false");
case PASSWORD:
if (value instanceof Password)
return value;
else if (value instanceof String)
return new Password(trimmed);
else
throw new ConfigException(name, value, "Expected value to be a string, but it was a " + value.getClass().getName());
case STRING:
if (value instanceof String)
return trimmed;
else
throw new ConfigException(name, value, "Expected value to be a string, but it was a " + value.getClass().getName());
case INT:
if (value instanceof Integer) {
return value;
} else if (value instanceof String) {
return Integer.parseInt(trimmed);
} else {
throw new ConfigException(name, value, "Expected value to be a 32-bit integer, but it was a " + value.getClass().getName());
}
case SHORT:
if (value instanceof Short) {
return value;
} else if (value instanceof String) {
return Short.parseShort(trimmed);
} else {
throw new ConfigException(name, value, "Expected value to be a 16-bit integer (short), but it was a " + value.getClass().getName());
}
case LONG:
if (value instanceof Integer)
return ((Integer) value).longValue();
if (value instanceof Long)
return value;
else if (value instanceof String)
return Long.parseLong(trimmed);
else
throw new ConfigException(name, value, "Expected value to be a 64-bit integer (long), but it was a " + value.getClass().getName());
case DOUBLE:
if (value instanceof Number)
return ((Number) value).doubleValue();
else if (value instanceof String)
return Double.parseDouble(trimmed);
else
throw new ConfigException(name, value, "Expected value to be a double, but it was a " + value.getClass().getName());
case LIST:
if (value instanceof List)
return value;
else if (value instanceof String)
if (trimmed.isEmpty())
return Collections.emptyList();
else
return Arrays.asList(COMMA_WITH_WHITESPACE.split(trimmed, -1));
else
throw new ConfigException(name, value, "Expected a comma separated list.");
case CLASS:
if (value instanceof Class)
return value;
else if (value instanceof String) {
return Utils.loadClass(trimmed, Object.class);
} else
throw new ConfigException(name, value, "Expected a Class instance or class name.");
default:
throw new IllegalStateException("Unknown type.");
}
} catch (NumberFormatException e) {
throw new ConfigException(name, value, "Not a number of type " + type);
} catch (ClassNotFoundException e) {
throw new ConfigException(name, value, "Class " + value + " could not be found.");
}
}
|
@Test
public void testClassWithAlias() {
final String alias = "PluginAlias";
ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
try {
// Could try to use the Plugins class from Connect here, but this should simulate enough
// of the aliasing logic to suffice for this test.
Thread.currentThread().setContextClassLoader(new ClassLoader(originalClassLoader) {
@Override
public Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
if (alias.equals(name)) {
return NestedClass.class;
} else {
return super.loadClass(name, resolve);
}
}
});
ConfigDef.parseType("Test config", alias, Type.CLASS);
} finally {
Thread.currentThread().setContextClassLoader(originalClassLoader);
}
}
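For reference, a minimal usage sketch in the same test style (assuming the usual JUnit assertEquals and java.util.Arrays imports), showing the string-to-typed-value coercions parseType performs for a few Type constants. Hypothetical, not part of the original suite.
@Test
public void parseTypeCoercionSketch() {
assertEquals(Integer.valueOf(3), ConfigDef.parseType("retries", " 3 ", Type.INT)); // trimmed, then parsed
assertEquals(Boolean.TRUE, ConfigDef.parseType("enabled", "TRUE", Type.BOOLEAN)); // case-insensitive
assertEquals(Arrays.asList("a", "b", "c"), ConfigDef.parseType("topics", "a, b,c", Type.LIST)); // comma-separated, surrounding whitespace tolerated
assertEquals(String.class, ConfigDef.parseType("klass", "java.lang.String", Type.CLASS)); // resolved via the context class loader
}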
|
@Override
public CompletableFuture<ShareGroupHeartbeatResponseData> shareGroupHeartbeat(
RequestContext context,
ShareGroupHeartbeatRequestData request
) {
if (!isActive.get()) {
return CompletableFuture.completedFuture(new ShareGroupHeartbeatResponseData()
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
);
}
return runtime.scheduleWriteOperation(
"share-group-heartbeat",
topicPartitionFor(request.groupId()),
Duration.ofMillis(config.offsetCommitTimeoutMs()),
coordinator -> coordinator.shareGroupHeartbeat(context, request)
).exceptionally(exception -> handleOperationException(
"share-group-heartbeat",
request,
exception,
(error, message) -> new ShareGroupHeartbeatResponseData()
.setErrorCode(error.code())
.setErrorMessage(message)
));
}
|
@Test
public void testShareGroupHeartbeat() throws ExecutionException, InterruptedException, TimeoutException {
CoordinatorRuntime<GroupCoordinatorShard, CoordinatorRecord> runtime = mockRuntime();
GroupCoordinatorService service = new GroupCoordinatorService(
new LogContext(),
createConfig(),
runtime,
new GroupCoordinatorMetrics(),
createConfigManager()
);
ShareGroupHeartbeatRequestData request = new ShareGroupHeartbeatRequestData()
.setGroupId("foo");
service.startup(() -> 1);
when(runtime.scheduleWriteOperation(
ArgumentMatchers.eq("share-group-heartbeat"),
ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
ArgumentMatchers.eq(Duration.ofMillis(5000)),
ArgumentMatchers.any()
)).thenReturn(CompletableFuture.completedFuture(
new ShareGroupHeartbeatResponseData()
));
CompletableFuture<ShareGroupHeartbeatResponseData> future = service.shareGroupHeartbeat(
requestContext(ApiKeys.SHARE_GROUP_HEARTBEAT),
request
);
assertEquals(new ShareGroupHeartbeatResponseData(), future.get(5, TimeUnit.SECONDS));
}
|
public static void addSecurityProvider(Properties properties) {
properties.keySet().stream()
.filter(key -> key.toString().matches("security\\.provider(\\.\\d+)?"))
.sorted(Comparator.comparing(String::valueOf)).forEach(key -> addSecurityProvider(properties.get(key).toString()));
}
|
@Test
void addSecurityProvidersViaProperties() {
removeAllDummyProviders();
int providersCountBefore = Security.getProviders().length;
Properties properties = new Properties();
properties.put("security.provider.1", DummyProviderWithConfig.class.getName() + ":2:CONFIG");
properties.put("security.provider", DummyProvider.class.getName() + ":1");
SecurityProviderLoader.addSecurityProvider(properties);
Provider[] providersAfter = Security.getProviders();
assertEquals(providersCountBefore + 2, providersAfter.length);
Provider provider = Security.getProvider(DummyProvider.PROVIDER_NAME);
Provider providerWithConfig = Security.getProvider(DummyProviderWithConfig.PROVIDER_NAME);
assertNotNull(provider, "Provider not installed.");
assertEquals(DummyProvider.class, provider.getClass());
assertEquals(provider, providersAfter[0]);
assertNotNull(providerWithConfig, "Provider not installed.");
assertEquals(DummyProviderWithConfig.class, providerWithConfig.getClass());
assertEquals(providerWithConfig, providersAfter[1]);
assertEquals("CONFIG", ((DummyProviderWithConfig) providerWithConfig).getConfig());
}
|
public static int[] computePhysicalIndices(
List<TableColumn> logicalColumns,
DataType physicalType,
Function<String, String> nameRemapping) {
Map<TableColumn, Integer> physicalIndexLookup =
computePhysicalIndices(logicalColumns.stream(), physicalType, nameRemapping);
return logicalColumns.stream().mapToInt(physicalIndexLookup::get).toArray();
}
|
@Test
void testFieldMappingLegacyCompositeTypeWithRenaming() {
int[] indices =
TypeMappingUtils.computePhysicalIndices(
TableSchema.builder()
.field("a", DataTypes.BIGINT())
.field("b", DataTypes.STRING())
.build()
.getTableColumns(),
TypeConversions.fromLegacyInfoToDataType(
Types.TUPLE(Types.STRING, Types.LONG)),
str -> {
switch (str) {
case "a":
return "f1";
case "b":
return "f0";
default:
throw new AssertionError();
}
});
assertThat(indices).isEqualTo(new int[] {1, 0});
}
|
static List<String> serversMapToList(Map<String, String> servers) {
List<String> serversList = new ArrayList<>(servers.size());
for (var entry : servers.entrySet()) {
serversList.add(String.format("%s=%s", entry.getKey(), entry.getValue()));
}
return serversList;
}
|
@Test
public void testMapToList() {
Map<String, String> servers = new HashMap<>(3);
servers.put("server.1", "my-cluster-zookeeper-0.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
servers.put("server.2", "my-cluster-zookeeper-1.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
servers.put("server.3", "my-cluster-zookeeper-2.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
List<String> expected = new ArrayList<>(3);
expected.add("server.1=my-cluster-zookeeper-0.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
expected.add("server.2=my-cluster-zookeeper-1.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
expected.add("server.3=my-cluster-zookeeper-2.my-cluster-zookeeper-nodes.myproject.svc:2888:3888:participant;127.0.0.1:12181");
assertThat(ZookeeperScaler.serversMapToList(servers), containsInAnyOrder(expected.toArray()));
}
|
public static boolean parse(final String str, ResTable_config out) {
return parse(str, out, true);
}
|
@Test
public void parse_orientation_land() {
ResTable_config config = new ResTable_config();
ConfigDescription.parse("land", config);
assertThat(config.orientation).isEqualTo(ORIENTATION_LAND);
}
|
@Override
public Interpreter getInterpreter(String replName,
ExecutionContext executionContext)
throws InterpreterNotFoundException {
if (StringUtils.isBlank(replName)) {
// Get the default interpreter of the defaultInterpreterSetting
InterpreterSetting defaultSetting =
interpreterSettingManager.getByName(executionContext.getDefaultInterpreterGroup());
return defaultSetting.getDefaultInterpreter(executionContext);
}
String[] replNameSplits = replName.split("\\.");
if (replNameSplits.length == 2) {
String group = replNameSplits[0];
String name = replNameSplits[1];
InterpreterSetting setting = interpreterSettingManager.getByName(group);
if (null != setting) {
Interpreter interpreter = setting.getInterpreter(executionContext, name);
if (null != interpreter) {
return interpreter;
}
throw new InterpreterNotFoundException("No such interpreter: " + replName);
}
throw new InterpreterNotFoundException("No interpreter setting named: " + group);
} else if (replNameSplits.length == 1){
// first assume group is omitted
InterpreterSetting setting =
interpreterSettingManager.getByName(executionContext.getDefaultInterpreterGroup());
if (setting != null) {
Interpreter interpreter = setting.getInterpreter(executionContext, replName);
if (null != interpreter) {
return interpreter;
}
}
// then assume interpreter name is omitted
setting = interpreterSettingManager.getByName(replName);
if (null != setting) {
return setting.getDefaultInterpreter(executionContext);
}
}
throw new InterpreterNotFoundException("No such interpreter: " + replName);
}
|
@Test
void testUnknownRepl2() {
try {
interpreterFactory.getInterpreter("unknown_repl", new ExecutionContext("user1", "note1", "test"));
fail("should fail due to no such interpreter");
} catch (InterpreterNotFoundException e) {
assertEquals("No such interpreter: unknown_repl", e.getMessage());
}
}
|
public void initializeSession(AuthenticationRequest authenticationRequest, SAMLBindingContext bindingContext) throws SamlSessionException, SharedServiceClientException {
final String httpSessionId = authenticationRequest.getRequest().getSession().getId();
if (authenticationRequest.getFederationName() != null) {
findOrInitializeFederationSession(authenticationRequest, httpSessionId);
}
findOrInitializeSamlSession(authenticationRequest, httpSessionId, bindingContext);
}
|
@Test
public void checkIfAssertionConsumerServiceUrlIsSet() throws SamlSessionException, SharedServiceClientException {
samlSessionService.initializeSession(authenticationRequest, bindingContext);
assertEquals("https://sso.afnemer.nl", authenticationRequest.getSamlSession().getAssertionConsumerServiceURL());
}
|
@Override
public <T> Exporter<T> export(Invoker<T> invoker) throws RpcException {
return new InjvmExporter<>(invoker, invoker.getUrl().getServiceKey(), exporterMap);
}
|
@Test
void testApplication() {
DemoService service = new DemoServiceImpl();
URL url = URL.valueOf("injvm://127.0.0.1/TestService")
.addParameter(INTERFACE_KEY, DemoService.class.getName())
.addParameter("application", "consumer")
.addParameter(APPLICATION_KEY, "test-app")
.setScopeModel(ApplicationModel.defaultModel().getDefaultModule());
Invoker<?> invoker = proxy.getInvoker(service, DemoService.class, url);
assertTrue(invoker.isAvailable());
Exporter<?> exporter = protocol.export(invoker);
exporters.add(exporter);
service = proxy.getProxy(protocol.refer(DemoService.class, url));
assertEquals("test-app", service.getApplication());
assertTrue(StringUtils.isEmpty(RpcContext.getServiceContext().getRemoteApplicationName()));
}
|
@Override
public boolean filterPath(Path filePath) {
if (getIncludeMatchers().isEmpty() && getExcludeMatchers().isEmpty()) {
return false;
}
// compensate for the fact that Flink paths are slashed
final String path =
filePath.hasWindowsDrive() ? filePath.getPath().substring(1) : filePath.getPath();
final java.nio.file.Path nioPath = Paths.get(path);
for (PathMatcher matcher : getIncludeMatchers()) {
if (matcher.matches(nioPath)) {
return shouldExclude(nioPath);
}
}
return true;
}
|
@Test
void testMatchAllFilesByDefault() {
GlobFilePathFilter matcher =
new GlobFilePathFilter(Collections.emptyList(), Collections.emptyList());
assertThat(matcher.filterPath(new Path("dir/file.txt"))).isFalse();
}
|
@Override
public T deserialize(final String topic, final byte[] bytes) {
try {
if (bytes == null) {
return null;
}
// don't use the JsonSchemaConverter to read this data because
// we require that the MAPPER enables USE_BIG_DECIMAL_FOR_FLOATS,
// which is not currently available in the standard converters
final JsonNode value = isJsonSchema
? JsonSerdeUtils.readJsonSR(bytes, MAPPER, JsonNode.class)
: MAPPER.readTree(bytes);
final Object coerced = enforceFieldType(
"$",
new JsonValueContext(value, schema)
);
if (LOG.isTraceEnabled()) {
LOG.trace("Deserialized {}. topic:{}, row:{}", target, topic, coerced);
}
return SerdeUtils.castToTargetType(coerced, targetType);
} catch (final Exception e) {
// Clear location in order to avoid logging data, for security reasons
if (e instanceof JsonParseException) {
((JsonParseException) e).clearLocation();
}
throw new SerializationException(
"Failed to deserialize " + target + " from topic: " + topic + ". " + e.getMessage(), e);
}
}
|
@Test
public void shouldThrowIfCanNotCoerceToDouble() {
// Given:
final KsqlJsonDeserializer<Double> deserializer =
givenDeserializerForSchema(Schema.OPTIONAL_FLOAT64_SCHEMA, Double.class);
final byte[] bytes = serializeJson(BooleanNode.valueOf(true));
// When:
final Exception e = assertThrows(
SerializationException.class,
() -> deserializer.deserialize(SOME_TOPIC, bytes)
);
// Then:
assertThat(e.getCause(), (hasMessage(startsWith(
"Can't convert type. sourceType: BooleanNode, requiredType: DOUBLE"))));
}
|
public long size() {
return mInStreamCache.size() + mOutStreamCache.size();
}
|
@Test
public void size() throws Exception {
StreamCache streamCache = new StreamCache(Constants.HOUR_MS);
FileInStream is = mock(FileInStream.class);
FileOutStream os = mock(FileOutStream.class);
Assert.assertEquals(0, streamCache.size());
int isId = streamCache.put(is);
Assert.assertEquals(1, streamCache.size());
int osId = streamCache.put(os);
Assert.assertEquals(2, streamCache.size());
streamCache.invalidate(isId);
Assert.assertEquals(1, streamCache.size());
streamCache.invalidate(osId);
Assert.assertEquals(0, streamCache.size());
}
|
public static Compressor getCompressor(String alias) {
// Factory pattern: creation is delegated to the ExtensionLoader.
return EXTENSION_LOADER.getExtension(alias);
}
|
@Test
public void getCompressor1() throws Exception {
Compressor compressor = CompressorFactory.getCompressor("test");
Assert.assertNotNull(compressor);
Assert.assertEquals(compressor.getClass(), TestCompressor.class);
}
|
public static void loadRules(List<AuthorityRule> rules) {
currentProperty.updateValue(rules);
}
|
@Test
public void testLoadRules() {
String resourceName = "testLoadRules";
AuthorityRule rule = new AuthorityRule();
rule.setResource(resourceName);
rule.setLimitApp("a,b");
rule.setStrategy(RuleConstant.AUTHORITY_WHITE);
AuthorityRuleManager.loadRules(Collections.singletonList(rule));
List<AuthorityRule> rules = AuthorityRuleManager.getRules();
assertEquals(1, rules.size());
assertEquals(rule, rules.get(0));
AuthorityRuleManager.loadRules(Collections.singletonList(new AuthorityRule()));
rules = AuthorityRuleManager.getRules();
assertEquals(0, rules.size());
}
|
@Override
public StorageVolume getDefaultStorageVolume() {
try (LockCloseable lock = new LockCloseable(rwLock.readLock())) {
if (defaultStorageVolumeId.isEmpty()) {
return getStorageVolumeByName(BUILTIN_STORAGE_VOLUME);
}
return getStorageVolume(getDefaultStorageVolumeId());
}
}
|
@Test
public void testGetDefaultStorageVolume() throws IllegalAccessException, AlreadyExistsException,
DdlException, NoSuchFieldException {
new Expectations() {
{
editLog.logSetDefaultStorageVolume((SetDefaultStorageVolumeLog) any);
}
};
SharedDataStorageVolumeMgr sdsvm = new SharedDataStorageVolumeMgr();
sdsvm.createBuiltinStorageVolume();
FieldUtils.writeField(sdsvm, "defaultStorageVolumeId", "", true);
Assert.assertEquals(SharedDataStorageVolumeMgr.BUILTIN_STORAGE_VOLUME, sdsvm.getDefaultStorageVolume().getName());
String svName = "test";
List<String> locations = Arrays.asList("s3://abc");
Map<String, String> storageParams = new HashMap<>();
storageParams.put(AWS_S3_REGION, "region");
storageParams.put(AWS_S3_ENDPOINT, "endpoint");
storageParams.put(AWS_S3_USE_AWS_SDK_DEFAULT_BEHAVIOR, "true");
sdsvm.createStorageVolume(svName, "S3", locations, storageParams, Optional.empty(), "");
sdsvm.setDefaultStorageVolume(svName);
Assert.assertEquals(svName, sdsvm.getDefaultStorageVolume().getName());
}
|
@Override
void handle(Connection connection, DatabaseCharsetChecker.State state) throws SQLException {
expectCaseSensitiveDefaultCollation(connection);
if (state == DatabaseCharsetChecker.State.UPGRADE || state == DatabaseCharsetChecker.State.STARTUP) {
repairColumns(connection);
}
}
|
@Test
public void upgrade_fails_if_default_collation_is_not_CS_AS() throws SQLException {
answerDefaultCollation("Latin1_General_CI_AI");
assertThatThrownBy(() -> underTest.handle(connection, DatabaseCharsetChecker.State.UPGRADE))
.isInstanceOf(MessageException.class)
.hasMessage("Database collation must be case-sensitive and accent-sensitive. It is Latin1_General_CI_AI but should be Latin1_General_CS_AS.");
}
|
public static Transcript parse(String jsonStr) {
try {
Transcript transcript = new Transcript();
long startTime = -1L;
long endTime = -1L;
long segmentStartTime = -1L;
long segmentEndTime = -1L;
long duration = 0L;
String speaker = "";
String prevSpeaker = "";
String segmentBody = "";
JSONArray objSegments;
Set<String> speakers = new HashSet<>();
try {
JSONObject obj = new JSONObject(jsonStr);
objSegments = obj.getJSONArray("segments");
} catch (JSONException e) {
e.printStackTrace();
return null;
}
for (int i = 0; i < objSegments.length(); i++) {
JSONObject jsonObject = objSegments.getJSONObject(i);
segmentEndTime = endTime;
startTime = Double.valueOf(jsonObject.optDouble("startTime", -1) * 1000L).longValue();
endTime = Double.valueOf(jsonObject.optDouble("endTime", -1) * 1000L).longValue();
if (startTime < 0 || endTime < 0) {
continue;
}
if (segmentStartTime == -1L) {
segmentStartTime = startTime;
}
duration += endTime - startTime;
prevSpeaker = speaker;
speaker = jsonObject.optString("speaker");
speakers.add(speaker);
if (StringUtils.isEmpty(speaker) && StringUtils.isNotEmpty(prevSpeaker)) {
speaker = prevSpeaker;
}
String body = jsonObject.optString("body");
if (!prevSpeaker.equals(speaker)) {
if (StringUtils.isNotEmpty(segmentBody)) {
segmentBody = StringUtils.trim(segmentBody);
transcript.addSegment(new TranscriptSegment(segmentStartTime,
segmentEndTime,
segmentBody,
prevSpeaker));
segmentStartTime = startTime;
segmentBody = body.toString();
duration = 0L;
continue;
}
}
segmentBody += " " + body;
if (duration >= TranscriptParser.MIN_SPAN) {
// Look ahead and make sure the next segment does not start with an alphanumeric character
if ((i + 1) < objSegments.length()) {
String nextSegmentFirstChar = objSegments.getJSONObject(i + 1)
.optString("body")
.substring(0, 1);
if (!StringUtils.isAlphanumeric(nextSegmentFirstChar)
&& (duration < TranscriptParser.MAX_SPAN)) {
continue;
}
}
segmentBody = StringUtils.trim(segmentBody);
transcript.addSegment(new TranscriptSegment(segmentStartTime, endTime, segmentBody, speaker));
duration = 0L;
segmentBody = "";
segmentStartTime = -1L;
}
}
if (!StringUtil.isBlank(segmentBody)) {
segmentBody = StringUtils.trim(segmentBody);
transcript.addSegment(new TranscriptSegment(segmentStartTime, endTime, segmentBody, speaker));
}
if (transcript.getSegmentCount() > 0) {
transcript.setSpeakers(speakers);
return transcript;
} else {
return null;
}
} catch (JSONException e) {
e.printStackTrace();
}
return null;
}
|
@Test
public void testParse() {
String type = "application/json";
Transcript result = TranscriptParser.parse(jsonStr, type);
// There isn't a segment at 900L, so go backwards and get the segment at 800L
assertEquals(result.getSegmentAtTime(900L).getSpeaker(), "John Doe");
assertEquals(result.getSegmentAtTime(930L).getWords(), "And");
// blank string
String blankStr = "";
result = TranscriptParser.parse(blankStr, type);
assertEquals(result, null);
result = TranscriptParser.parse(null, type);
assertEquals(result, null);
// All blank lines
String allNewlinesStr = "\r\n\r\n\r\n\r\n";
result = TranscriptParser.parse(allNewlinesStr, type);
assertEquals(result, null);
// segments is missing
String jsonStrBad1 = "{'version': '1.0.0', "
+ "'segmentsX': [ "
+ "{ 'speaker' : 'John Doe', 'startTime': 0.8, 'endTime': 1.9, 'body': 'And' },"
+ "{ 'startTime': 2.9, 'endTime': 3.4, 'body': 'the' },"
+ "{ 'startTime': 3.5, 'endTime': 3.6, 'body': 'person' }]}";
result = TranscriptParser.parse(jsonStrBad1, type);
assertEquals(result, null);
// invalid time formatting
String jsonStrBad2 = "{'version': '1.0.0', "
+ "'segments': [ "
+ "{ 'speaker' : 'XJohn Doe', 'startTime': stringTime, 'endTime': stringTime, 'body': 'And' },"
+ "{ 'XstartTime': 2.9, 'XendTime': 3.4, 'body': 'the' },"
+ "{ 'startTime': '-2.9', 'endTime': '-3.4', 'body': 'the' },"
+ "{ 'startTime': 'bad_time', 'endTime': '-3.4', 'body': 'the' }]}";
result = TranscriptParser.parse(jsonStrBad2, type);
assertNull(result);
// Just plain text
String strBad3 = "John Doe: Promoting your podcast in a new\n\n"
+ "way. The latest from PogNews.";
result = TranscriptParser.parse(strBad3, type);
assertNull(result);
// passing the wrong type
type = "application/srt";
result = TranscriptParser.parse(jsonStr, type);
assertEquals(result, null);
}
|
@Nullable public JavaEmojiUtils.SkinTone getDefaultSkinTone() {
if (mRandom) {
switch (new Random().nextInt(JavaEmojiUtils.SkinTone.values().length)) {
case 0:
return JavaEmojiUtils.SkinTone.Fitzpatrick_2;
case 1:
return JavaEmojiUtils.SkinTone.Fitzpatrick_3;
case 2:
return JavaEmojiUtils.SkinTone.Fitzpatrick_4;
case 3:
return JavaEmojiUtils.SkinTone.Fitzpatrick_5;
default:
return JavaEmojiUtils.SkinTone.Fitzpatrick_6;
}
}
return mDefaultSkinTone;
}
|
@Test
public void getDefaultSkinTone() {
DefaultSkinTonePrefTracker tracker =
new DefaultSkinTonePrefTracker(AnyApplication.prefs(getApplicationContext()));
// default value is null
Assert.assertNull(tracker.getDefaultSkinTone());
final String[] skinToneValues =
getApplicationContext()
.getResources()
.getStringArray(R.array.settings_key_default_emoji_skin_tone_values);
// random + generic
Assert.assertEquals(JavaEmojiUtils.SkinTone.values().length + 2, skinToneValues.length);
Assert.assertNotNull(skinToneValues);
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[1]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_2, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[2]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_3, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[3]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_4, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[4]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_5, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[5]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_6, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[0] /*generic*/);
Assert.assertNull(tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[5]);
Assert.assertEquals(JavaEmojiUtils.SkinTone.Fitzpatrick_6, tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(R.string.settings_key_default_emoji_skin_tone, "blah");
// an unrecognized value falls back to generic (null)
Assert.assertNull(tracker.getDefaultSkinTone());
SharedPrefsHelper.setPrefsValue(
R.string.settings_key_default_emoji_skin_tone, skinToneValues[6] /*random*/);
Set<JavaEmojiUtils.SkinTone> seen = new HashSet<>();
for (int i = 0; i < 10000; i++) {
final JavaEmojiUtils.SkinTone skinTone = tracker.getDefaultSkinTone();
Assert.assertNotNull(skinTone);
seen.add(skinTone);
}
Assert.assertEquals(JavaEmojiUtils.SkinTone.values().length, seen.size());
}
|
public static MetricRegistry getOrCreate(String name) {
final MetricRegistry existing = REGISTRIES.get(name);
if (existing == null) {
final MetricRegistry created = new MetricRegistry();
final MetricRegistry raced = add(name, created);
if (raced == null) {
return created;
}
return raced;
}
return existing;
}
|
@Test
public void memorizesRegistriesByName() throws Exception {
final MetricRegistry one = SharedMetricRegistries.getOrCreate("one");
final MetricRegistry two = SharedMetricRegistries.getOrCreate("one");
assertThat(one)
.isSameAs(two);
}
|
@Override
protected Mono<Void> doExecute(final ServerWebExchange exchange,
final ShenyuPluginChain chain,
final SelectorData selector,
final RuleData rule) {
return wasmLoader.getWasmExtern(DO_EXECUTE_METHOD_NAME).map(doExecute -> {
final Long argumentId = callWASI(exchange, chain, selector, rule, doExecute);
return doExecute(exchange, chain, selector, rule, argumentId);
}).orElseGet(() -> {
LOG.error("{} function not found in {}", DO_EXECUTE_METHOD_NAME, wasmLoader.getWasmName());
exchange.getResponse().setStatusCode(HttpStatus.INTERNAL_SERVER_ERROR);
Object error = ShenyuResultWrap.error(exchange, ShenyuResultEnum.WASM_FUNC_NOT_FOUND);
return WebFluxResultUtils.result(exchange, error);
});
}
|
@Test
public void executeSelectorManyMatch() {
List<ConditionData> conditionDataList = Collections.singletonList(conditionData);
this.ruleData.setConditionDataList(conditionDataList);
this.ruleData.setMatchMode(0);
this.selectorData.setSort(1);
this.selectorData.setMatchMode(0);
this.selectorData.setLogged(true);
this.selectorData.setConditionList(conditionDataList);
BaseDataCache.getInstance().cachePluginData(pluginData);
BaseDataCache.getInstance().cacheSelectData(selectorData);
BaseDataCache.getInstance().cacheSelectData(SelectorData.builder()
.id("2").pluginName("SHENYU")
.enabled(true)
.matchMode(0)
.logged(true)
.sort(2)
.conditionList(conditionDataList)
.type(SelectorTypeEnum.CUSTOM_FLOW.getCode()).build());
BaseDataCache.getInstance().cacheRuleData(ruleData);
StepVerifier.create(testShenyuWasmPlugin.execute(exchange, shenyuPluginChain)).expectSubscription().verifyComplete();
verify(testShenyuWasmPlugin).doExecute(exchange, shenyuPluginChain, selectorData, ruleData);
}
|
@Override
public TimeValue getRetryInterval(HttpResponse response, int execCount, HttpContext context) {
// a server may send a 429 / 503 with a Retry-After header
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
Header header = response.getFirstHeader(HttpHeaders.RETRY_AFTER);
TimeValue retryAfter = null;
if (header != null) {
String value = header.getValue();
try {
retryAfter = TimeValue.ofSeconds(Long.parseLong(value));
} catch (NumberFormatException ignore) {
Instant retryAfterDate = DateUtils.parseStandardDate(value);
if (retryAfterDate != null) {
retryAfter =
TimeValue.ofMilliseconds(retryAfterDate.toEpochMilli() - System.currentTimeMillis());
}
}
if (TimeValue.isPositive(retryAfter)) {
return retryAfter;
}
}
int delayMillis = 1000 * (int) Math.min(Math.pow(2.0, (long) execCount - 1.0), 64.0);
int jitter = ThreadLocalRandom.current().nextInt(Math.max(1, (int) (delayMillis * 0.1)));
return TimeValue.ofMilliseconds(delayMillis + jitter);
}
|
@Test
public void invalidRetryAfterHeader() {
HttpResponse response = new BasicHttpResponse(503, "Oopsie");
response.setHeader(HttpHeaders.RETRY_AFTER, "Stuff");
assertThat(retryStrategy.getRetryInterval(response, 3, null).toMilliseconds())
.isBetween(4000L, 5000L);
}
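For reference, a hedged companion sketch of the exponential-backoff fallback used when no usable Retry-After header is present: the delay is 1000 ms * min(2^(execCount - 1), 64) plus up to 10% random jitter. Hypothetical test, reusing the same fixture as above.
@Test
public void exponentialBackoffFallbackSketch() {
// Hypothetical illustration: with no Retry-After header the strategy backs off
// exponentially (1s, 2s, 4s, ...) capped at 64s, plus less than 10% random jitter.
HttpResponse response = new BasicHttpResponse(503, "Service Unavailable");
assertThat(retryStrategy.getRetryInterval(response, 1, null).toMilliseconds()).isBetween(1000L, 1100L);
assertThat(retryStrategy.getRetryInterval(response, 8, null).toMilliseconds()).isBetween(64000L, 70400L);
}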
|
@GetMapping("/trace")
@PreAuthorize(value = "@apolloAuditLogQueryApiPreAuthorizer.hasQueryPermission()")
public List<ApolloAuditLogDetailsDTO> findTraceDetails(@RequestParam String traceId) {
List<ApolloAuditLogDetailsDTO> detailsDTOList = api.queryTraceDetails(traceId);
return detailsDTOList;
}
|
@Test
public void testFindTraceDetails() throws Exception {
final String traceId = "query-trace-id";
final int traceDetailsListLength = 3;
{
List<ApolloAuditLogDetailsDTO> mockDetailsDTOList = MockBeanFactory.mockTraceDetailsDTOListByLength(
traceDetailsListLength);
mockDetailsDTOList.forEach(e -> e.getLogDTO().setTraceId(traceId));
Mockito.when(api.queryTraceDetails(Mockito.eq(traceId))).thenReturn(mockDetailsDTOList);
}
mockMvc.perform(MockMvcRequestBuilders.get("/apollo/audit/trace")
.param("traceId", traceId))
.andExpect(MockMvcResultMatchers.status().isOk())
.andExpect(MockMvcResultMatchers.jsonPath("$").isArray())
.andExpect(MockMvcResultMatchers.jsonPath("$.length()").value(traceDetailsListLength))
.andExpect(MockMvcResultMatchers.jsonPath("$[0].logDTO.traceId").value(traceId));
Mockito.verify(api, Mockito.times(1)).queryTraceDetails(Mockito.eq(traceId));
}
|
public synchronized Map<String, Object> getSubtaskProgress(String taskName, @Nullable String subtaskNames,
Executor executor, HttpClientConnectionManager connMgr, Map<String, String> workerEndpoints,
Map<String, String> requestHeaders, int timeoutMs)
throws Exception {
return getSubtaskProgress(taskName, subtaskNames,
new CompletionServiceHelper(executor, connMgr, HashBiMap.create(0)), workerEndpoints, requestHeaders,
timeoutMs);
}
|
@Test
public void testGetSubtaskProgressPending()
throws Exception {
TaskDriver taskDriver = mock(TaskDriver.class);
JobConfig jobConfig = mock(JobConfig.class);
when(taskDriver.getJobConfig(anyString())).thenReturn(jobConfig);
JobContext jobContext = mock(JobContext.class);
when(taskDriver.getJobContext(anyString())).thenReturn(jobContext);
PinotHelixTaskResourceManager mgr =
new PinotHelixTaskResourceManager(mock(PinotHelixResourceManager.class), taskDriver);
CompletionServiceHelper httpHelper = mock(CompletionServiceHelper.class);
CompletionServiceHelper.CompletionServiceResponse httpResp =
new CompletionServiceHelper.CompletionServiceResponse();
when(httpHelper.doMultiGetRequest(any(), any(), anyBoolean(), any(), anyInt())).thenReturn(httpResp);
String[] workers = new String[]{"worker0", "worker1", "worker2"};
Map<String, String> workerEndpoints = new HashMap<>();
for (String worker : workers) {
workerEndpoints.put(worker, "http://" + worker + ":9000");
}
String taskName = "Task_SegmentGenerationAndPushTask_someone";
String[] subtaskNames = new String[3];
Map<String, Integer> taskIdPartitionMap = new HashMap<>();
for (int i = 0; i < 3; i++) {
String subtaskName = taskName + "_" + i;
subtaskNames[i] = subtaskName;
taskIdPartitionMap.put(subtaskName, i);
}
Map<String, TaskConfig> taskConfigMap = new HashMap<>();
for (String subtaskName : subtaskNames) {
taskConfigMap.put(subtaskName, mock(TaskConfig.class));
}
when(jobConfig.getTaskConfigMap()).thenReturn(taskConfigMap);
// Some subtasks are pending to be run
httpResp._httpResponses.put(workers[0],
JsonUtils.objectToString(Collections.singletonMap(subtaskNames[0], "running on worker: 0")));
when(jobContext.getTaskIdPartitionMap()).thenReturn(taskIdPartitionMap);
when(jobContext.getAssignedParticipant(0)).thenReturn(workers[0]);
when(jobContext.getPartitionState(0)).thenReturn(TaskPartitionState.RUNNING);
Map<String, Object> progress =
mgr.getSubtaskProgress(taskName, StringUtils.join(subtaskNames, ','), httpHelper, workerEndpoints,
Collections.emptyMap(), 1000);
String taskProgress = (String) progress.get(subtaskNames[0]);
assertEquals(taskProgress, "running on worker: 0");
taskProgress = (String) progress.get(subtaskNames[1]);
assertEquals(taskProgress, "No worker has run this subtask");
taskProgress = (String) progress.get(subtaskNames[2]);
assertEquals(taskProgress, "No worker has run this subtask");
}
|
public static String getType(String fileStreamHexHead) {
if(StrUtil.isBlank(fileStreamHexHead)){
return null;
}
if (MapUtil.isNotEmpty(FILE_TYPE_MAP)) {
for (final Entry<String, String> fileTypeEntry : FILE_TYPE_MAP.entrySet()) {
if (StrUtil.startWithIgnoreCase(fileStreamHexHead, fileTypeEntry.getKey())) {
return fileTypeEntry.getValue();
}
}
}
byte[] bytes = HexUtil.decodeHex(fileStreamHexHead);
return FileMagicNumber.getMagicNumber(bytes).getExtension();
}
|
@Test
@Disabled
public void emptyTest() {
final File file = FileUtil.file("d:/empty.txt");
final String type = FileTypeUtil.getType(file);
Console.log(type);
}
|
@Override
public boolean hasReservedCapacity(@Nonnull UUID txnId) {
if (txnId.equals(NULL_UUID)) {
return false;
}
return reservedCapacityCountByTxId.containsKey(txnId);
}
|
@Test
public void null_uuid_has_no_reserved_capacity() {
assertFalse(counter.hasReservedCapacity(TxnReservedCapacityCounter.NULL_UUID));
}
|
public boolean checkStateUpdater(final long now,
final java.util.function.Consumer<Set<TopicPartition>> offsetResetter) {
addTasksToStateUpdater();
if (stateUpdater.hasExceptionsAndFailedTasks()) {
handleExceptionsFromStateUpdater();
}
if (stateUpdater.restoresActiveTasks()) {
handleRestoredTasksFromStateUpdater(now, offsetResetter);
}
return !stateUpdater.restoresActiveTasks()
&& !tasks.hasPendingTasksToInit();
}
|
@Test
public void shouldRetryInitializationWhenLockExceptionInStateUpdater() {
final StreamTask task00 = statefulTask(taskId00, taskId00ChangelogPartitions)
.withInputPartitions(taskId00Partitions)
.inState(State.RESTORING).build();
final StandbyTask task01 = standbyTask(taskId01, taskId01ChangelogPartitions)
.withInputPartitions(taskId01Partitions)
.inState(State.RUNNING).build();
final TasksRegistry tasks = mock(TasksRegistry.class);
when(tasks.drainPendingTasksToInit()).thenReturn(mkSet(task00, task01));
final LockException lockException = new LockException("Where are my keys??");
doThrow(lockException).when(task00).initializeIfNeeded();
taskManager = setUpTaskManager(StreamsConfigUtils.ProcessingMode.AT_LEAST_ONCE, tasks, true);
taskManager.checkStateUpdater(time.milliseconds(), noOpResetter);
verify(task00).initializeIfNeeded();
verify(task01).initializeIfNeeded();
verify(tasks).addPendingTasksToInit(
argThat(tasksToInit -> tasksToInit.contains(task00) && !tasksToInit.contains(task01))
);
verify(stateUpdater, never()).add(task00);
verify(stateUpdater).add(task01);
}
|
@Override // FsDatasetSpi
public FsVolumeReferences getFsVolumeReferences() {
return new FsVolumeReferences(volumes.getVolumes());
}
|
@Test(timeout = 30000)
public void testReportBadBlocks() throws Exception {
boolean threwException = false;
final Configuration config = new HdfsConfiguration();
try (MiniDFSCluster cluster = new MiniDFSCluster.Builder(config)
.numDataNodes(1).build()) {
cluster.waitActive();
Assert.assertEquals(0, cluster.getNamesystem().getCorruptReplicaBlocks());
DataNode dataNode = cluster.getDataNodes().get(0);
ExtendedBlock block = new ExtendedBlock(cluster.getNamesystem().getBlockPoolId(), 0);
try {
// Test the reportBadBlocks when the volume is null
dataNode.reportBadBlocks(block);
} catch (NullPointerException npe) {
threwException = true;
}
Thread.sleep(3000);
Assert.assertFalse(threwException);
Assert.assertEquals(0, cluster.getNamesystem().getCorruptReplicaBlocks());
FileSystem fs = cluster.getFileSystem();
Path filePath = new Path(name.getMethodName());
DFSTestUtil.createFile(fs, filePath, 1, (short) 1, 0);
block = DFSTestUtil.getFirstBlock(fs, filePath);
// Test for the overloaded method reportBadBlocks
dataNode.reportBadBlocks(block, dataNode.getFSDataset().getFsVolumeReferences().get(0));
DataNodeTestUtils.triggerHeartbeat(dataNode);
BlockManagerTestUtil.updateState(cluster.getNamesystem().getBlockManager());
assertEquals("Corrupt replica blocks could not be reflected with the heartbeat", 1,
cluster.getNamesystem().getCorruptReplicaBlocks());
}
}
|
public boolean filterMatchesEntry(String filter, FeedEntry entry) throws FeedEntryFilterException {
if (StringUtils.isBlank(filter)) {
return true;
}
Script script;
try {
script = ENGINE.createScript(filter);
} catch (JexlException e) {
throw new FeedEntryFilterException("Exception while parsing expression " + filter, e);
}
JexlContext context = new MapContext();
context.set("title", entry.getContent().getTitle() == null ? "" : Jsoup.parse(entry.getContent().getTitle()).text().toLowerCase());
context.set("author", entry.getContent().getAuthor() == null ? "" : entry.getContent().getAuthor().toLowerCase());
context.set("content",
entry.getContent().getContent() == null ? "" : Jsoup.parse(entry.getContent().getContent()).text().toLowerCase());
context.set("url", entry.getUrl() == null ? "" : entry.getUrl().toLowerCase());
context.set("categories", entry.getContent().getCategories() == null ? "" : entry.getContent().getCategories().toLowerCase());
context.set("year", Year.now().getValue());
Callable<Object> callable = script.callable(context);
Future<Object> future = executor.submit(callable);
Object result;
try {
result = future.get(config.feedRefresh().filteringExpressionEvaluationTimeout().toMillis(), TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new FeedEntryFilterException("interrupted while evaluating expression " + filter, e);
} catch (ExecutionException e) {
throw new FeedEntryFilterException("Exception while evaluating expression " + filter, e);
} catch (TimeoutException e) {
throw new FeedEntryFilterException("Took too long evaluating expression " + filter, e);
}
try {
return (boolean) result;
} catch (ClassCastException e) {
throw new FeedEntryFilterException(e.getMessage(), e);
}
}
|
@Test
void emptyFilterMatchesFilter() throws FeedEntryFilterException {
Assertions.assertTrue(service.filterMatchesEntry(null, entry));
}
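A hedged companion test exercising one of the JEXL context variables the focal method exposes; it relies only on the injected year and assumes the shared entry fixture has its content populated, as in the other tests of this class. Hypothetical, for illustration only.
@Test
void yearVariableIsAvailableToFilters() throws FeedEntryFilterException {
// Hypothetical companion test: the context exposes the current year, so this
// expression is true regardless of the entry's title, author or content.
Assertions.assertTrue(service.filterMatchesEntry("year >= 2020", entry));
}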
|
private void writeObject(java.io.ObjectOutputStream out)
throws IOException {
out.defaultWriteObject();
}
|
@Test
public void javaSerdeInvalidVersion() throws Exception {
int invalidVersion = -1;
byte[] data = new byte[]{0x1, 0x2, 0x3, 0x4};
WorkerIdentity identity = new WorkerIdentity(data, invalidVersion);
final byte[] buffer;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try (ObjectOutputStream oos = new ObjectOutputStream(bos)) {
oos.writeObject(identity);
}
buffer = bos.toByteArray();
try (ByteArrayInputStream bis = new ByteArrayInputStream(buffer);
ObjectInputStream ois = new ObjectInputStream(bis)) {
InvalidObjectException t = assertThrows(InvalidObjectException.class,
ois::readObject);
t.printStackTrace();
}
}
|
public Timestamp convertDateToTimestamp( Date date ) throws KettleValueException {
if ( date == null ) {
return null;
}
Timestamp result = null;
if ( date instanceof Timestamp ) {
result = (Timestamp) date;
} else {
result = new Timestamp( date.getTime() );
}
return result;
}
|
@Test
public void testConvertDateToTimestamp() throws Exception {
ValueMetaTimestamp valueMetaTimestamp = new ValueMetaTimestamp();
// Converting date to timestamp
Date date = new Date();
assertEquals( valueMetaTimestamp.convertDateToTimestamp( date ).getTime(), date.getTime() );
// Converting timestamp to timestamp
Timestamp timestamp = Timestamp.valueOf( "2014-04-05 04:03:02.123456789" );
Timestamp convertedTimestamp = valueMetaTimestamp.convertDateToTimestamp( timestamp );
assertEquals( convertedTimestamp.getTime(), timestamp.getTime() );
assertEquals( convertedTimestamp.getNanos(), timestamp.getNanos() );
}
|
@Override
public long currentTime() {
return timerService.currentProcessingTime();
}
|
@Test
void testCurrentProcessingTime() throws Exception {
TestInternalTimerService<Integer, VoidNamespace> timerService = getTimerService();
DefaultProcessingTimeManager manager = new DefaultProcessingTimeManager(timerService);
long newTime = 100L;
timerService.advanceProcessingTime(newTime);
assertThat(manager.currentTime()).isEqualTo(newTime);
}
|
public static void mergeParams(
Map<String, ParamDefinition> params,
Map<String, ParamDefinition> paramsToMerge,
MergeContext context) {
if (paramsToMerge == null) {
return;
}
Stream.concat(params.keySet().stream(), paramsToMerge.keySet().stream())
.forEach(
name -> {
ParamDefinition paramToMerge = paramsToMerge.get(name);
if (paramToMerge == null) {
return;
}
if (paramToMerge.getType() == ParamType.MAP && paramToMerge.isLiteral()) {
Map<String, ParamDefinition> baseMap = mapValueOrEmpty(params, name);
Map<String, ParamDefinition> toMergeMap = mapValueOrEmpty(paramsToMerge, name);
mergeParams(
baseMap,
toMergeMap,
MergeContext.copyWithParentMode(
context, params.getOrDefault(name, paramToMerge).getMode()));
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else if (paramToMerge.getType() == ParamType.STRING_MAP
&& paramToMerge.isLiteral()) {
Map<String, String> baseMap = stringMapValueOrEmpty(params, name);
Map<String, String> toMergeMap = stringMapValueOrEmpty(paramsToMerge, name);
baseMap.putAll(toMergeMap);
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, baseMap));
} else {
params.put(
name,
buildMergedParamDefinition(
name, paramToMerge, params.get(name), context, paramToMerge.getValue()));
}
});
}
|
@Test
public void testMergeTags() throws JsonProcessingException {
Map<String, ParamDefinition> allParams =
parseParamDefMap(
"{'withtags': {'type': 'STRING','tags': ['tag1', 'tag3'], 'value': 'hello'}}");
Map<String, ParamDefinition> paramsToMerge =
parseParamDefMap(
"{'withtags': {'type': 'STRING','tags': ['tag2', 'tag3'], 'value': 'goodbye'}}");
ParamsMergeHelper.mergeParams(allParams, paramsToMerge, definitionContext);
assertEquals(1, allParams.size());
assertEquals(3, allParams.get("withtags").asStringParamDef().getTags().getTags().size());
}
|
@Override
public boolean isSameRM(final XAResource xaResource) {
SingleXAResource singleXAResource = (SingleXAResource) xaResource;
return resourceName.equals(singleXAResource.resourceName);
}
|
@Test
void assertIsSameRM() {
assertTrue(singleXAResource.isSameRM(new SingleXAResource("ds1", xaResource)));
}
|
@Override
public Exchange add(CamelContext camelContext, String key, Exchange oldExchange, Exchange newExchange)
throws OptimisticLockingException {
if (!optimistic) {
throw new UnsupportedOperationException();
}
LOG.trace("Adding an Exchange with ID {} for key {} in an optimistic manner.", newExchange.getExchangeId(), key);
if (oldExchange == null) {
DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
DefaultExchangeHolder oldHolder = cache.getAndPut(key, newHolder);
if (oldHolder != null) {
Exchange exchange = unmarshallExchange(camelContext, oldHolder);
LOG.error(
"Optimistic locking failed for exchange with key {}: Cache#getAndPut returned an Exchange with ID {}, while no existing exchange was expected",
key,
exchange != null ? exchange.getExchangeId() : "<null>");
throw new OptimisticLockingException();
}
} else {
DefaultExchangeHolder oldHolder = DefaultExchangeHolder.marshal(oldExchange, true, allowSerializedHeaders);
DefaultExchangeHolder newHolder = DefaultExchangeHolder.marshal(newExchange, true, allowSerializedHeaders);
if (!cache.replace(key, oldHolder, newHolder)) {
LOG.error(
"Optimistic locking failed for exchange with key {}: Cache#replace did not replace any Exchange, while exactly one was expected to be replaced",
key);
throw new OptimisticLockingException();
}
}
LOG.trace("Added an Exchange with ID {} for key {} in optimistic manner.", newExchange.getExchangeId(), key);
return oldExchange;
}
|
@Test
public void checkThreadSafeAddOfNewExchange() throws Exception {
JCacheAggregationRepository repoOne = createRepository(false);
JCacheAggregationRepository repoTwo = createRepository(false);
repoOne.start();
repoTwo.start();
try {
final String testBody = "This is a thread-safe test body. Sincerely yours, Captain Obvious.";
final String key = "threadSafeKey";
Exchange newEx = createExchangeWithBody(testBody);
Exchange oldEx = repoOne.add(context(), key, newEx);
assertNull(oldEx, "Old exchange should be null.");
final String theNewestBody = "This is the newest test body.";
Exchange theNewestEx = createExchangeWithBody(theNewestBody);
oldEx = repoTwo.add(context(), key, theNewestEx);
assertEquals(newEx.getIn().getBody(), oldEx.getIn().getBody());
} finally {
repoOne.stop();
repoTwo.stop();
}
}
|
@Override
public Optional<Entity> exportEntity(EntityDescriptor entityDescriptor, EntityDescriptorIds entityDescriptorIds) {
final ModelId modelId = entityDescriptor.id();
try {
final Input input = inputService.find(modelId.id());
final InputWithExtractors inputWithExtractors = InputWithExtractors.create(input, inputService.getExtractors(input));
return Optional.of(exportNativeEntity(inputWithExtractors, entityDescriptorIds));
} catch (NotFoundException e) {
return Optional.empty();
}
}
|
@Test
@MongoDBFixtures("InputFacadeTest.json")
public void collectEntity() {
final EntityDescriptor descriptor = EntityDescriptor.create("5adf25294b900a0fdb4e5365", ModelTypes.INPUT_V1);
final EntityDescriptorIds entityDescriptorIds = EntityDescriptorIds.of(descriptor);
final Optional<Entity> collectedEntity = facade.exportEntity(descriptor, entityDescriptorIds);
assertThat(collectedEntity)
.isPresent()
.containsInstanceOf(EntityV1.class);
final EntityV1 entity = (EntityV1) collectedEntity.get();
assertThat(entity.id()).isEqualTo(ModelId.of(entityDescriptorIds.get(descriptor).orElse(null)));
assertThat(entity.type()).isEqualTo(ModelTypes.INPUT_V1);
final InputEntity inputEntity = objectMapper.convertValue(entity.data(), InputEntity.class);
assertThat(inputEntity.title()).isEqualTo(ValueReference.of("Global Random HTTP"));
assertThat(inputEntity.type()).isEqualTo(ValueReference.of("org.graylog2.inputs.random.FakeHttpMessageInput"));
assertThat(inputEntity.global()).isEqualTo(ValueReference.of(true));
assertThat(inputEntity.staticFields()).containsEntry("custom_field", ValueReference.of("foobar"));
assertThat(inputEntity.configuration()).isNotEmpty();
assertThat(inputEntity.extractors()).hasSize(5);
}
|
public static MySQLCommandPacket newInstance(final MySQLCommandPacketType commandPacketType, final MySQLPacketPayload payload,
final ConnectionSession connectionSession) {
switch (commandPacketType) {
case COM_QUIT:
return new MySQLComQuitPacket();
case COM_INIT_DB:
return new MySQLComInitDbPacket(payload);
case COM_FIELD_LIST:
return new MySQLComFieldListPacket(payload);
case COM_QUERY:
return new MySQLComQueryPacket(payload);
case COM_STMT_PREPARE:
return new MySQLComStmtPreparePacket(payload);
case COM_STMT_EXECUTE:
MySQLServerPreparedStatement serverPreparedStatement =
connectionSession.getServerPreparedStatementRegistry().getPreparedStatement(payload.getByteBuf().getIntLE(payload.getByteBuf().readerIndex()));
return new MySQLComStmtExecutePacket(payload, serverPreparedStatement.getSqlStatementContext().getSqlStatement().getParameterCount());
case COM_STMT_SEND_LONG_DATA:
return new MySQLComStmtSendLongDataPacket(payload);
case COM_STMT_RESET:
return new MySQLComStmtResetPacket(payload);
case COM_STMT_CLOSE:
return new MySQLComStmtClosePacket(payload);
case COM_SET_OPTION:
return new MySQLComSetOptionPacket(payload);
case COM_PING:
return new MySQLComPingPacket();
case COM_RESET_CONNECTION:
return new MySQLComResetConnectionPacket();
default:
return new MySQLUnsupportedCommandPacket(commandPacketType);
}
}
|
@Test
void assertNewInstanceWithComDebugPacket() {
assertThat(MySQLCommandPacketFactory.newInstance(MySQLCommandPacketType.COM_DEBUG, payload, connectionSession), instanceOf(MySQLUnsupportedCommandPacket.class));
}
|
public static Map<String, Object> convert(FormData data) {
Map<String, Object> map = new HashMap<>();
for (String key : data) {
if (data.get(key).size() == 1) {
// If the form value is a file, read it as a FileItem; otherwise read it as a String.
if (data.getFirst(key).getFileName() == null) {
String value = data.getFirst(key).getValue();
map.put(key, value);
} else {
FormData.FileItem value = data.getFirst(key).getFileItem();
map.put(key, value);
}
} else if (data.get(key).size() > 1) {
List<Object> list = new ArrayList<>();
for (FormData.FormValue value : data.get(key)) {
// If the form value is a file, read it as a FileItem; otherwise read it as a String.
if (value.getFileName() == null) {
list.add(value.getValue());
} else {
list.add(value.getFileItem());
}
}
map.put(key, list);
}
// ignore size == 0
}
return map;
}
|
@Test
public void shouldToGetConvertedFormDataInAMap() {
String aKey = "aKey";
String aValue = "aValue";
String anotherKey = "anotherKey";
String anotherValue = "anotherValue";
FormData formData = new FormData(99);
formData.add(aKey, aValue);
formData.add(anotherKey, anotherValue);
Map<String, Object> bodyMap = BodyConverter.convert(formData);
Assert.assertEquals(2, bodyMap.size());
Object aConvertedListvalue = bodyMap.get(aKey);
Assert.assertTrue(aConvertedListvalue instanceof String);
Assert.assertEquals(aValue, aConvertedListvalue);
Object anotherListvalues = bodyMap.get(anotherKey);
Assert.assertTrue(anotherListvalues instanceof String);
Assert.assertEquals(anotherValue, anotherListvalues);
}
|
@CanIgnoreReturnValue
public final Ordered containsExactly() {
return containsExactlyEntriesIn(ImmutableMap.of());
}
|
@Test
public void containsExactlyExtraKey() {
ImmutableMap<String, Integer> actual = ImmutableMap.of("jan", 1, "feb", 2, "march", 3);
expectFailureWhenTestingThat(actual).containsExactly("feb", 2, "jan", 1);
assertFailureKeys(
"unexpected keys", "for key", "unexpected value", "---", "expected", "but was");
assertFailureValue("for key", "march");
assertFailureValue("unexpected value", "3");
assertFailureValue("expected", "{feb=2, jan=1}");
assertFailureValue("but was", "{jan=1, feb=2, march=3}");
}
|
public synchronized void userAddFunction(Function f, boolean allowExists) throws UserException {
addFunction(f, false, allowExists);
GlobalStateMgr.getCurrentState().getEditLog().logAddFunction(f);
}
|
@Test
public void testUserAddFunction() throws UserException {
// User adds addIntInt UDF
FunctionName name = new FunctionName(null, "addIntInt");
name.setAsGlobalFunction();
final Type[] argTypes = {Type.INT, Type.INT};
Function f = new Function(name, argTypes, Type.INT, false);
globalFunctionMgr.userAddFunction(f, false);
// User adds addDoubleDouble UDF
FunctionName name2 = new FunctionName(null, "addDoubleDouble");
name2.setAsGlobalFunction();
final Type[] argTypes2 = {Type.DOUBLE, Type.DOUBLE};
Function f2 = new Function(name2, argTypes2, Type.DOUBLE, false);
globalFunctionMgr.userAddFunction(f2, false);
}
|
@Override
public void export(RegisterTypeEnum registerType) {
if (this.exported) {
return;
}
if (getScopeModel().isLifeCycleManagedExternally()) {
// prepare model for reference
getScopeModel().getDeployer().prepare();
} else {
// ensure start module, compatible with old api usage
getScopeModel().getDeployer().start();
}
synchronized (this) {
if (this.exported) {
return;
}
if (!this.isRefreshed()) {
this.refresh();
}
if (this.shouldExport()) {
this.init();
if (shouldDelay()) {
// should register if delay export
doDelayExport();
} else if (Integer.valueOf(-1).equals(getDelay())
&& Boolean.parseBoolean(ConfigurationUtils.getProperty(
getScopeModel(), CommonConstants.DUBBO_MANUAL_REGISTER_KEY, "false"))) {
// should not register by default
doExport(RegisterTypeEnum.MANUAL_REGISTER);
} else {
doExport(registerType);
}
}
}
}
|
@Test
void testMethodConfigWithConfiguredArgumentTypeAndIndex() {
ServiceConfig<DemoServiceImpl> service = new ServiceConfig<>();
service.setInterface(DemoService.class);
service.setRef(new DemoServiceImpl());
service.setProtocol(new ProtocolConfig() {
{
setName("dubbo");
}
});
MethodConfig methodConfig = new MethodConfig();
methodConfig.setName("sayName");
// argument type and index are both configured.
methodConfig.setArguments(Lists.newArrayList(new ArgumentConfig() {
{
setType(String.class.getName());
setIndex(0);
setCallback(false);
}
}));
service.setMethods(Lists.newArrayList(methodConfig));
service.export();
assertFalse(service.getExportedUrls().isEmpty());
assertEquals("false", service.getExportedUrls().get(0).getParameters().get("sayName.0.callback"));
}
|
public static boolean isLocalhostReachable(int port) {
return reachable(null, port);
}
|
@Test
public void shouldReturnFalseIfPortIsNotReachableOnLocalhost() throws Exception {
assertThat(SystemUtil.isLocalhostReachable(9876), is(false));
}
|
public static String escape(String original) {
if (original != null) {
return original.replace("\\", "\\\\").replace("\r", "\\r").replace("\n", "\\n");
} else {
return null;
}
}
|
@Test
public void testEscape() {
assertEquals("Hello\\\\nWorld!", StringUtil.escape("Hello\\nWorld!"));
assertEquals("Hello\\nWorld!", StringUtil.escape("Hello\nWorld!"));
assertEquals("\\r\tHello\\nWorld!", StringUtil.escape("\r\tHello\nWorld!"));
assertEquals("Hello\\\\\\nWorld!", StringUtil.escape("Hello\\\nWorld!"));
assertEquals("Hello\tWorld", StringUtil.escape("Hello\tWorld"));
assertEquals("Hello\\\\World!", StringUtil.escape("Hello\\World!"));
}
|
ProducerListeners listeners() {
return new ProducerListeners(eventListeners.toArray(new HollowProducerEventListener[0]));
}
|
@Test
public void fireIntegrityCheckStartDontStopWhenOneFails() {
long version = 31337;
HollowProducer.ReadState readState = Mockito.mock(HollowProducer.ReadState.class);
Mockito.when(readState.getVersion()).thenReturn(version);
Mockito.doThrow(RuntimeException.class).when(listener).onIntegrityCheckStart(version);
listenerSupport.listeners().fireIntegrityCheckStart(readState);
Mockito.verify(listener).onIntegrityCheckStart(version);
}
|
public Repository getRepo(String serverUrl, String token, String project, String repoSlug) {
HttpUrl url = buildUrl(serverUrl, format("/rest/api/1.0/projects/%s/repos/%s", project, repoSlug));
return doGet(token, url, body -> buildGson().fromJson(body, Repository.class));
}
|
@Test
public void malformed_json() {
server.enqueue(new MockResponse()
.setHeader("Content-Type", "application/json;charset=UTF-8")
.setBody("I'm malformed JSON"));
String serverUrl = server.url("/").toString();
assertThatThrownBy(() -> underTest.getRepo(serverUrl, "token", "", ""))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("Unexpected response from Bitbucket server");
assertThat(String.join(", ", logTester.logs()))
.contains("Unexpected response from Bitbucket server : [I'm malformed JSON]");
}
|
public long lappedCount()
{
return lappedCount.get();
}
|
@Test
void shouldNotBeLappedBeforeReception()
{
assertThat(broadcastReceiver.lappedCount(), is(0L));
}
|
public void parse(InputStream stream, ContentHandler handler, Metadata metadata,
ParseContext context) throws IOException, SAXException, TikaException {
PDFParserConfig localConfig = defaultConfig;
PDFParserConfig userConfig = context.get(PDFParserConfig.class);
if (userConfig != null) {
localConfig = defaultConfig.cloneAndUpdate(userConfig);
}
if (localConfig.isSetKCMS()) {
System.setProperty("sun.java2d.cmm", "sun.java2d.cmm.kcms.KcmsServiceProvider");
}
IncrementalUpdateRecord incomingIncrementalUpdateRecord = context.get(IncrementalUpdateRecord.class);
context.set(IncrementalUpdateRecord.class, null);
initRenderer(localConfig, context);
PDDocument pdfDocument = null;
String password = "";
PDFRenderingState incomingRenderingState = context.get(PDFRenderingState.class);
TikaInputStream tstream = null;
boolean shouldClose = false;
OCRPageCounter prevOCRCounter = context.get(OCRPageCounter.class);
context.set(OCRPageCounter.class, new OCRPageCounter());
try {
if (shouldSpool(localConfig)) {
if (stream instanceof TikaInputStream) {
tstream = (TikaInputStream) stream;
} else {
tstream = TikaInputStream.get(CloseShieldInputStream.wrap(stream));
shouldClose = true;
}
context.set(PDFRenderingState.class, new PDFRenderingState(tstream));
} else {
tstream = TikaInputStream.cast(stream);
}
scanXRefOffsets(localConfig, tstream, metadata, context);
password = getPassword(metadata, context);
MemoryUsageSetting memoryUsageSetting = null;
if (localConfig.getMaxMainMemoryBytes() >= 0) {
memoryUsageSetting =
MemoryUsageSetting.setupMixed(localConfig.getMaxMainMemoryBytes());
} else {
memoryUsageSetting = MemoryUsageSetting.setupMainMemoryOnly();
}
pdfDocument = getPDDocument(stream, tstream, password,
memoryUsageSetting.streamCache, metadata, context);
boolean hasCollection = hasCollection(pdfDocument, metadata);
checkEncryptedPayload(pdfDocument, hasCollection, localConfig);
boolean hasXFA = hasXFA(pdfDocument, metadata);
boolean hasMarkedContent = hasMarkedContent(pdfDocument, metadata);
extractMetadata(pdfDocument, metadata, context);
extractSignatures(pdfDocument, metadata);
checkIllustrator(pdfDocument, metadata);
AccessChecker checker = localConfig.getAccessChecker();
checker.check(metadata);
renderPagesBeforeParse(tstream, handler, metadata, context, localConfig);
if (handler != null) {
if (shouldHandleXFAOnly(hasXFA, localConfig)) {
handleXFAOnly(pdfDocument, handler, metadata, context);
} else if (localConfig.getOcrStrategy()
.equals(PDFParserConfig.OCR_STRATEGY.OCR_ONLY)) {
OCR2XHTML.process(pdfDocument, handler, context, metadata,
localConfig);
} else if (hasMarkedContent && localConfig.isExtractMarkedContent()) {
PDFMarkedContent2XHTML
.process(pdfDocument, handler, context, metadata,
localConfig);
} else {
PDF2XHTML.process(pdfDocument, handler, context, metadata,
localConfig);
}
}
} catch (InvalidPasswordException e) {
metadata.set(PDF.IS_ENCRYPTED, "true");
throw new EncryptedDocumentException(e);
} finally {
metadata.set(OCR_PAGE_COUNT, context.get(OCRPageCounter.class).getCount());
context.set(OCRPageCounter.class, prevOCRCounter);
//reset the incrementalUpdateRecord even if null
context.set(IncrementalUpdateRecord.class, incomingIncrementalUpdateRecord);
PDFRenderingState currState = context.get(PDFRenderingState.class);
try {
if (currState != null && currState.getRenderResults() != null) {
currState.getRenderResults().close();
}
if (pdfDocument != null) {
pdfDocument.close();
}
} finally {
//replace the one that was here
context.set(PDFRenderingState.class, incomingRenderingState);
if (shouldClose && tstream != null) {
tstream.close();
}
}
}
}
|
@Test
public void testSingleCloseDoc() throws Exception {
//TIKA-1341
Metadata m = new Metadata();
ParseContext c = new ParseContext();
ContentHandler h = new EventCountingHandler();
try (InputStream is = getResourceAsStream("/test-documents/testPDFTripleLangTitle.pdf")) {
AUTO_DETECT_PARSER.parse(is, h, m, c);
}
assertEquals(1, ((EventCountingHandler) h).getEndDocument());
}
|
@GetMapping
@Secured(action = ActionTypes.READ, signType = SignType.CONFIG)
public Result<ConfigHistoryInfo> getConfigHistoryInfo(@RequestParam("dataId") String dataId,
@RequestParam("group") String group,
@RequestParam(value = "namespaceId", required = false, defaultValue = StringUtils.EMPTY) String namespaceId,
@RequestParam("nid") Long nid) throws AccessException, NacosApiException {
ConfigHistoryInfo configHistoryInfo;
try {
//fix issue #9783
namespaceId = NamespaceUtil.processNamespaceParameter(namespaceId);
configHistoryInfo = historyService.getConfigHistoryInfo(dataId, group, namespaceId, nid);
} catch (DataAccessException e) {
throw new NacosApiException(HttpStatus.NOT_FOUND.value(), ErrorCode.RESOURCE_NOT_FOUND,
"certain config history for nid = " + nid + " not exist");
}
return Result.success(configHistoryInfo);
}
|
@Test
void testGetConfigHistoryInfo() throws Exception {
ConfigHistoryInfo configHistoryInfo = new ConfigHistoryInfo();
configHistoryInfo.setDataId(TEST_DATA_ID);
configHistoryInfo.setGroup(TEST_GROUP);
configHistoryInfo.setContent(TEST_CONTENT);
configHistoryInfo.setTenant(TEST_NAMESPACE_ID);
configHistoryInfo.setCreatedTime(new Timestamp(new Date().getTime()));
configHistoryInfo.setLastModifiedTime(new Timestamp(new Date().getTime()));
when(historyService.getConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, 1L)).thenReturn(configHistoryInfo);
Result<ConfigHistoryInfo> result = historyControllerV2.getConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, 1L);
verify(historyService).getConfigHistoryInfo(TEST_DATA_ID, TEST_GROUP, TEST_NAMESPACE_ID, 1L);
ConfigHistoryInfo resConfigHistoryInfo = result.getData();
assertEquals(ErrorCode.SUCCESS.getCode(), result.getCode());
assertEquals(configHistoryInfo.getDataId(), resConfigHistoryInfo.getDataId());
assertEquals(configHistoryInfo.getGroup(), resConfigHistoryInfo.getGroup());
assertEquals(configHistoryInfo.getContent(), resConfigHistoryInfo.getContent());
}
|
public B protocolIds(String protocolIds) {
this.protocolIds = protocolIds;
return getThis();
}
|
@Test
void protocolIds() {
ServiceBuilder builder = new ServiceBuilder();
builder.protocolIds("protocolIds");
Assertions.assertEquals("protocolIds", builder.build().getProtocolIds());
}
|
@Override
public <T> T persist(T detachedObject) {
Map<Object, Object> alreadyPersisted = new HashMap<Object, Object>();
return persist(detachedObject, alreadyPersisted, RCascadeType.PERSIST);
}
|
@Test
public void testInheritedREntity() {
Dog d = new Dog("Fido");
d.setBreed("lab");
d = redisson.getLiveObjectService().persist(d);
assertThat(d.getName()).isEqualTo("Fido");
assertThat(d.getBreed()).isEqualTo("lab");
}
|
@ApiOperation(value = "Create Or update Tenant (saveTenant)",
notes = "Create or update the Tenant. When creating tenant, platform generates Tenant Id as " + UUID_WIKI_LINK +
"Default Rule Chain and Device profile are also generated for the new tenants automatically. " +
"The newly created Tenant Id will be present in the response. " +
"Specify existing Tenant Id id to update the Tenant. " +
"Referencing non-existing Tenant Id will cause 'Not Found' error." +
"Remove 'id', 'tenantId' from the request body example (below) to create new Tenant entity." +
SYSTEM_AUTHORITY_PARAGRAPH)
@PreAuthorize("hasAuthority('SYS_ADMIN')")
@RequestMapping(value = "/tenant", method = RequestMethod.POST)
@ResponseBody
public Tenant saveTenant(@Parameter(description = "A JSON value representing the tenant.")
@RequestBody Tenant tenant) throws Exception {
checkEntity(tenant.getId(), tenant, Resource.TENANT);
return tbTenantService.save(tenant);
}
|
@Test
public void testSaveTenant() throws Exception {
loginSysAdmin();
Tenant tenant = new Tenant();
tenant.setTitle("My tenant");
Mockito.reset(tbClusterService);
Tenant savedTenant = saveTenant(tenant);
Assert.assertNotNull(savedTenant);
Assert.assertNotNull(savedTenant.getId());
Assert.assertTrue(savedTenant.getCreatedTime() > 0);
Assert.assertEquals(tenant.getTitle(), savedTenant.getTitle());
testBroadcastEntityStateChangeEventTimeManyTimeTenant(savedTenant, ComponentLifecycleEvent.CREATED, 1);
savedTenant.setTitle("My new tenant");
savedTenant = saveTenant(savedTenant);
Tenant foundTenant = doGet("/api/tenant/" + savedTenant.getId().getId().toString(), Tenant.class);
Assert.assertEquals(foundTenant.getTitle(), savedTenant.getTitle());
testBroadcastEntityStateChangeEventTimeManyTimeTenant(savedTenant, ComponentLifecycleEvent.UPDATED, 1);
deleteTenant(savedTenant.getId());
testBroadcastEntityStateChangeEventTimeManyTimeTenant(savedTenant, ComponentLifecycleEvent.DELETED, 1);
}
|
@Override
public ProtobufSystemInfo.Section toProtobuf() {
ProtobufSystemInfo.Section.Builder protobuf = ProtobufSystemInfo.Section.newBuilder();
protobuf.setName("Bundled");
for (PluginInfo plugin : repository.getPluginsInfoByType(PluginType.BUNDLED)) {
String label = "";
Version version = plugin.getVersion();
if (version != null) {
label = version.getName() + " ";
}
label += String.format("[%s]", plugin.getName());
setAttribute(protobuf, plugin.getKey(), label);
}
return protobuf.build();
}
|
@Test
public void toProtobuf_given3BundledPlugins_returnThree() {
when(repo.getPluginsInfoByType(PluginType.BUNDLED)).thenReturn(Arrays.asList(
new PluginInfo("java")
.setName("Java")
.setVersion(Version.create("20.0")),
new PluginInfo("c++")
.setName("C++")
.setVersion(Version.create("1.0.2")),
new PluginInfo("no-version")
.setName("No Version")));
ProtobufSystemInfo.Section section = underTest.toProtobuf();
assertThatAttributeIs(section, "java", "20.0 [Java]");
assertThatAttributeIs(section, "c++", "1.0.2 [C++]");
assertThatAttributeIs(section, "no-version", "[No Version]");
}
|
public @CheckForNull R search(final int n, final Direction d) {
switch (d) {
case EXACT:
return getByNumber(n);
case ASC:
for (int m : numberOnDisk) {
if (m < n) {
// TODO could be made more efficient with numberOnDisk.find
continue;
}
R r = getByNumber(m);
if (r != null) {
return r;
}
}
return null;
case DESC:
// TODO again could be made more efficient
ListIterator<Integer> iterator = numberOnDisk.listIterator(numberOnDisk.size());
while (iterator.hasPrevious()) {
int m = iterator.previous();
if (m > n) {
continue;
}
R r = getByNumber(m);
if (r != null) {
return r;
}
}
return null;
default:
throw new AssertionError();
}
}
|
@Issue("JENKINS-22681")
@Test public void exactSearchShouldNotReload() throws Exception {
FakeMap m = localBuilder.add(1).add(2).make();
assertNull(m.search(0, Direction.EXACT));
Build a = m.search(1, Direction.EXACT);
a.asserts(1);
Build b = m.search(2, Direction.EXACT);
b.asserts(2);
assertNull(m.search(0, Direction.EXACT));
assertSame(a, m.search(1, Direction.EXACT));
assertSame(b, m.search(2, Direction.EXACT));
assertNull(m.search(3, Direction.EXACT));
assertNull(m.search(0, Direction.EXACT));
assertSame(a, m.search(1, Direction.EXACT));
assertSame("#2 should not have been reloaded by searching for #3", b, m.search(2, Direction.EXACT));
assertNull(m.search(3, Direction.EXACT));
}
|
public DirectoryEntry lookUp(
File workingDirectory, JimfsPath path, Set<? super LinkOption> options) throws IOException {
checkNotNull(path);
checkNotNull(options);
DirectoryEntry result = lookUp(workingDirectory, path, options, 0);
if (result == null) {
// an intermediate file in the path did not exist or was not a directory
throw new NoSuchFileException(path.toString());
}
return result;
}
|
@Test
public void testLookup_absolute_notExists() throws IOException {
try {
lookup("/a/b");
fail();
} catch (NoSuchFileException expected) {
}
try {
lookup("/work/one/foo/bar");
fail();
} catch (NoSuchFileException expected) {
}
try {
lookup("$c/d");
fail();
} catch (NoSuchFileException expected) {
}
try {
lookup("$a/b/c/d/e");
fail();
} catch (NoSuchFileException expected) {
}
}
|
public static byte[] encodeObjectIdentifier(String oid) {
try (final ByteArrayOutputStream bos = new ByteArrayOutputStream(oid.length() / 3 + 1)) {
encodeObjectIdentifier(oid, bos);
return bos.toByteArray();
} catch (IOException e) {
throw new Asn1Exception("Unexpected IO exception", e);
}
}
|
@Test
public void encodeObjectIdentifierWithDoubleFirst() {
assertArrayEquals(new byte[] { (byte) 0x81, 5 }, Asn1Utils.encodeObjectIdentifier("2.53"));
}
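A quick sanity check on the expected bytes above, using the standard BER rule for the first two OID arcs (40 * arc1 + arc2, emitted in base-128 with the high bit marking continuation bytes): 40 * 2 + 53 = 133 = 1 * 128 + 5, i.e. the two bytes 0x81 0x05. The method below is an illustrative sketch of that arithmetic only, not part of Asn1Utils.
static byte[] encodeTwoArcOid(int arc1, int arc2) {
    // e.g. "2.53": 40 * 2 + 53 = 133 = 1 * 128 + 5 -> bytes 0x81, 0x05
    int value = 40 * arc1 + arc2;
    java.util.ArrayDeque<Byte> bytes = new java.util.ArrayDeque<>();
    bytes.addFirst((byte) (value & 0x7F)); // last byte: high bit clear
    for (value >>>= 7; value > 0; value >>>= 7) {
        bytes.addFirst((byte) ((value & 0x7F) | 0x80)); // continuation byte: high bit set
    }
    byte[] result = new byte[bytes.size()];
    int i = 0;
    for (byte b : bytes) {
        result[i++] = b;
    }
    return result;
}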
|
@Override
public abstract String toString();
|
@Test
public void testFrom_toString() {
assertThat(STRING_PREFIX_EQUALITY.toString()).isEqualTo("starts with");
}
|
@Override
public Mono<GetVersionedProfileResponse> getVersionedProfile(final GetVersionedProfileRequest request) {
final AuthenticatedDevice authenticatedDevice = AuthenticationUtil.requireAuthenticatedDevice();
final ServiceIdentifier targetIdentifier =
ServiceIdentifierUtil.fromGrpcServiceIdentifier(request.getAccountIdentifier());
if (targetIdentifier.identityType() != IdentityType.ACI) {
throw Status.INVALID_ARGUMENT.withDescription("Expected ACI service identifier").asRuntimeException();
}
return validateRateLimitAndGetAccount(authenticatedDevice.accountIdentifier(), targetIdentifier)
.flatMap(account -> ProfileGrpcHelper.getVersionedProfile(account, profilesManager, request.getVersion()));
}
|
@Test
void getVersionedProfileRatelimited() {
final Duration retryAfterDuration = MockUtils.updateRateLimiterResponseToFail(rateLimiter, AUTHENTICATED_ACI, Duration.ofMinutes(7), false);
final GetVersionedProfileRequest request = GetVersionedProfileRequest.newBuilder()
.setAccountIdentifier(ServiceIdentifier.newBuilder()
.setIdentityType(IdentityType.IDENTITY_TYPE_ACI)
.setUuid(ByteString.copyFrom(UUIDUtil.toBytes(UUID.randomUUID())))
.build())
.setVersion("someVersion")
.build();
assertRateLimitExceeded(retryAfterDuration, () -> authenticatedServiceStub().getVersionedProfile(request), accountsManager, profilesManager);
}
|
@Override
public String name() {
return name;
}
|
@Test
public void testSetNamespaceOwnershipNoop() throws TException, IOException {
setNamespaceOwnershipAndVerify(
"set_ownership_noop_1",
ImmutableMap.of(HiveCatalog.HMS_DB_OWNER, "some_individual_owner"),
ImmutableMap.of(
HiveCatalog.HMS_DB_OWNER,
"some_individual_owner",
HiveCatalog.HMS_DB_OWNER_TYPE,
PrincipalType.USER.name()),
"some_individual_owner",
PrincipalType.USER,
"some_individual_owner",
PrincipalType.USER);
setNamespaceOwnershipAndVerify(
"set_ownership_noop_2",
ImmutableMap.of(
HiveCatalog.HMS_DB_OWNER,
"some_group_owner",
HiveCatalog.HMS_DB_OWNER_TYPE,
PrincipalType.GROUP.name()),
ImmutableMap.of(
HiveCatalog.HMS_DB_OWNER,
"some_group_owner",
HiveCatalog.HMS_DB_OWNER_TYPE,
PrincipalType.GROUP.name()),
"some_group_owner",
PrincipalType.GROUP,
"some_group_owner",
PrincipalType.GROUP);
setNamespaceOwnershipAndVerify(
"set_ownership_noop_3",
ImmutableMap.of(),
ImmutableMap.of(),
UserGroupInformation.getCurrentUser().getShortUserName(),
PrincipalType.USER,
UserGroupInformation.getCurrentUser().getShortUserName(),
PrincipalType.USER);
setNamespaceOwnershipAndVerify(
"set_ownership_noop_4",
ImmutableMap.of(
HiveCatalog.HMS_DB_OWNER,
"some_group_owner",
HiveCatalog.HMS_DB_OWNER_TYPE,
PrincipalType.GROUP.name()),
ImmutableMap.of("unrelated_prop_1", "value_1", "unrelated_prop_2", "value_2"),
"some_group_owner",
PrincipalType.GROUP,
"some_group_owner",
PrincipalType.GROUP);
}
|
public static <T> T[] checkNonEmpty(T[] array, String name) {
//No String concatenation for check
if (checkNotNull(array, name).length == 0) {
throw new IllegalArgumentException("Param '" + name + "' must not be empty");
}
return array;
}
|
@Test
public void testCheckNonEmptyCharArrayString() {
Exception actualEx = null;
try {
ObjectUtil.checkNonEmpty((char[]) NULL_OBJECT, NULL_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
assertTrue(actualEx instanceof NullPointerException, TEST_RESULT_EXTYPE_NOK);
actualEx = null;
try {
ObjectUtil.checkNonEmpty((char[]) NON_NULL_FILLED_CHAR_ARRAY, NON_NULL_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNull(actualEx, TEST_RESULT_NULLEX_NOK);
actualEx = null;
try {
ObjectUtil.checkNonEmpty((char[]) NON_NULL_EMPTY_CHAR_ARRAY, NON_NULL_EMPTY_NAME);
} catch (Exception e) {
actualEx = e;
}
assertNotNull(actualEx, TEST_RESULT_NULLEX_OK);
assertTrue(actualEx instanceof IllegalArgumentException, TEST_RESULT_EXTYPE_NOK);
}
|
public void acknowledgePages(long sequenceId)
{
checkArgument(sequenceId >= 0, "Invalid sequence id");
// Fast path early-return without synchronizing
if (destroyed.get() || sequenceId < currentSequenceId.get()) {
return;
}
ImmutableList.Builder<SerializedPageReference> removedPages;
synchronized (this) {
if (destroyed.get()) {
return;
}
// if pages have already been acknowledged, just ignore this
long oldCurrentSequenceId = currentSequenceId.get();
if (sequenceId < oldCurrentSequenceId) {
return;
}
int pagesToRemove = toIntExact(sequenceId - oldCurrentSequenceId);
checkArgument(pagesToRemove <= pages.size(), "Invalid sequence id");
removedPages = ImmutableList.builderWithExpectedSize(pagesToRemove);
long bytesRemoved = 0;
for (int i = 0; i < pagesToRemove; i++) {
SerializedPageReference removedPage = pages.removeFirst();
removedPages.add(removedPage);
bytesRemoved += removedPage.getRetainedSizeInBytes();
}
// update current sequence id
verify(currentSequenceId.compareAndSet(oldCurrentSequenceId, oldCurrentSequenceId + pagesToRemove));
// update memory tracking
verify(bufferedBytes.addAndGet(-bytesRemoved) >= 0);
}
// dereference outside of synchronized to avoid making a callback while holding a lock
dereferencePages(removedPages.build(), onPagesReleased);
}
|
@Test
public void testBufferResults()
{
ClientBuffer buffer = new ClientBuffer(TASK_INSTANCE_ID, BUFFER_ID, NOOP_RELEASE_LISTENER);
long totalSizeOfPagesInBytes = 0;
for (int i = 0; i < 3; i++) {
Page page = createPage(i);
SerializedPageReference pageReference = new SerializedPageReference(PAGES_SERDE.serialize(page), 1, Lifespan.taskWide());
totalSizeOfPagesInBytes = totalSizeOfPagesInBytes + pageReference.getSerializedPage().getRetainedSizeInBytes();
addPage(buffer, page);
}
// Everything buffered
assertBufferInfo(buffer, 3, 0, totalSizeOfPagesInBytes);
BufferResult bufferResult = getBufferResult(buffer, 0, sizeOfPages(1), NO_WAIT);
long remainingBytes = totalSizeOfPagesInBytes - bufferResult.getBufferedBytes();
assertEquals(bufferResult.getBufferedBytes(), sizeOfPages(1).toBytes());
assertBufferInfo(buffer, 3, 0, totalSizeOfPagesInBytes);
buffer.acknowledgePages(bufferResult.getNextToken());
assertBufferInfo(buffer, 2, 1, remainingBytes);
}
|
@Override
public synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback) {
putConnectorConfig(connName, config, null, allowReplace, callback);
}
|
@Test
public void testCreateConnectorWithStoppedInitialState() throws Exception {
initialize(true);
Map<String, String> config = connectorConfig(SourceSink.SINK);
expectConfigValidation(SourceSink.SINK, config);
// Only the connector should be created; we expect no tasks to be spawned for a connector created with a paused or stopped initial state
mockStartConnector(config, null, TargetState.STOPPED, null);
when(worker.isRunning(CONNECTOR_NAME)).thenReturn(true);
when(herder.connectorType(any())).thenReturn(ConnectorType.SINK);
herder.putConnectorConfig(CONNECTOR_NAME, config, TargetState.STOPPED, false, createCallback);
Herder.Created<ConnectorInfo> connectorInfo = createCallback.get(WAIT_TIME_MS, TimeUnit.MILLISECONDS);
assertEquals(
new ConnectorInfo(CONNECTOR_NAME, connectorConfig(SourceSink.SINK), Collections.emptyList(), ConnectorType.SINK),
connectorInfo.result()
);
verify(loaderSwap).close();
}
|
@Override
public void setConfigAttributes(Object attributes) {
if (attributes == null) {
return;
}
super.setConfigAttributes(attributes);
Map map = (Map) attributes;
if (map.containsKey(URL)) {
this.url = new HgUrlArgument((String) map.get(URL));
}
if (map.containsKey("userName")) {
this.userName = (String) map.get("userName");
}
if (map.containsKey(PASSWORD_CHANGED) && "1".equals(map.get(PASSWORD_CHANGED))) {
String passwordToSet = (String) map.get(PASSWORD);
resetPassword(passwordToSet);
}
if (map.containsKey(BRANCH)) {
setBranchAttribute((String) map.get(BRANCH));
}
}
|
@Test
void setConfigAttributes_shouldUpdatePasswordWhenPasswordChangedBooleanChanged() throws Exception {
HgMaterialConfig hgMaterialConfig = hg();
Map<String, String> map = new HashMap<>();
map.put(HgMaterialConfig.PASSWORD, "secret");
map.put(HgMaterialConfig.PASSWORD_CHANGED, "1");
hgMaterialConfig.setConfigAttributes(map);
assertThat((String) ReflectionUtil.getField(hgMaterialConfig, "password")).isNull();
assertThat(hgMaterialConfig.getPassword()).isEqualTo("secret");
assertThat(hgMaterialConfig.getEncryptedPassword()).isEqualTo(new GoCipher().encrypt("secret"));
// Don't change
map.put(HgMaterialConfig.PASSWORD, "Hehehe");
map.put(HgMaterialConfig.PASSWORD_CHANGED, "0");
hgMaterialConfig.setConfigAttributes(map);
assertThat((String) ReflectionUtil.getField(hgMaterialConfig, "password")).isNull();
assertThat(hgMaterialConfig.getPassword()).isEqualTo("secret");
assertThat(hgMaterialConfig.getEncryptedPassword()).isEqualTo(new GoCipher().encrypt("secret"));
map.put(HgMaterialConfig.PASSWORD, "");
map.put(HgMaterialConfig.PASSWORD_CHANGED, "1");
hgMaterialConfig.setConfigAttributes(map);
assertThat(hgMaterialConfig.getPassword()).isNull();
assertThat(hgMaterialConfig.getEncryptedPassword()).isNull();
}
|
Plugin create(Options.Plugin plugin) {
try {
return instantiate(plugin.pluginString(), plugin.pluginClass(), plugin.argument());
} catch (IOException | URISyntaxException e) {
throw new CucumberException(e);
}
}
|
@Test
void instantiates_usage_plugin_without_file_arg() {
PluginOption option = parse("usage");
plugin = fc.create(option);
assertThat(plugin.getClass(), is(equalTo(UsageFormatter.class)));
}
|
public List<ChangeStreamRecord> toChangeStreamRecords(
PartitionMetadata partition,
ChangeStreamResultSet resultSet,
ChangeStreamResultSetMetadata resultSetMetadata) {
if (this.isPostgres()) {
// In PostgreSQL, change stream records are returned as JSONB.
return Collections.singletonList(
toChangeStreamRecordJson(partition, resultSet.getPgJsonb(0), resultSetMetadata));
}
// In GoogleSQL, change stream records are returned as an array of structs.
return resultSet.getCurrentRowAsStruct().getStructList(0).stream()
.flatMap(struct -> toChangeStreamRecord(partition, struct, resultSetMetadata))
.collect(Collectors.toList());
}
|
@Test
public void testMappingUpdateStructRowNewValuesToDataChangeRecord() {
final DataChangeRecord dataChangeRecord =
new DataChangeRecord(
"partitionToken",
Timestamp.ofTimeSecondsAndNanos(10L, 20),
"serverTransactionId",
true,
"1",
"tableName",
Arrays.asList(
new ColumnType("column1", new TypeCode("{\"code\":\"INT664\"}"), true, 1L),
new ColumnType("column2", new TypeCode("{\"code\":\"BYTES\"}"), false, 2L)),
Collections.singletonList(
new Mod("{\"column1\":\"value1\"}", null, "{\"column2\":\"newValue2\"}")),
ModType.UPDATE,
ValueCaptureType.NEW_VALUES,
10L,
2L,
"transactionTag",
true,
null);
final Struct jsonFieldsStruct = recordsToStructWithJson(dataChangeRecord);
ChangeStreamResultSet resultSet = mock(ChangeStreamResultSet.class);
when(resultSet.getCurrentRowAsStruct()).thenReturn(jsonFieldsStruct);
assertEquals(
Collections.singletonList(dataChangeRecord),
mapper.toChangeStreamRecords(partition, resultSet, resultSetMetadata));
}
|
@Override
public Long put(final K key, final Long value)
{
return valOrNull(put(key, value.longValue()));
}
|
@Test
public void putShouldReturnOldValue() {
map.put("1", 1L);
assertEquals(1L, map.put("1", 2L));
}
|
public static String maskString(String input, String key) {
if(input == null)
return null;
String output = input;
Map<String, Object> stringConfig = (Map<String, Object>) config.get(MASK_TYPE_STRING);
if (stringConfig != null) {
Map<String, Object> keyConfig = (Map<String, Object>) stringConfig.get(key);
if (keyConfig != null) {
Set<String> patterns = keyConfig.keySet();
for (String pattern : patterns) {
output = output.replaceAll(pattern, (String) keyConfig.get(pattern));
}
}
}
return output;
}
|
@Test
public void testMaskString() {
String url1 = "/v1/customer?sin=123456789&password=secret&number=1234567890123456";
String output = Mask.maskString(url1, "uri");
System.out.println("ouput = " + output);
Assert.assertEquals("/v1/customer?sin=masked&password=******&number=----------------", output);
}
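The masking asserted above depends entirely on the pattern map that maskString finds under the string mask type for the "uri" key; those patterns come from the mask configuration, which is not shown here. A hypothetical map of the shape the method iterates over (regex pattern -> replacement), chosen only so the replacements reproduce the asserted output:
// Hypothetical "uri" pattern map (regex -> replacement); illustrative only,
// not the actual mask configuration.
java.util.Map<String, Object> uriPatterns = new java.util.LinkedHashMap<>();
uriPatterns.put("sin=[0-9]+", "sin=masked");
uriPatterns.put("password=[^&]+", "password=******");
uriPatterns.put("number=[0-9]+", "number=----------------");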
|
@Override
public String getOriginalHost() {
try {
if (originalHost == null) {
originalHost = getOriginalHost(getHeaders(), getServerName());
}
return originalHost;
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
}
|
@Test
void testGetOriginalHost_immutable() {
HttpQueryParams queryParams = new HttpQueryParams();
Headers headers = new Headers();
headers.add("Host", "blah.netflix.com");
request = new HttpRequestMessageImpl(
new SessionContext(),
"HTTP/1.1",
"POST",
"/some/where",
queryParams,
headers,
"192.168.0.2",
"https",
7002,
"localhost",
new SocketAddress() {},
true);
// Check it's the same value 2nd time.
assertEquals("blah.netflix.com", request.getOriginalHost());
assertEquals("blah.netflix.com", request.getOriginalHost());
// Update the Host header value and ensure the result didn't change.
headers.set("Host", "testOriginalHost2");
assertEquals("blah.netflix.com", request.getOriginalHost());
}
|
public FifoOrderingPolicy() {
List<Comparator<SchedulableEntity>> comparators =
new ArrayList<Comparator<SchedulableEntity>>();
comparators.add(new PriorityComparator());
comparators.add(new FifoComparator());
this.comparator = new CompoundComparator(comparators);
this.schedulableEntities = new ConcurrentSkipListSet<S>(comparator);
}
|
@Test
public void testFifoOrderingPolicy() {
FifoOrderingPolicy<MockSchedulableEntity> policy =
new FifoOrderingPolicy<MockSchedulableEntity>();
MockSchedulableEntity r1 = new MockSchedulableEntity();
MockSchedulableEntity r2 = new MockSchedulableEntity();
assertEquals("The comparator should return 0 because the entities are created with " +
"the same values.", 0,
policy.getComparator().compare(r1, r2));
r1.setSerial(1);
assertEquals("The lhs entity has a larger serial, the comparator return " +
"value should be 1.", 1, policy.getComparator().compare(r1, r2));
r2.setSerial(2);
Assert.assertEquals("The rhs entity has a larger serial, the comparator return " +
"value should be -1.", -1, policy.getComparator().compare(r1, r2));
}
|
@Override
public Object getValue() {
return serializationService.toObject(value);
}
|
@Test
public void getValue_caching() {
QueryableEntry entry = createEntry("key", "value");
assertThat(entry.getValue()).isNotSameAs(entry.getValue());
}
|
static int determineCoordinatorReservoirSize(int numPartitions) {
int reservoirSize = numPartitions * COORDINATOR_TARGET_PARTITIONS_MULTIPLIER;
if (reservoirSize < COORDINATOR_MIN_RESERVOIR_SIZE) {
// adjust it up and still make reservoirSize divisible by numPartitions
int remainder = COORDINATOR_MIN_RESERVOIR_SIZE % numPartitions;
reservoirSize = COORDINATOR_MIN_RESERVOIR_SIZE + (numPartitions - remainder);
} else if (reservoirSize > COORDINATOR_MAX_RESERVOIR_SIZE) {
// adjust it down and still make reservoirSize divisible by numPartitions
int remainder = COORDINATOR_MAX_RESERVOIR_SIZE % numPartitions;
reservoirSize = COORDINATOR_MAX_RESERVOIR_SIZE - remainder;
}
return reservoirSize;
}
|
@Test
public void testCoordinatorReservoirSize() {
// adjusted to over min threshold of 10_000 and is divisible by number of partitions (3)
assertThat(SketchUtil.determineCoordinatorReservoirSize(3)).isEqualTo(10_002);
// already within range: number of partitions (123) times the target multiplier of 100
assertThat(SketchUtil.determineCoordinatorReservoirSize(123)).isEqualTo(12_300);
// adjusted to below max threshold of 1_000_000 and is divisible by number of partitions (3)
assertThat(SketchUtil.determineCoordinatorReservoirSize(10_123))
.isEqualTo(1_000_000 - (1_000_000 % 10_123));
}
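A short re-derivation of the three adjusted sizes asserted above. The concrete constants (target multiplier 100, minimum 10_000, maximum 1_000_000) are assumptions read off this test rather than quoted from SketchUtil, and the method name is illustrative.
// Re-derives the expected values above under the assumed constants.
static int expectedCoordinatorReservoirSize(int numPartitions) {
    int size = numPartitions * 100;
    if (size < 10_000) {
        // round up so the result stays divisible by numPartitions: 3 -> 10_000 + (3 - 1) = 10_002
        return 10_000 + (numPartitions - (10_000 % numPartitions));
    }
    if (size > 1_000_000) {
        // round down so the result stays divisible by numPartitions: 10_123 -> 992_054 (98 * 10_123)
        return 1_000_000 - (1_000_000 % numPartitions);
    }
    return size; // 123 -> 12_300
}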
|
public void createApplication(ApplicationId id) {
database().createApplication(id);
}
|
@Test
public void require_that_application_ids_can_be_written() throws Exception {
TenantApplications repo = createZKAppRepo();
ApplicationId myapp = createApplicationId("myapp");
repo.createApplication(myapp);
writeActiveTransaction(repo, myapp, 3);
String path = TenantRepository.getApplicationsPath(tenantName).append(myapp.serializedForm()).getAbsolute();
assertNotNull(curatorFramework.checkExists().forPath(path));
assertEquals("{\"applicationId\":\"tenant:myapp:myinst\",\"activeSession\":3,\"lastDeployedSession\":3}",
Utf8.toString(curatorFramework.getData().forPath(path)));
writeActiveTransaction(repo, myapp, 5);
assertNotNull(curatorFramework.checkExists().forPath(path));
assertEquals("{\"applicationId\":\"tenant:myapp:myinst\",\"activeSession\":5,\"lastDeployedSession\":5}",
Utf8.toString(curatorFramework.getData().forPath(path)));
}
|
public QueryConfiguration applyOverrides(QueryConfigurationOverrides overrides)
{
Map<String, String> sessionProperties;
if (overrides.getSessionPropertiesOverrideStrategy() == OVERRIDE) {
sessionProperties = new HashMap<>(overrides.getSessionPropertiesOverride());
}
else {
sessionProperties = new HashMap<>(this.sessionProperties);
if (overrides.getSessionPropertiesOverrideStrategy() == SUBSTITUTE) {
sessionProperties.putAll(overrides.getSessionPropertiesOverride());
}
}
overrides.getSessionPropertiesToRemove().forEach(sessionProperties::remove);
return new QueryConfiguration(
overrides.getCatalogOverride().orElse(catalog),
overrides.getSchemaOverride().orElse(schema),
Optional.ofNullable(overrides.getUsernameOverride().orElse(username.orElse(null))),
Optional.ofNullable(overrides.getPasswordOverride().orElse(password.orElse(null))),
Optional.of(sessionProperties),
isReusableTable,
Optional.of(partitions));
}
|
@Test
public void testEmptyOverrides()
{
assertEquals(CONFIGURATION_1.applyOverrides(new QueryConfigurationOverridesConfig()), CONFIGURATION_1);
assertEquals(CONFIGURATION_2.applyOverrides(new QueryConfigurationOverridesConfig()), CONFIGURATION_2);
}
|
Subscription addSubscription(final String channel, final int streamId)
{
return addSubscription(channel, streamId, defaultAvailableImageHandler, defaultUnavailableImageHandler);
}
|
@Test
void shouldFailToAddSubscriptionOnMediaDriverError()
{
whenReceiveBroadcastOnMessage(
ControlProtocolEvents.ON_ERROR,
errorMessageBuffer,
(buffer) ->
{
errorResponse.errorCode(INVALID_CHANNEL);
errorResponse.errorMessage("invalid channel");
errorResponse.offendingCommandCorrelationId(CORRELATION_ID);
return errorResponse.length();
});
assertThrows(RegistrationException.class, () -> conductor.addSubscription(CHANNEL, STREAM_ID_1));
}
|
public void isInStrictOrder() {
isInStrictOrder(Ordering.natural());
}
|
@Test
public void iterableIsInStrictOrderWithComparatorFailure() {
expectFailureWhenTestingThat(asList("1", "2", "2", "10")).isInStrictOrder(COMPARE_AS_DECIMAL);
assertFailureKeys(
"expected to be in strict order", "but contained", "followed by", "full contents");
assertFailureValue("but contained", "2");
assertFailureValue("followed by", "2");
assertFailureValue("full contents", "[1, 2, 2, 10]");
}
|
int run() {
final Map<String, String> configProps = options.getConfigFile()
.map(Ksql::loadProperties)
.orElseGet(Collections::emptyMap);
final Map<String, String> sessionVariables = options.getVariables();
try (KsqlRestClient restClient = buildClient(configProps)) {
try (Cli cli = cliBuilder.build(
options.getStreamedQueryRowLimit(),
options.getStreamedQueryTimeoutMs(),
options.getOutputFormat(),
restClient)
) {
// Add CLI variables If defined by parameters
cli.addSessionVariables(sessionVariables);
if (options.getExecute().isPresent()) {
return cli.runCommand(options.getExecute().get());
} else if (options.getScriptFile().isPresent()) {
final File scriptFile = new File(options.getScriptFile().get());
if (scriptFile.exists() && scriptFile.isFile()) {
return cli.runScript(scriptFile.getPath());
} else {
throw new KsqlException("No such script file: " + scriptFile.getPath());
}
} else {
return cli.runInteractively();
}
}
}
}
|
@Test
public void shouldRunScriptFileWhenFileOptionIsUsed() throws IOException {
// Given:
final String sqlFile = TMP.newFile().getAbsolutePath();
when(options.getScriptFile()).thenReturn(Optional.of(sqlFile));
// When:
ksql.run();
// Then:
verify(cli).runScript(sqlFile);
}
|
@Override
protected boolean isNewMigration(NoSqlMigration noSqlMigration) {
// why: as Jedis does not have a schema, each migration checks if it needs to do something
return true;
}
|
@Test
void testMigrationsHappyPath() {
assertThat(lettuceRedisDBCreator.isNewMigration(new NoSqlMigrationByClass(M001_JedisRemoveJobStatsAndUseMetadata.class))).isTrue();
assertThatCode(lettuceRedisDBCreator::runMigrations).doesNotThrowAnyException();
assertThatCode(lettuceRedisDBCreator::runMigrations).doesNotThrowAnyException();
assertThat(lettuceRedisDBCreator.isNewMigration(new NoSqlMigrationByClass(M001_JedisRemoveJobStatsAndUseMetadata.class))).isTrue();
}
|
public Record convert(final AbstractWALEvent event) {
if (filter(event)) {
return createPlaceholderRecord(event);
}
if (!(event instanceof AbstractRowEvent)) {
return createPlaceholderRecord(event);
}
PipelineTableMetaData tableMetaData = getPipelineTableMetaData(((AbstractRowEvent) event).getTableName());
if (event instanceof WriteRowEvent) {
return handleWriteRowEvent((WriteRowEvent) event, tableMetaData);
}
if (event instanceof UpdateRowEvent) {
return handleUpdateRowEvent((UpdateRowEvent) event, tableMetaData);
}
if (event instanceof DeleteRowEvent) {
return handleDeleteRowEvent((DeleteRowEvent) event, tableMetaData);
}
throw new UnsupportedSQLOperationException("");
}
|
@Test
void assertConvertWriteRowEvent() {
Record record = walEventConverter.convert(mockWriteRowEvent());
assertThat(record, instanceOf(DataRecord.class));
assertThat(((DataRecord) record).getType(), is(PipelineSQLOperationType.INSERT));
}
|
List<Endpoint> endpoints() {
try {
String urlString = String.format("%s/api/v1/namespaces/%s/pods", kubernetesMaster, namespace);
return enrichWithPublicAddresses(parsePodsList(callGet(urlString)));
} catch (RestClientException e) {
return handleKnownException(e);
}
}
|
@Test(expected = KubernetesClientException.class)
public void endpointsFailFastWhenLbServiceHasNoHazelcastPort() throws JsonProcessingException {
// given
kubernetesClient = newKubernetesClient(ExposeExternallyMode.ENABLED, false, null, null);
stub(String.format("/api/v1/namespaces/%s/pods", NAMESPACE), podsListResponse());
stub(String.format("/api/v1/namespaces/%s/endpoints", NAMESPACE), endpointsListResponse());
stub(String.format("/api/v1/namespaces/%s/services/hazelcast-0", NAMESPACE),
serviceLbWithMultiplePorts("hazelcast-0", List.of(
servicePortWithName("hz-port", 5701, 5701, 31916),
servicePortWithName("wan-port", 5710, 5710, 31926)), "35.232.226.200"));
stub(String.format("/api/v1/namespaces/%s/services/service-1", NAMESPACE),
serviceLbWithMultiplePorts("service-1", List.of(
servicePortWithName("hz-port", 5701, 5701, 31917),
servicePortWithName("wan-port", 5701, 5701, 31916)), "35.232.226.201"));
// when
List<Endpoint> result = kubernetesClient.endpoints();
// then
// exception
}
|
@Override
public int hashCode() {
int result = (includeValue ? 1 : 0);
result = 31 * result + (key != null ? key.hashCode() : 0);
return result;
}
|
@Test
public void testHashCode() {
assertEquals(multiMapEventFilter.hashCode(), multiMapEventFilter.hashCode());
assertEquals(multiMapEventFilter.hashCode(), multiMapEventFilterSameAttributes.hashCode());
assumeDifferentHashCodes();
assertNotEquals(multiMapEventFilter.hashCode(), multiMapEventFilterOtherIncludeValue.hashCode());
assertNotEquals(multiMapEventFilter.hashCode(), multiMapEventFilterOtherKey.hashCode());
assertNotEquals(multiMapEventFilter.hashCode(), multiMapEventFilterDefaultParameters.hashCode());
}
|
public Session getSession(String name) {
return new Session(UUID.randomUUID().toString(), name);
}
|
@Test
void checkGetSession() {
Server server = new Server("localhost", 8080);
Session session = server.getSession("Session");
assertEquals("Session", session.getClientName());
}
|
@Override
public LocalAddress localAddress() {
return (LocalAddress) super.localAddress();
}
|
@Test
@Timeout(value = 3000, unit = TimeUnit.MILLISECONDS)
public void testConnectFutureBeforeChannelActive() throws Exception {
Bootstrap cb = new Bootstrap();
ServerBootstrap sb = new ServerBootstrap();
cb.group(group1)
.channel(LocalChannel.class)
.handler(new ChannelInboundHandlerAdapter());
sb.group(group2)
.channel(LocalServerChannel.class)
.childHandler(new ChannelInitializer<LocalChannel>() {
@Override
public void initChannel(LocalChannel ch) throws Exception {
ch.pipeline().addLast(new TestHandler());
}
});
Channel sc = null;
Channel cc = null;
try {
// Start server
sc = sb.bind(TEST_ADDRESS).sync().channel();
cc = cb.register().sync().channel();
final ChannelPromise promise = cc.newPromise();
final Promise<Void> assertPromise = cc.eventLoop().newPromise();
cc.pipeline().addLast(new TestHandler() {
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
// Ensure the promise was done before the handler method is triggered.
if (promise.isDone()) {
assertPromise.setSuccess(null);
} else {
assertPromise.setFailure(new AssertionError("connect promise should be done"));
}
}
});
// Connect to the server
cc.connect(sc.localAddress(), promise).sync();
assertPromise.syncUninterruptibly();
assertTrue(promise.isSuccess());
} finally {
closeChannel(cc);
closeChannel(sc);
}
}
|
public static Serializer getDefault() {
return SERIALIZER_MAP.get(defaultSerializer);
}
|
@Test
void testSetSerialize() {
Serializer serializer = SerializeFactory.getDefault();
Set<Integer> logsMap = new CopyOnWriteArraySet<>();
for (int i = 0; i < 4; i++) {
logsMap.add(i);
}
byte[] data = serializer.serialize(logsMap);
assertNotEquals(0, data.length);
Set<Integer> result = serializer.deserialize(data, CopyOnWriteArraySet.class);
System.out.println(result);
}
|
@Override
public void checkCanTruncateTable(ConnectorTransactionHandle transaction, ConnectorIdentity identity, AccessControlContext context, SchemaTableName tableName)
{
if (!checkTablePermission(identity, tableName, DELETE)) {
denyTruncateTable(tableName.toString());
}
}
|
@Test
public void testTableRulesForCheckCanTruncateTable()
throws IOException
{
ConnectorAccessControl accessControl = createAccessControl("table.json");
accessControl.checkCanTruncateTable(TRANSACTION_HANDLE, user("bob"), CONTEXT, new SchemaTableName("bobschema", "bobtable"));
assertDenied(() -> accessControl.checkCanTruncateTable(TRANSACTION_HANDLE, user("bob"), CONTEXT, new SchemaTableName("bobschema", "test")));
}
|
@Override
@Transactional(value="defaultTransactionManager")
public OAuth2AccessTokenEntity createAccessToken(OAuth2Authentication authentication) throws AuthenticationException, InvalidClientException {
if (authentication != null && authentication.getOAuth2Request() != null) {
// look up our client
OAuth2Request request = authentication.getOAuth2Request();
ClientDetailsEntity client = clientDetailsService.loadClientByClientId(request.getClientId());
if (client == null) {
throw new InvalidClientException("Client not found: " + request.getClientId());
}
// handle the PKCE code challenge if present
if (request.getExtensions().containsKey(CODE_CHALLENGE)) {
String challenge = (String) request.getExtensions().get(CODE_CHALLENGE);
PKCEAlgorithm alg = PKCEAlgorithm.parse((String) request.getExtensions().get(CODE_CHALLENGE_METHOD));
String verifier = request.getRequestParameters().get(CODE_VERIFIER);
if (alg.equals(PKCEAlgorithm.plain)) {
// do a direct string comparison
if (!challenge.equals(verifier)) {
throw new InvalidRequestException("Code challenge and verifier do not match");
}
} else if (alg.equals(PKCEAlgorithm.S256)) {
// hash the verifier
try {
MessageDigest digest = MessageDigest.getInstance("SHA-256");
String hash = Base64URL.encode(digest.digest(verifier.getBytes(StandardCharsets.US_ASCII))).toString();
if (!challenge.equals(hash)) {
throw new InvalidRequestException("Code challenge and verifier do not match");
}
} catch (NoSuchAlgorithmException e) {
logger.error("Unknown algorithm for PKCE digest", e);
}
}
}
OAuth2AccessTokenEntity token = new OAuth2AccessTokenEntity();//accessTokenFactory.createNewAccessToken();
// attach the client
token.setClient(client);
// inherit the scope from the auth, but make a new set so it is
//not unmodifiable. Unmodifiables don't play nicely with Eclipselink, which
//wants to use the clone operation.
Set<SystemScope> scopes = scopeService.fromStrings(request.getScope());
// remove any of the special system scopes
scopes = scopeService.removeReservedScopes(scopes);
token.setScope(scopeService.toStrings(scopes));
// make it expire if necessary
if (client.getAccessTokenValiditySeconds() != null && client.getAccessTokenValiditySeconds() > 0) {
Date expiration = new Date(System.currentTimeMillis() + (client.getAccessTokenValiditySeconds() * 1000L));
token.setExpiration(expiration);
}
// attach the authorization so that we can look it up later
AuthenticationHolderEntity authHolder = new AuthenticationHolderEntity();
authHolder.setAuthentication(authentication);
authHolder = authenticationHolderRepository.save(authHolder);
token.setAuthenticationHolder(authHolder);
// attach a refresh token, if this client is allowed to request them and the user gets the offline scope
if (client.isAllowRefresh() && token.getScope().contains(SystemScopeService.OFFLINE_ACCESS)) {
OAuth2RefreshTokenEntity savedRefreshToken = createRefreshToken(client, authHolder);
token.setRefreshToken(savedRefreshToken);
}
//Add approved site reference, if any
OAuth2Request originalAuthRequest = authHolder.getAuthentication().getOAuth2Request();
if (originalAuthRequest.getExtensions() != null && originalAuthRequest.getExtensions().containsKey("approved_site")) {
Long apId = Long.parseLong((String) originalAuthRequest.getExtensions().get("approved_site"));
ApprovedSite ap = approvedSiteService.getById(apId);
token.setApprovedSite(ap);
}
OAuth2AccessTokenEntity enhancedToken = (OAuth2AccessTokenEntity) tokenEnhancer.enhance(token, authentication);
OAuth2AccessTokenEntity savedToken = saveAccessToken(enhancedToken);
if (savedToken.getRefreshToken() != null) {
tokenRepository.saveRefreshToken(savedToken.getRefreshToken()); // make sure we save any changes that might have been enhanced
}
return savedToken;
}
throw new AuthenticationCredentialsNotFoundException("No authentication credentials found");
}
|
@Test(expected = InvalidClientException.class)
public void createAccessToken_nullClient() {
when(clientDetailsService.loadClientByClientId(anyString())).thenReturn(null);
service.createAccessToken(authentication);
}
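For reference on the S256 branch of createAccessToken: the code_challenge is the unpadded base64url encoding of the SHA-256 digest of the code_verifier's ASCII bytes, which is what the stored challenge is compared against. A minimal JDK-only sketch of that derivation (the method name is illustrative; the service itself uses Nimbus' Base64URL, which yields the same unpadded form):
// Derives the S256 code_challenge compared against the stored challenge:
// SHA-256 over the verifier's ASCII bytes, then base64url without padding.
static String s256CodeChallenge(String codeVerifier) throws java.security.NoSuchAlgorithmException {
    java.security.MessageDigest digest = java.security.MessageDigest.getInstance("SHA-256");
    byte[] hash = digest.digest(codeVerifier.getBytes(java.nio.charset.StandardCharsets.US_ASCII));
    return java.util.Base64.getUrlEncoder().withoutPadding().encodeToString(hash);
}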
|
public Object select(String id, ParamOption[] options, Object defaultValue) {
if (defaultValue == null && options != null && options.length > 0) {
defaultValue = options[0].getValue();
}
forms.put(id, new Select(id, defaultValue, options));
Object value = params.get(id);
if (value == null) {
value = defaultValue;
}
params.put(id, value);
return value;
}
|
@Test
void testSelect() {
GUI gui = new GUI();
Object selected = gui.select("list_1", options, null);
// use the first one as the default value
assertEquals("1", selected);
gui = new GUI();
selected = gui.select("list_1", options, "2");
assertEquals("2", selected);
// "2" is selected by above statement, so even this default value is "1", the selected value is
// still "2"
selected = gui.select("list_1", options, "1");
assertEquals("2", selected);
}
|
@VisibleForTesting
void validateMenu(Long parentId, String name, Long id) {
MenuDO menu = menuMapper.selectByParentIdAndName(parentId, name);
if (menu == null) {
return;
}
// If id is null, there is no need to check whether the existing menu is the same one
if (id == null) {
throw exception(MENU_NAME_DUPLICATE);
}
if (!menu.getId().equals(id)) {
throw exception(MENU_NAME_DUPLICATE);
}
}
|
@Test
public void testValidateMenu_sonMenuNameDuplicate() {
// mock parent and child menus
MenuDO sonMenu = createParentAndSonMenu();
// prepare parameters
Long parentId = sonMenu.getParentId();
Long otherSonMenuId = randomLongId();
String otherSonMenuName = sonMenu.getName(); // same name
// call the method and assert the exception
assertServiceException(() -> menuService.validateMenu(parentId, otherSonMenuName, otherSonMenuId),
MENU_NAME_DUPLICATE);
}
|
@Override
public void writeDouble(final double v) throws IOException {
ensureAvailable(DOUBLE_SIZE_IN_BYTES);
MEM.putDouble(buffer, ARRAY_BYTE_BASE_OFFSET + pos, v);
pos += DOUBLE_SIZE_IN_BYTES;
}
|
@Test
public void testWriteDoubleForPositionV() throws Exception {
double expected = 1.1d;
out.writeDouble(1, expected);
long theLong = Bits.readLong(out.buffer, 1, ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN);
double actual = Double.longBitsToDouble(theLong);
assertEquals(expected, actual, 0);
}
|
public boolean shouldFlush(Duration flushInterval) {
final long lastFlush = lastFlushTime.get();
// If we don't know the last flush time, we want to flush. Happens with a new buffer instance.
return lastFlush == 0 || (System.nanoTime() - lastFlush) > flushInterval.toNanos();
}
|
@Test
void shouldFlush() {
// No interactions yet, we want to flush because we don't know the last execution time.
assertThat(buffer.shouldFlush(Duration.ofSeconds(1))).isTrue();
// Trigger a flush
buffer.flush(flusher);
// The last flush just happened, flush interval not reached.
assertThat(buffer.shouldFlush(Duration.ofDays(1))).isFalse();
// The last flush was more than 1 ns ago, flush.
assertThat(buffer.shouldFlush(Duration.ofNanos(1))).isTrue();
}
|
@Override
public int run(String[] args) throws Exception {
if (args.length != 2) {
return usage(args);
}
String action = args[0];
String name = args[1];
int result;
if (A_LOAD.equals(action)) {
result = loadClass(name);
} else if (A_CREATE.equals(action)) {
//first load to separate load errors from create
result = loadClass(name);
if (result == SUCCESS) {
//class loads, so instantiate it
result = createClassInstance(name);
}
} else if (A_RESOURCE.equals(action)) {
result = loadResource(name);
} else if (A_PRINTRESOURCE.equals(action)) {
result = dumpResource(name);
} else {
result = usage(args);
}
return result;
}
|
@Test
public void testCreatesClass() throws Throwable {
run(FindClass.SUCCESS,
FindClass.A_CREATE, "org.apache.hadoop.util.TestFindClass");
}
|
@Override
public void lockEdge(Inode lastInode, String childName, LockMode mode) {
mode = nextLockMode(mode);
long edgeParentId = lastInode.getId();
Edge edge = new Edge(lastInode.getId(), childName);
if (!mLocks.isEmpty()) {
Preconditions.checkState(endsInInode(),
"Cannot lock edge %s when lock list %s already ends in an edge", edge, this);
Preconditions.checkState(lastInode().getId() == edgeParentId,
"Cannot lock edge %s when the last inode id in %s is %s", edge, this, lastInode.getId());
}
lockAndAddEdge(edge, mode);
}
|
@Test
public void lockEdgeAfterEdge() {
mLockList.lockEdge(mDirA, mDirB.getName(), LockMode.READ);
mThrown.expect(IllegalStateException.class);
mLockList.lockEdge(mDirB, mFileC.getName(), LockMode.READ);
}
|
@Override
public LeastLoadedNode leastLoadedNode(long now) {
List<Node> nodes = this.metadataUpdater.fetchNodes();
if (nodes.isEmpty())
throw new IllegalStateException("There are no nodes in the Kafka cluster");
int inflight = Integer.MAX_VALUE;
Node foundConnecting = null;
Node foundCanConnect = null;
Node foundReady = null;
boolean atLeastOneConnectionReady = false;
int offset = this.randOffset.nextInt(nodes.size());
for (int i = 0; i < nodes.size(); i++) {
int idx = (offset + i) % nodes.size();
Node node = nodes.get(idx);
if (!atLeastOneConnectionReady
&& connectionStates.isReady(node.idString(), now)
&& selector.isChannelReady(node.idString())) {
atLeastOneConnectionReady = true;
}
if (canSendRequest(node.idString(), now)) {
int currInflight = this.inFlightRequests.count(node.idString());
if (currInflight == 0) {
// if we find an established connection with no in-flight requests we can stop right away
log.trace("Found least loaded node {} connected with no in-flight requests", node);
return new LeastLoadedNode(node, true);
} else if (currInflight < inflight) {
// otherwise if this is the best we have found so far, record that
inflight = currInflight;
foundReady = node;
}
} else if (connectionStates.isPreparingConnection(node.idString())) {
foundConnecting = node;
} else if (canConnect(node, now)) {
if (foundCanConnect == null ||
this.connectionStates.lastConnectAttemptMs(foundCanConnect.idString()) >
this.connectionStates.lastConnectAttemptMs(node.idString())) {
foundCanConnect = node;
}
} else {
log.trace("Removing node {} from least loaded node selection since it is neither ready " +
"for sending or connecting", node);
}
}
// We prefer established connections if possible. Otherwise, we will wait for connections
// which are being established before connecting to new nodes.
if (foundReady != null) {
log.trace("Found least loaded node {} with {} inflight requests", foundReady, inflight);
return new LeastLoadedNode(foundReady, atLeastOneConnectionReady);
} else if (foundConnecting != null) {
log.trace("Found least loaded connecting node {}", foundConnecting);
return new LeastLoadedNode(foundConnecting, atLeastOneConnectionReady);
} else if (foundCanConnect != null) {
log.trace("Found least loaded node {} with no active connection", foundCanConnect);
return new LeastLoadedNode(foundCanConnect, atLeastOneConnectionReady);
} else {
log.trace("Least loaded node selection failed to find an available node");
return new LeastLoadedNode(null, atLeastOneConnectionReady);
}
}
|
@Test
public void testLeastLoadedNode() {
client.ready(node, time.milliseconds());
assertFalse(client.isReady(node, time.milliseconds()));
LeastLoadedNode leastLoadedNode = client.leastLoadedNode(time.milliseconds());
assertEquals(node, leastLoadedNode.node());
assertTrue(leastLoadedNode.hasNodeAvailableOrConnectionReady());
awaitReady(client, node);
client.poll(1, time.milliseconds());
assertTrue(client.isReady(node, time.milliseconds()), "The client should be ready");
// leastloadednode should be our single node
leastLoadedNode = client.leastLoadedNode(time.milliseconds());
assertTrue(leastLoadedNode.hasNodeAvailableOrConnectionReady());
Node leastNode = leastLoadedNode.node();
assertEquals(leastNode.id(), node.id(), "There should be one leastloadednode");
// sleep for longer than reconnect backoff
time.sleep(reconnectBackoffMsTest);
// CLOSE node
selector.serverDisconnect(node.idString());
client.poll(1, time.milliseconds());
assertFalse(client.ready(node, time.milliseconds()), "After we forced the disconnection the client is no longer ready.");
leastLoadedNode = client.leastLoadedNode(time.milliseconds());
assertFalse(leastLoadedNode.hasNodeAvailableOrConnectionReady());
assertNull(leastLoadedNode.node(), "There should be NO leastloadednode");
}
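One detail of leastLoadedNode that is easy to miss is the random starting offset: the scan visits every node exactly once but starts at a random index, so ties between equally loaded nodes are spread across the cluster instead of always favouring the first node in the list. A tiny standalone sketch of just that indexing pattern (names are illustrative):
import java.util.List;
import java.util.Random;
// Sketch of the random-offset scan used to avoid always preferring the first node in the list.
class RandomOffsetScanSketch {
    public static void main(String[] args) {
        List<String> nodes = List.of("node-0", "node-1", "node-2");
        int offset = new Random().nextInt(nodes.size());
        for (int i = 0; i < nodes.size(); i++) {
            int idx = (offset + i) % nodes.size(); // wraps around, visiting every node exactly once
            System.out.println("considering " + nodes.get(idx));
        }
    }
}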
|
@Override
public String toString() {
return StringUtils.toString(this);
}
|
@Test
public void testToString() {
Assertions.assertEquals("Metadata(leaders={}, clusterTerm={}, clusterNodes={\"cluster\"->{}}, storeMode=StoreMode.RAFT)", metadata.toString());
}
|