Dataset schema (column name, type, min/max string length):

Column            Type    Min  Max
target            string  20   113k
src_fm            string  11   86.3k
src_fm_fc         string  21   86.4k
src_fm_fc_co      string  30   86.4k
src_fm_fc_ms      string  42   86.8k
src_fm_fc_ms_ff   string  43   86.8k
@Test public void testRoundRobin() throws InterruptedException { final String topicA = "topicA"; final String topicB = "topicB"; List<PartitionInfo> allPartitions = asList(new PartitionInfo(topicA, 0, node0, nodes, nodes), new PartitionInfo(topicA, 1, node1, nodes, nodes), new PartitionInfo(topicA, 2, node2, nodes, nodes), new PartitionInfo(topicB, 0, node0, nodes, nodes) ); Cluster testCluster = new Cluster("clusterId", asList(node0, node1, node2), allPartitions, Collections.<String>emptySet(), Collections.<String>emptySet()); final Map<Integer, Integer> partitionCount = new HashMap<>(); for (int i = 0; i < 30; ++i) { int partition = partitioner.partition(topicA, null, null, null, null, testCluster); Integer count = partitionCount.get(partition); if (null == count) count = 0; partitionCount.put(partition, count + 1); if (i % 5 == 0) { partitioner.partition(topicB, null, null, null, null, testCluster); } } assertEquals(10, (int) partitionCount.get(0)); assertEquals(10, (int) partitionCount.get(1)); assertEquals(10, (int) partitionCount.get(2)); }
public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); if (keyBytes == null) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (availablePartitions.size() > 0) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { return Utils.toPositive(nextValue) % numPartitions; } } else { return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions; } }
DefaultPartitioner implements Partitioner { public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); if (keyBytes == null) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (availablePartitions.size() > 0) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { return Utils.toPositive(nextValue) % numPartitions; } } else { return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions; } } }
DefaultPartitioner implements Partitioner { public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); if (keyBytes == null) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (availablePartitions.size() > 0) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { return Utils.toPositive(nextValue) % numPartitions; } } else { return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions; } } }
DefaultPartitioner implements Partitioner { public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); if (keyBytes == null) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (availablePartitions.size() > 0) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { return Utils.toPositive(nextValue) % numPartitions; } } else { return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions; } } void configure(Map<String, ?> configs); int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster); void close(); }
DefaultPartitioner implements Partitioner { public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) { List<PartitionInfo> partitions = cluster.partitionsForTopic(topic); int numPartitions = partitions.size(); if (keyBytes == null) { int nextValue = nextValue(topic); List<PartitionInfo> availablePartitions = cluster.availablePartitionsForTopic(topic); if (availablePartitions.size() > 0) { int part = Utils.toPositive(nextValue) % availablePartitions.size(); return availablePartitions.get(part).partition(); } else { return Utils.toPositive(nextValue) % numPartitions; } } else { return Utils.toPositive(Utils.murmur2(keyBytes)) % numPartitions; } } void configure(Map<String, ?> configs); int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster); void close(); }
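The focal method above dispatches on the key: keyed records are hashed (Utils.murmur2) so a given key always lands on the same partition, while keyless records rotate through partitions via a per-topic counter, which is what testRoundRobin asserts (30 keyless sends over 3 partitions, 10 each). A minimal, self-contained sketch of that dispatch, with java.util.Arrays.hashCode standing in for Kafka's murmur2 and a ConcurrentHashMap counter standing in for nextValue(topic) — both stand-ins are assumptions, not Kafka's implementation:

    import java.util.Arrays;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicInteger;

    public class PartitionSketch {
        // Per-topic round-robin counters, mirroring DefaultPartitioner's nextValue(topic).
        private final Map<String, AtomicInteger> counters = new ConcurrentHashMap<>();

        // Masks the sign bit so the modulo result is never negative, like Utils.toPositive.
        static int toPositive(int n) {
            return n & 0x7fffffff;
        }

        int partition(String topic, byte[] keyBytes, int numPartitions) {
            if (keyBytes == null) {
                // Keyless record: rotate through partitions with a per-topic counter.
                int next = counters.computeIfAbsent(topic, t -> new AtomicInteger(0)).getAndIncrement();
                return toPositive(next) % numPartitions;
            }
            // Keyed record: a stable hash pins the key to one partition.
            // Arrays.hashCode is only a stand-in for Kafka's Utils.murmur2.
            return toPositive(Arrays.hashCode(keyBytes)) % numPartitions;
        }
    }

The sign-bit mask matters: Java's % can return a negative result for a negative hash, which would index out of range.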
@Test public void testSimple() throws Exception { long offset = 0; Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertEquals("We should have a single produce request in flight.", 1, client.inFlightRequestCount()); assertTrue(client.hasInFlightRequests()); client.respond(produceResponse(tp0, offset, Errors.NONE, 0)); sender.run(time.milliseconds()); assertEquals("All requests completed.", 0, client.inFlightRequestCount()); assertFalse(client.hasInFlightRequests()); sender.run(time.milliseconds()); assertTrue("Request should be completed", future.isDone()); assertEquals(offset, future.get().offset()); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
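The rows in this and the following examples share the same focal method, Sender.run(): a main loop that keeps calling run(now) until close is initiated, then a graceful phase that keeps iterating while undrained batches or in-flight requests remain, unless a force-close aborts them. A structural sketch of that drain-then-close pattern with hypothetical names (DrainingLoop, runOnce, pending) — an analogy, not Kafka's Sender:

    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.atomic.AtomicBoolean;

    public class DrainingLoop implements Runnable {
        private final AtomicBoolean running = new AtomicBoolean(true);
        private final AtomicBoolean forceClose = new AtomicBoolean(false);
        private final ConcurrentLinkedQueue<Runnable> pending = new ConcurrentLinkedQueue<>();

        public void submit(Runnable task) { pending.add(task); }

        private void runOnce() {
            Runnable task = pending.poll();
            if (task != null) task.run();
        }

        @Override
        public void run() {
            // Main phase: keep iterating until a shutdown is requested.
            while (running.get()) {
                try { runOnce(); } catch (Exception e) { e.printStackTrace(); }
            }
            // Graceful phase: drain remaining work unless a force-close was requested.
            while (!forceClose.get() && !pending.isEmpty()) {
                try { runOnce(); } catch (Exception e) { e.printStackTrace(); }
            }
            if (forceClose.get()) pending.clear(); // analogous to abortIncompleteBatches()
        }

        public void initiateClose() { running.set(false); }
        public void forceClose() { forceClose.set(true); running.set(false); }
    }

The tests drive this loop manually: each sender.run(time.milliseconds()) call is one iteration of the main phase.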
@Test public void testMessageFormatDownConversion() throws Exception { long offset = 0; apiVersions.update("0", NodeApiVersions.create()); Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; apiVersions.update("0", NodeApiVersions.create(Collections.singleton( new ApiVersionsResponse.ApiVersion(ApiKeys.PRODUCE.id, (short) 0, (short) 2)))); client.prepareResponse(new MockClient.RequestMatcher() { @Override public boolean matches(AbstractRequest body) { ProduceRequest request = (ProduceRequest) body; if (request.version() != 2) return false; MemoryRecords records = request.partitionRecordsOrFail().get(tp0); return records != null && records.sizeInBytes() > 0 && records.hasMatchingMagic(RecordBatch.MAGIC_VALUE_V1); } }, produceResponse(tp0, offset, Errors.NONE, 0)); sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertTrue("Request should be completed", future.isDone()); assertEquals(offset, future.get().offset()); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
@Test public void testDownConversionForMismatchedMagicValues() throws Exception { long offset = 0; apiVersions.update("0", NodeApiVersions.create()); Future<RecordMetadata> future1 = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; apiVersions.update("0", NodeApiVersions.create(Collections.singleton( new ApiVersionsResponse.ApiVersion(ApiKeys.PRODUCE.id, (short) 0, (short) 2)))); Future<RecordMetadata> future2 = accumulator.append(tp1, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; apiVersions.update("0", NodeApiVersions.create()); ProduceResponse.PartitionResponse resp = new ProduceResponse.PartitionResponse(Errors.NONE, offset, RecordBatch.NO_TIMESTAMP); Map<TopicPartition, ProduceResponse.PartitionResponse> partResp = new HashMap<>(); partResp.put(tp0, resp); partResp.put(tp1, resp); ProduceResponse produceResponse = new ProduceResponse(partResp, 0); client.prepareResponse(new MockClient.RequestMatcher() { @Override public boolean matches(AbstractRequest body) { ProduceRequest request = (ProduceRequest) body; if (request.version() != 2) return false; Map<TopicPartition, MemoryRecords> recordsMap = request.partitionRecordsOrFail(); if (recordsMap.size() != 2) return false; for (MemoryRecords records : recordsMap.values()) { if (records == null || records.sizeInBytes() == 0 || !records.hasMatchingMagic(RecordBatch.MAGIC_VALUE_V1)) return false; } return true; } }, produceResponse); sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertTrue("Request should be completed", future1.isDone()); assertTrue("Request should be completed", future2.isDone()); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
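testMessageFormatDownConversion and testDownConversionForMismatchedMagicValues both pin the broker's PRODUCE support to version 2 and assert the producer writes batches with magic v1. A sketch of that version-to-magic mapping; the v2-to-magic-1 case is what the tests show, the other cutoffs are an assumption about the protocol history:

    public class MagicSketch {
        // Simplified mapping inferred from the two tests above: a broker that only
        // supports PRODUCE v2 gets records down-converted to magic v1.
        static byte magicForProduceVersion(short maxProduceVersion) {
            if (maxProduceVersion >= 3) return 2; // current record format (assumption)
            if (maxProduceVersion == 2) return 1; // what the tests assert
            return 0;                             // legacy format (assumption)
        }

        public static void main(String[] args) {
            System.out.println(magicForProduceVersion((short) 2)); // 1, i.e. MAGIC_VALUE_V1
        }
    }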
@Test public void testQuotaMetrics() throws Exception { MockSelector selector = new MockSelector(time); Sensor throttleTimeSensor = Sender.throttleTimeSensor(metrics); Cluster cluster = TestUtils.singletonCluster("test", 1); Node node = cluster.nodes().get(0); NetworkClient client = new NetworkClient(selector, metadata, "mock", Integer.MAX_VALUE, 1000, 1000, 64 * 1024, 64 * 1024, 1000, time, true, new ApiVersions(), throttleTimeSensor); short apiVersionsResponseVersion = ApiKeys.API_VERSIONS.latestVersion(); ByteBuffer buffer = ApiVersionsResponse.createApiVersionsResponse(400, RecordBatch.CURRENT_MAGIC_VALUE).serialize(apiVersionsResponseVersion, new ResponseHeader(0)); selector.delayedReceive(new DelayedReceive(node.idString(), new NetworkReceive(node.idString(), buffer))); while (!client.ready(node, time.milliseconds())) client.poll(1, time.milliseconds()); selector.clear(); for (int i = 1; i <= 3; i++) { int throttleTimeMs = 100 * i; ProduceRequest.Builder builder = new ProduceRequest.Builder(RecordBatch.CURRENT_MAGIC_VALUE, (short) 1, 1000, Collections.<TopicPartition, MemoryRecords>emptyMap()); ClientRequest request = client.newClientRequest(node.idString(), builder, time.milliseconds(), true, null); client.send(request, time.milliseconds()); client.poll(1, time.milliseconds()); ProduceResponse response = produceResponse(tp0, i, Errors.NONE, throttleTimeMs); buffer = response.serialize(ApiKeys.PRODUCE.latestVersion(), new ResponseHeader(request.correlationId())); selector.completeReceive(new NetworkReceive(node.idString(), buffer)); client.poll(1, time.milliseconds()); selector.clear(); } Map<MetricName, KafkaMetric> allMetrics = metrics.metrics(); KafkaMetric avgMetric = allMetrics.get(metrics.metricName("produce-throttle-time-avg", METRIC_GROUP, "")); KafkaMetric maxMetric = allMetrics.get(metrics.metricName("produce-throttle-time-max", METRIC_GROUP, "")); assertEquals(250, avgMetric.value(), EPS); assertEquals(400, maxMetric.value(), EPS); client.close(); }
public static Sensor throttleTimeSensor(Metrics metrics) { String metricGrpName = SenderMetrics.METRIC_GROUP_NAME; Sensor produceThrottleTimeSensor = metrics.sensor("produce-throttle-time"); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-avg", metricGrpName, "The average throttle time in ms"), new Avg()); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-max", metricGrpName, "The maximum throttle time in ms"), new Max()); return produceThrottleTimeSensor; }
Sender implements Runnable { public static Sensor throttleTimeSensor(Metrics metrics) { String metricGrpName = SenderMetrics.METRIC_GROUP_NAME; Sensor produceThrottleTimeSensor = metrics.sensor("produce-throttle-time"); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-avg", metricGrpName, "The average throttle time in ms"), new Avg()); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-max", metricGrpName, "The maximum throttle time in ms"), new Max()); return produceThrottleTimeSensor; } }
Sender implements Runnable { public static Sensor throttleTimeSensor(Metrics metrics) { String metricGrpName = SenderMetrics.METRIC_GROUP_NAME; Sensor produceThrottleTimeSensor = metrics.sensor("produce-throttle-time"); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-avg", metricGrpName, "The average throttle time in ms"), new Avg()); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-max", metricGrpName, "The maximum throttle time in ms"), new Max()); return produceThrottleTimeSensor; } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public static Sensor throttleTimeSensor(Metrics metrics) { String metricGrpName = SenderMetrics.METRIC_GROUP_NAME; Sensor produceThrottleTimeSensor = metrics.sensor("produce-throttle-time"); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-avg", metricGrpName, "The average throttle time in ms"), new Avg()); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-max", metricGrpName, "The maximum throttle time in ms"), new Max()); return produceThrottleTimeSensor; } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public static Sensor throttleTimeSensor(Metrics metrics) { String metricGrpName = SenderMetrics.METRIC_GROUP_NAME; Sensor produceThrottleTimeSensor = metrics.sensor("produce-throttle-time"); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-avg", metricGrpName, "The average throttle time in ms"), new Avg()); produceThrottleTimeSensor.add(metrics.metricName("produce-throttle-time-max", metricGrpName, "The maximum throttle time in ms"), new Max()); return produceThrottleTimeSensor; } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
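throttleTimeSensor registers an Avg and a Max stat over recorded throttle times; testQuotaMetrics feeds it 400 ms (with the ApiVersions response) and then 100/200/300 ms with the three produce responses, expecting avg 250 and max 400. A plain-Java sketch of the same aggregation, without the Metrics/Sensor machinery:

    import java.util.ArrayList;
    import java.util.List;

    public class ThrottleStats {
        private final List<Integer> samples = new ArrayList<>();

        void record(int throttleTimeMs) { samples.add(throttleTimeMs); }

        double avg() {
            return samples.stream().mapToInt(Integer::intValue).average().orElse(0);
        }

        int max() {
            return samples.stream().mapToInt(Integer::intValue).max().orElse(0);
        }

        public static void main(String[] args) {
            ThrottleStats stats = new ThrottleStats();
            // 400 ms arrives with the ApiVersions response; 100/200/300 with produce responses.
            for (int ms : new int[] {400, 100, 200, 300}) stats.record(ms);
            System.out.println(stats.avg()); // 250.0, matching the avg assertion
            System.out.println(stats.max()); // 400, matching the max assertion
        }
    }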
@Test public void testRetries() throws Exception { int maxRetries = 1; Metrics m = new Metrics(); try { Sender sender = new Sender(client, metadata, this.accumulator, false, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries, m, time, REQUEST_TIMEOUT, 50, null, apiVersions); Future<RecordMetadata> future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); sender.run(time.milliseconds()); String id = client.requests().peek().destination(); Node node = new Node(Integer.parseInt(id), "localhost", 0); assertEquals(1, client.inFlightRequestCount()); assertTrue(client.hasInFlightRequests()); assertTrue("Client ready status should be true", client.isReady(node, 0L)); client.disconnect(id); assertEquals(0, client.inFlightRequestCount()); assertFalse(client.hasInFlightRequests()); assertFalse("Client ready status should be false", client.isReady(node, 0L)); sender.run(time.milliseconds()); sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertEquals(1, client.inFlightRequestCount()); assertTrue(client.hasInFlightRequests()); long offset = 0; client.respond(produceResponse(tp0, offset, Errors.NONE, 0)); sender.run(time.milliseconds()); assertTrue("Request should have retried and completed", future.isDone()); assertEquals(offset, future.get().offset()); future = accumulator.append(tp0, 0L, "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); for (int i = 0; i < maxRetries + 1; i++) { client.disconnect(client.requests().peek().destination()); sender.run(time.milliseconds()); sender.run(time.milliseconds()); sender.run(time.milliseconds()); } sender.run(time.milliseconds()); completedWithError(future, Errors.NETWORK_EXCEPTION); } finally { m.close(); } }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
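testRetries configures maxRetries = 1, disconnects the first attempt, and expects the retry to succeed; after maxRetries + 1 disconnects the future completes with NETWORK_EXCEPTION. A generic sketch of that bounded-attempts contract (a standalone helper, not Kafka's per-batch retry path):

    import java.util.concurrent.Callable;

    public class RetrySketch {
        // Tries the call up to maxRetries + 1 times; rethrows the last failure.
        static <T> T callWithRetries(Callable<T> call, int maxRetries) throws Exception {
            Exception last = null;
            for (int attempt = 0; attempt <= maxRetries; attempt++) {
                try {
                    return call.call();
                } catch (Exception e) {
                    last = e; // e.g. a disconnect; loop around for another attempt
                }
            }
            throw last; // retries exhausted: the failure reaches the caller, as in the test
        }
    }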
@Test public void testSendInOrder() throws Exception { int maxRetries = 1; Metrics m = new Metrics(); try { Sender sender = new Sender(client, metadata, this.accumulator, true, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries, m, time, REQUEST_TIMEOUT, 50, null, apiVersions); Cluster cluster1 = TestUtils.clusterWith(2, "test", 2); metadata.update(cluster1, Collections.<String>emptySet(), time.milliseconds()); TopicPartition tp2 = new TopicPartition("test", 1); accumulator.append(tp2, 0L, "key1".getBytes(), "value1".getBytes(), null, null, MAX_BLOCK_TIMEOUT); sender.run(time.milliseconds()); sender.run(time.milliseconds()); String id = client.requests().peek().destination(); assertEquals(ApiKeys.PRODUCE, client.requests().peek().requestBuilder().apiKey()); Node node = new Node(Integer.parseInt(id), "localhost", 0); assertEquals(1, client.inFlightRequestCount()); assertTrue(client.hasInFlightRequests()); assertTrue("Client ready status should be true", client.isReady(node, 0L)); time.sleep(900); accumulator.append(tp2, 0L, "key2".getBytes(), "value2".getBytes(), null, null, MAX_BLOCK_TIMEOUT); Cluster cluster2 = TestUtils.singletonCluster("test", 2); metadata.update(cluster2, Collections.<String>emptySet(), time.milliseconds()); sender.run(time.milliseconds()); assertEquals(1, client.inFlightRequestCount()); assertTrue(client.hasInFlightRequests()); } finally { m.close(); } }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
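testSendInOrder constructs the Sender with guaranteeMessageOrder = true and verifies that only one request is in flight even as more data for the same partition arrives. The producer achieves this by muting a partition while it has an in-flight batch; a minimal sketch of that gate with hypothetical names:

    import java.util.HashSet;
    import java.util.Set;

    public class OrderingGate {
        // Partitions with an in-flight request; the drain step skips these
        // when message ordering is guaranteed.
        private final Set<String> muted = new HashSet<>();

        boolean maySend(String partition) { return !muted.contains(partition); }
        void onSend(String partition) { muted.add(partition); }        // mute while in flight
        void onComplete(String partition) { muted.remove(partition); } // unmute on response
    }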
@Test public void testAppendInExpiryCallback() throws InterruptedException { int messagesPerBatch = 10; final AtomicInteger expiryCallbackCount = new AtomicInteger(0); final AtomicReference<Exception> unexpectedException = new AtomicReference<>(); final byte[] key = "key".getBytes(); final byte[] value = "value".getBytes(); final long maxBlockTimeMs = 1000; Callback callback = new Callback() { @Override public void onCompletion(RecordMetadata metadata, Exception exception) { if (exception instanceof TimeoutException) { expiryCallbackCount.incrementAndGet(); try { accumulator.append(tp1, 0L, key, value, Record.EMPTY_HEADERS, null, maxBlockTimeMs); } catch (InterruptedException e) { throw new RuntimeException("Unexpected interruption", e); } } else if (exception != null) unexpectedException.compareAndSet(null, exception); } }; for (int i = 0; i < messagesPerBatch; i++) accumulator.append(tp1, 0L, key, value, null, callback, maxBlockTimeMs); time.sleep(10000); Node clusterNode = this.cluster.nodes().get(0); client.disconnect(clusterNode.idString()); client.blackout(clusterNode, 100); sender.run(time.milliseconds()); assertEquals("Callbacks not invoked for expiry", messagesPerBatch, expiryCallbackCount.get()); assertNull("Unexpected exception", unexpectedException.get()); assertTrue(accumulator.batches().containsKey(tp1)); assertEquals(1, accumulator.batches().get(tp1).size()); assertEquals(messagesPerBatch, accumulator.batches().get(tp1).peekFirst().recordCount); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
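In testAppendInExpiryCallback, the completion callback re-appends the record when it failed with a TimeoutException (the batch expired in the accumulator) and records any other exception as unexpected. A stripped-down sketch of that requeue-on-expiry callback, using a String record and a plain queue as hypothetical stand-ins for the accumulator:

    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.concurrent.TimeoutException;

    public class RequeueOnExpiry {
        private final ConcurrentLinkedQueue<String> accumulator = new ConcurrentLinkedQueue<>();

        // Shaped like the Callback in the test: a timeout means the batch expired,
        // so the record is appended again; any other failure surfaces.
        void onCompletion(String record, Exception exception) {
            if (exception instanceof TimeoutException) {
                accumulator.add(record);
            } else if (exception != null) {
                throw new RuntimeException("unexpected failure", exception);
            }
        }

        int pending() { return accumulator.size(); }
    }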
@Test public void testMetadataTopicExpiry() throws Exception { long offset = 0; metadata.update(Cluster.empty(), Collections.<String>emptySet(), time.milliseconds()); Future<RecordMetadata> future = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); assertTrue("Topic not added to metadata", metadata.containsTopic(tp0.topic())); metadata.update(cluster, Collections.<String>emptySet(), time.milliseconds()); sender.run(time.milliseconds()); client.respond(produceResponse(tp0, offset++, Errors.NONE, 0)); sender.run(time.milliseconds()); assertEquals("Request completed.", 0, client.inFlightRequestCount()); assertFalse(client.hasInFlightRequests()); sender.run(time.milliseconds()); assertTrue("Request should be completed", future.isDone()); assertTrue("Topic not retained in metadata list", metadata.containsTopic(tp0.topic())); time.sleep(Metadata.TOPIC_EXPIRY_MS); metadata.update(Cluster.empty(), Collections.<String>emptySet(), time.milliseconds()); assertFalse("Unused topic has not been expired", metadata.containsTopic(tp0.topic())); future = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); assertTrue("Topic not added to metadata", metadata.containsTopic(tp0.topic())); metadata.update(cluster, Collections.<String>emptySet(), time.milliseconds()); sender.run(time.milliseconds()); client.respond(produceResponse(tp0, offset++, Errors.NONE, 0)); sender.run(time.milliseconds()); assertEquals("Request completed.", 0, client.inFlightRequestCount()); assertFalse(client.hasInFlightRequests()); sender.run(time.milliseconds()); assertTrue("Request should be completed", future.isDone()); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
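testMetadataTopicExpiry checks that a topic stays in the metadata while in use, is dropped after TOPIC_EXPIRY_MS of disuse, and is re-added on the next append. A sketch of last-used-timestamp expiry; the 5-minute constant is an assumption about Metadata.TOPIC_EXPIRY_MS:

    import java.util.HashMap;
    import java.util.Map;

    public class TopicExpiryTracker {
        static final long TOPIC_EXPIRY_MS = 5 * 60 * 1000L; // assumed value
        private final Map<String, Long> lastUsed = new HashMap<>();

        void recordUse(String topic, long nowMs) { lastUsed.put(topic, nowMs); }

        // Drops topics that have not been used within the expiry window,
        // the way a metadata update expires unused topics in the test.
        void expireUnused(long nowMs) {
            lastUsed.values().removeIf(last -> nowMs - last > TOPIC_EXPIRY_MS);
        }

        boolean containsTopic(String topic) { return lastUsed.containsKey(topic); }
    }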
@Test public void floatToJson() { JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.FLOAT32_SCHEMA, 12.34f)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"float\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(12.34f, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).floatValue(), 0.001); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
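For orientation, a minimal round-trip sketch using the JsonConverter API shown in this row; the topic name and the string form of the schemas.enable flag are assumptions, not taken from the source.

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonConverterRoundTrip {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // isKey = false: configure the converter for record values.
        converter.configure(Collections.singletonMap("schemas.enable", "true"), false);
        // fromConnectData serializes an envelope: {"schema": {...}, "payload": 12.34}
        byte[] bytes = converter.fromConnectData("demo-topic", Schema.FLOAT32_SCHEMA, 12.34f);
        // toConnectData validates the envelope and returns the schema plus payload.
        SchemaAndValue result = converter.toConnectData("demo-topic", bytes);
        System.out.println(result.schema() + " -> " + result.value());
    }
}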
@Test public void shortToConnect() { assertEquals(new SchemaAndValue(Schema.INT16_SCHEMA, (short) 12), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"int16\" }, \"payload\": 12 }".getBytes())); }
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
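With schemas.enable set to false, toConnectData above wraps plain JSON in a null-schema envelope instead of requiring the schema/payload wrapper. A minimal sketch of that path (the string form of the config value is an assumption):

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

public class SchemalessToConnect {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        converter.configure(Collections.singletonMap("schemas.enable", "false"), false);
        byte[] plainJson = "{ \"answer\": 42 }".getBytes(StandardCharsets.UTF_8);
        // No envelope needed on this path; the returned schema is null.
        SchemaAndValue result = converter.toConnectData("demo-topic", plainJson);
        System.out.println(result.schema() + " -> " + result.value());
    }
}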
@Test public void testClusterAuthorizationExceptionInProduceRequest() throws Exception { final long producerId = 343434L; TransactionManager transactionManager = new TransactionManager(); setupWithTransactionState(transactionManager); client.setNode(new Node(1, "localhost", 33343)); prepareAndReceiveInitProducerId(producerId, Errors.NONE); assertTrue(transactionManager.hasProducerId()); Future<RecordMetadata> future = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; client.prepareResponse(new MockClient.RequestMatcher() { @Override public boolean matches(AbstractRequest body) { return body instanceof ProduceRequest && ((ProduceRequest) body).isIdempotent(); } }, produceResponse(tp0, -1, Errors.CLUSTER_AUTHORIZATION_FAILED, 0)); sender.run(time.milliseconds()); assertTrue(future.isDone()); try { future.get(); fail("Future should have raised ClusterAuthorizationException"); } catch (ExecutionException e) { assertTrue(e.getCause() instanceof ClusterAuthorizationException); } assertSendFailure(ClusterAuthorizationException.class); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
@Test public void testSequenceNumberIncrement() throws InterruptedException { final long producerId = 343434L; TransactionManager transactionManager = new TransactionManager(); transactionManager.setProducerIdAndEpoch(new ProducerIdAndEpoch(producerId, (short) 0)); setupWithTransactionState(transactionManager); client.setNode(new Node(1, "localhost", 33343)); int maxRetries = 10; Metrics m = new Metrics(); Sender sender = new Sender(client, metadata, this.accumulator, true, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries, m, time, REQUEST_TIMEOUT, 50, transactionManager, apiVersions); Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; client.prepareResponse(new MockClient.RequestMatcher() { @Override public boolean matches(AbstractRequest body) { if (body instanceof ProduceRequest) { ProduceRequest request = (ProduceRequest) body; MemoryRecords records = request.partitionRecordsOrFail().get(tp0); Iterator<MutableRecordBatch> batchIterator = records.batches().iterator(); assertTrue(batchIterator.hasNext()); RecordBatch batch = batchIterator.next(); assertFalse(batchIterator.hasNext()); assertEquals(0, batch.baseSequence()); assertEquals(producerId, batch.producerId()); assertEquals(0, batch.producerEpoch()); return true; } return false; } }, produceResponse(tp0, 0, Errors.NONE, 0)); sender.run(time.milliseconds()); sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertTrue(responseFuture.isDone()); assertEquals((long) transactionManager.sequenceNumber(tp0), 1L); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
@Test public void testAbortRetryWhenProducerIdChanges() throws InterruptedException { final long producerId = 343434L; TransactionManager transactionManager = new TransactionManager(); transactionManager.setProducerIdAndEpoch(new ProducerIdAndEpoch(producerId, (short) 0)); setupWithTransactionState(transactionManager); client.setNode(new Node(1, "localhost", 33343)); int maxRetries = 10; Metrics m = new Metrics(); Sender sender = new Sender(client, metadata, this.accumulator, true, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries, m, time, REQUEST_TIMEOUT, 50, transactionManager, apiVersions); Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); sender.run(time.milliseconds()); String id = client.requests().peek().destination(); Node node = new Node(Integer.valueOf(id), "localhost", 0); assertEquals(1, client.inFlightRequestCount()); assertTrue("Client ready status should be true", client.isReady(node, 0L)); client.disconnect(id); assertEquals(0, client.inFlightRequestCount()); assertFalse("Client ready status should be false", client.isReady(node, 0L)); transactionManager.setProducerIdAndEpoch(new ProducerIdAndEpoch(producerId + 1, (short) 0)); sender.run(time.milliseconds()); sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertEquals("Expected requests to be aborted after pid change", 0, client.inFlightRequestCount()); KafkaMetric recordErrors = m.metrics().get(m.metricName("record-error-rate", METRIC_GROUP, "")); assertTrue("Expected non-zero value for record send errors", recordErrors.value() > 0); assertTrue(responseFuture.isDone()); assertEquals((long) transactionManager.sequenceNumber(tp0), 0L); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
@Test public void testResetWhenOutOfOrderSequenceReceived() throws InterruptedException { final long producerId = 343434L; TransactionManager transactionManager = new TransactionManager(); transactionManager.setProducerIdAndEpoch(new ProducerIdAndEpoch(producerId, (short) 0)); setupWithTransactionState(transactionManager); client.setNode(new Node(1, "localhost", 33343)); int maxRetries = 10; Metrics m = new Metrics(); Sender sender = new Sender(client, metadata, this.accumulator, true, MAX_REQUEST_SIZE, ACKS_ALL, maxRetries, m, time, REQUEST_TIMEOUT, 50, transactionManager, apiVersions); Future<RecordMetadata> responseFuture = accumulator.append(tp0, time.milliseconds(), "key".getBytes(), "value".getBytes(), null, null, MAX_BLOCK_TIMEOUT).future; sender.run(time.milliseconds()); sender.run(time.milliseconds()); assertEquals(1, client.inFlightRequestCount()); client.respond(produceResponse(tp0, 0, Errors.OUT_OF_ORDER_SEQUENCE_NUMBER, 0)); sender.run(time.milliseconds()); assertTrue(responseFuture.isDone()); assertFalse("Expected transaction state to be reset upon receiving an OutOfOrderSequenceException", transactionManager.hasProducerId()); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
@Test public void testTransactionalSplitBatchAndSend() throws Exception { ProducerIdAndEpoch producerIdAndEpoch = new ProducerIdAndEpoch(123456L, (short) 0); TopicPartition tp = new TopicPartition("testSplitBatchAndSend", 1); TransactionManager txnManager = new TransactionManager("testSplitBatchAndSend", 60000, 100); setupWithTransactionState(txnManager); doInitTransactions(txnManager, producerIdAndEpoch); txnManager.beginTransaction(); txnManager.maybeAddPartitionToTransaction(tp); client.prepareResponse(new AddPartitionsToTxnResponse(0, Collections.singletonMap(tp, Errors.NONE))); sender.run(time.milliseconds()); testSplitBatchAndSend(txnManager, producerIdAndEpoch, tp); }
public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
Sender implements Runnable { public void run() { log.debug("Starting Kafka producer I/O thread."); while (running) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } log.debug("Beginning shutdown of Kafka producer I/O thread, sending remaining records."); while (!forceClose && (this.accumulator.hasUndrained() || this.client.inFlightRequestCount() > 0)) { try { run(time.milliseconds()); } catch (Exception e) { log.error("Uncaught error in kafka producer I/O thread: ", e); } } if (forceClose) { this.accumulator.abortIncompleteBatches(); } try { this.client.close(); } catch (Exception e) { log.error("Failed to close network client", e); } log.debug("Shutdown of Kafka producer I/O thread has completed."); } Sender(KafkaClient client, Metadata metadata, RecordAccumulator accumulator, boolean guaranteeMessageOrder, int maxRequestSize, short acks, int retries, Metrics metrics, Time time, int requestTimeout, long retryBackoffMs, TransactionManager transactionManager, ApiVersions apiVersions); void run(); void initiateClose(); void forceClose(); void wakeup(); static Sensor throttleTimeSensor(Metrics metrics); }
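The run() body repeated across these rows implements a drain-then-close shutdown: loop while running, then, unless force-closed, keep iterating until the accumulator has no undrained batches and no requests are in flight. A self-contained sketch of that shape; every name here (WorkLoop, pending, runOnce) is illustrative, not a Kafka API:

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

public class WorkLoop implements Runnable {
    private final AtomicBoolean running = new AtomicBoolean(true);
    private final AtomicBoolean forceClose = new AtomicBoolean(false);
    private final AtomicInteger pending = new AtomicInteger(3); // pretend three batches are queued

    @Override
    public void run() {
        while (running.get())
            runOnce();
        // Graceful path: drain leftover work, mirroring the
        // !forceClose && (hasUndrained() || inFlightRequestCount() > 0) loop.
        while (!forceClose.get() && pending.get() > 0)
            runOnce();
        if (forceClose.get())
            pending.set(0); // stands in for accumulator.abortIncompleteBatches()
        // client.close() would happen here in the real Sender.
    }

    private void runOnce() {
        if (pending.get() > 0)
            pending.decrementAndGet();
        running.set(false); // demo only: request shutdown after the first pass
    }

    public static void main(String[] args) {
        new WorkLoop().run(); // drains the remaining batches, then returns
    }
}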
@Test public void testConstructorWithSerializers() { Properties producerProps = new Properties(); producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000"); new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer()).close(); }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
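Because close() in these rows delegates to close(Long.MAX_VALUE, TimeUnit.MILLISECONDS), callers that cannot block indefinitely can pass an explicit timeout. A minimal sketch; the broker address and topic name are placeholders:

import java.util.Properties;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class BoundedCloseExample {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        KafkaProducer<String, String> producer =
                new KafkaProducer<>(props, new StringSerializer(), new StringSerializer());
        try {
            producer.send(new ProducerRecord<>("demo-topic", "key", "value"));
        } finally {
            // Bounded wait for in-flight sends instead of the Long.MAX_VALUE default.
            producer.close(5, TimeUnit.SECONDS);
        }
    }
}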
@Test public void testSerializerClose() throws Exception { Map<String, Object> configs = new HashMap<>(); configs.put(ProducerConfig.CLIENT_ID_CONFIG, "testConstructorClose"); configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); configs.put(ProducerConfig.METRIC_REPORTER_CLASSES_CONFIG, MockMetricsReporter.class.getName()); configs.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, CommonClientConfigs.DEFAULT_SECURITY_PROTOCOL); final int oldInitCount = MockSerializer.INIT_COUNT.get(); final int oldCloseCount = MockSerializer.CLOSE_COUNT.get(); KafkaProducer<byte[], byte[]> producer = new KafkaProducer<byte[], byte[]>( configs, new MockSerializer(), new MockSerializer()); assertEquals(oldInitCount + 2, MockSerializer.INIT_COUNT.get()); assertEquals(oldCloseCount, MockSerializer.CLOSE_COUNT.get()); producer.close(); assertEquals(oldInitCount + 2, MockSerializer.INIT_COUNT.get()); assertEquals(oldCloseCount + 2, MockSerializer.CLOSE_COUNT.get()); }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void testInterceptorConstructClose() throws Exception { try { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); props.setProperty(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, MockProducerInterceptor.class.getName()); props.setProperty(MockProducerInterceptor.APPEND_STRING_PROP, "something"); KafkaProducer<String, String> producer = new KafkaProducer<String, String>( props, new StringSerializer(), new StringSerializer()); assertEquals(1, MockProducerInterceptor.INIT_COUNT.get()); assertEquals(0, MockProducerInterceptor.CLOSE_COUNT.get()); Assert.assertNull(MockProducerInterceptor.CLUSTER_META.get()); producer.close(); assertEquals(1, MockProducerInterceptor.INIT_COUNT.get()); assertEquals(1, MockProducerInterceptor.CLOSE_COUNT.get()); } finally { MockProducerInterceptor.resetCounters(); } }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void testPartitionerClose() throws Exception { try { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); props.setProperty(ProducerConfig.PARTITIONER_CLASS_CONFIG, MockPartitioner.class.getName()); KafkaProducer<String, String> producer = new KafkaProducer<String, String>( props, new StringSerializer(), new StringSerializer()); assertEquals(1, MockPartitioner.INIT_COUNT.get()); assertEquals(0, MockPartitioner.CLOSE_COUNT.get()); producer.close(); assertEquals(1, MockPartitioner.INIT_COUNT.get()); assertEquals(1, MockPartitioner.CLOSE_COUNT.get()); } finally { MockPartitioner.resetCounters(); } }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void testOsDefaultSocketBufferSizes() throws Exception { Map<String, Object> config = new HashMap<>(); config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); config.put(ProducerConfig.SEND_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE); config.put(ProducerConfig.RECEIVE_BUFFER_CONFIG, Selectable.USE_DEFAULT_BUFFER_SIZE); KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>( config, new ByteArraySerializer(), new ByteArraySerializer()); producer.close(); }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message, buffering it in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the given topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void doubleToJson() { JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.FLOAT64_SCHEMA, 12.34)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"double\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(12.34, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).doubleValue(), 0.001); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
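fromConnectData wraps the value in a schema/payload envelope when schemas are enabled, which is exactly what validateEnvelope checks in the test above. A small sketch of driving JsonConverter directly, assuming the standard "schemas.enable" config key and an illustrative topic name.

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonConverterSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // false = configuring the value converter, not the key converter
        converter.configure(Collections.singletonMap("schemas.enable", "true"), false);
        byte[] bytes = converter.fromConnectData("my-topic", Schema.FLOAT64_SCHEMA, 12.34);
        // Expected shape: {"schema":{"type":"double","optional":false},"payload":12.34}
        System.out.println(new String(bytes));
    }
}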
@PrepareOnlyThisForTest(Metadata.class) @Test public void testMetadataFetchOnStaleMetadata() throws Exception { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer()); Metadata metadata = PowerMock.createNiceMock(Metadata.class); MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata); String topic = "topic"; ProducerRecord<String, String> initialRecord = new ProducerRecord<>(topic, "value"); ProducerRecord<String, String> extendedRecord = new ProducerRecord<>(topic, 2, null, "value"); Collection<Node> nodes = Collections.singletonList(new Node(0, "host1", 1000)); final Cluster emptyCluster = new Cluster(null, nodes, Collections.<PartitionInfo>emptySet(), Collections.<String>emptySet(), Collections.<String>emptySet()); final Cluster initialCluster = new Cluster( "dummy", Collections.singletonList(new Node(0, "host1", 1000)), Arrays.asList(new PartitionInfo(topic, 0, null, null, null)), Collections.<String>emptySet(), Collections.<String>emptySet()); final Cluster extendedCluster = new Cluster( "dummy", Collections.singletonList(new Node(0, "host1", 1000)), Arrays.asList( new PartitionInfo(topic, 0, null, null, null), new PartitionInfo(topic, 1, null, null, null), new PartitionInfo(topic, 2, null, null, null)), Collections.<String>emptySet(), Collections.<String>emptySet()); final int refreshAttempts = 5; EasyMock.expect(metadata.fetch()).andReturn(emptyCluster).times(refreshAttempts - 1); EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once(); EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes(); PowerMock.replay(metadata); producer.send(initialRecord); PowerMock.verify(metadata); PowerMock.reset(metadata); EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once(); EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes(); PowerMock.replay(metadata); producer.send(initialRecord, null); PowerMock.verify(metadata); PowerMock.reset(metadata); EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once(); EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once(); EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes(); PowerMock.replay(metadata); try { producer.send(extendedRecord, null); fail("Expected KafkaException to be raised"); } catch (KafkaException e) { } PowerMock.verify(metadata); PowerMock.reset(metadata); EasyMock.expect(metadata.fetch()).andReturn(initialCluster).once(); EasyMock.expect(metadata.fetch()).andReturn(extendedCluster).once(); EasyMock.expect(metadata.fetch()).andThrow(new IllegalStateException("Unexpected call to metadata.fetch()")).anyTimes(); PowerMock.replay(metadata); producer.send(extendedRecord, null); PowerMock.verify(metadata); }
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
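The one-arg send(record) is a fire-and-forget delegation to send(record, null); the stale-metadata test above exercises how send keeps re-fetching metadata until the target partition appears. A caller-side sketch of both call shapes, with an assumed broker address and topic name.

import java.util.Properties;
import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.serialization.StringSerializer;

public class SendShapes {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        try (KafkaProducer<String, String> producer =
                     new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
            ProducerRecord<String, String> record = new ProducerRecord<>("topic", "value");
            // One-arg form: identical to send(record, null).
            Future<RecordMetadata> f = producer.send(record);
            // Two-arg form: the callback fires once the broker acks or the send fails.
            producer.send(record, (metadata, exception) -> {
                if (exception != null) exception.printStackTrace();
            });
            f.get(); // blocks until the first send completes
        }
    }
}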
@Test public void testTopicRefreshInMetadata() throws Exception { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); props.setProperty(ProducerConfig.MAX_BLOCK_MS_CONFIG, "600000"); KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer()); long refreshBackoffMs = 500L; long metadataExpireMs = 60000L; final Metadata metadata = new Metadata(refreshBackoffMs, metadataExpireMs, true, true, new ClusterResourceListeners()); final Time time = new MockTime(); MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata); MemberModifier.field(KafkaProducer.class, "time").set(producer, time); final String topic = "topic"; Thread t = new Thread() { @Override public void run() { long startTimeMs = System.currentTimeMillis(); for (int i = 0; i < 10; i++) { while (!metadata.updateRequested() && System.currentTimeMillis() - startTimeMs < 1000) yield(); metadata.update(Cluster.empty(), Collections.singleton(topic), time.milliseconds()); time.sleep(60 * 1000L); } } }; t.start(); try { producer.partitionsFor(topic); fail("Expect TimeoutException"); } catch (TimeoutException e) { } Assert.assertTrue("Topic should still exist in metadata", metadata.containsTopic(topic)); }
@Override public List<PartitionInfo> partitionsFor(String topic) { try { return waitOnMetadata(topic, null, maxBlockTimeMs).cluster.partitionsForTopic(topic); } catch (InterruptedException e) { throw new InterruptException(e); } }
KafkaProducer implements Producer<K, V> { @Override public List<PartitionInfo> partitionsFor(String topic) { try { return waitOnMetadata(topic, null, maxBlockTimeMs).cluster.partitionsForTopic(topic); } catch (InterruptedException e) { throw new InterruptException(e); } } }
KafkaProducer implements Producer<K, V> { @Override public List<PartitionInfo> partitionsFor(String topic) { try { return waitOnMetadata(topic, null, maxBlockTimeMs).cluster.partitionsForTopic(topic); } catch (InterruptedException e) { throw new InterruptException(e); } } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public List<PartitionInfo> partitionsFor(String topic) { try { return waitOnMetadata(topic, null, maxBlockTimeMs).cluster.partitionsForTopic(topic); } catch (InterruptedException e) { throw new InterruptException(e); } } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public List<PartitionInfo> partitionsFor(String topic) { try { return waitOnMetadata(topic, null, maxBlockTimeMs).cluster.partitionsForTopic(topic); } catch (InterruptedException e) { throw new InterruptException(e); } } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
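partitionsFor blocks for at most max.block.ms waiting on metadata and then throws a TimeoutException; the test above additionally verifies the topic stays registered in the metadata set after the timeout. A caller-side sketch of the bounded blocking, with an assumed 5-second bound and topic name.

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.errors.TimeoutException;
import org.apache.kafka.common.serialization.StringSerializer;

public class PartitionsForSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, "5000"); // assumed bound
        try (KafkaProducer<String, String> producer =
                     new KafkaProducer<>(props, new StringSerializer(), new StringSerializer())) {
            try {
                System.out.println(producer.partitionsFor("topic"));
            } catch (TimeoutException e) {
                // No metadata for the topic arrived within max.block.ms.
                System.err.println("Timed out waiting for metadata: " + e.getMessage());
            }
        }
    }
}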
@PrepareOnlyThisForTest(Metadata.class) @Test public void testHeaders() throws Exception { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); ExtendedSerializer keySerializer = PowerMock.createNiceMock(ExtendedSerializer.class); ExtendedSerializer valueSerializer = PowerMock.createNiceMock(ExtendedSerializer.class); KafkaProducer<String, String> producer = new KafkaProducer<>(props, keySerializer, valueSerializer); Metadata metadata = PowerMock.createNiceMock(Metadata.class); MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata); String topic = "topic"; Collection<Node> nodes = Collections.singletonList(new Node(0, "host1", 1000)); final Cluster cluster = new Cluster( "dummy", Collections.singletonList(new Node(0, "host1", 1000)), Arrays.asList(new PartitionInfo(topic, 0, null, null, null)), Collections.<String>emptySet(), Collections.<String>emptySet()); EasyMock.expect(metadata.fetch()).andReturn(cluster).anyTimes(); PowerMock.replay(metadata); String value = "value"; ProducerRecord<String, String> record = new ProducerRecord<>(topic, value); EasyMock.expect(keySerializer.serialize(topic, record.headers(), null)).andReturn(null).once(); EasyMock.expect(valueSerializer.serialize(topic, record.headers(), value)).andReturn(value.getBytes()).once(); PowerMock.replay(keySerializer); PowerMock.replay(valueSerializer); record.headers().add(new RecordHeader("test", "header2".getBytes())); producer.send(record, null); try { record.headers().add(new RecordHeader("test", "test".getBytes())); fail("Expected IllegalStateException to be raised"); } catch (IllegalStateException ise) { } assertTrue(Arrays.equals(record.headers().lastHeader("test").value(), "header2".getBytes())); PowerMock.verify(valueSerializer); PowerMock.verify(keySerializer); }
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void closeShouldBeIdempotent() { Properties producerProps = new Properties(); producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9000"); Producer producer = new KafkaProducer<>(producerProps, new ByteArraySerializer(), new ByteArraySerializer()); producer.close(); producer.close(); }
@Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public void close() { close(Long.MAX_VALUE, TimeUnit.MILLISECONDS); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@PrepareOnlyThisForTest(Metadata.class) @Test public void testInterceptorPartitionSetOnTooLargeRecord() throws Exception { Properties props = new Properties(); props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9999"); props.setProperty(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "1"); String topic = "topic"; ProducerRecord<String, String> record = new ProducerRecord<>(topic, "value"); KafkaProducer<String, String> producer = new KafkaProducer<>(props, new StringSerializer(), new StringSerializer()); Metadata metadata = PowerMock.createNiceMock(Metadata.class); MemberModifier.field(KafkaProducer.class, "metadata").set(producer, metadata); final Cluster cluster = new Cluster( "dummy", Collections.singletonList(new Node(0, "host1", 1000)), Arrays.asList(new PartitionInfo(topic, 0, null, null, null)), Collections.<String>emptySet(), Collections.<String>emptySet()); EasyMock.expect(metadata.fetch()).andReturn(cluster).once(); ProducerInterceptors interceptors = PowerMock.createMock(ProducerInterceptors.class); EasyMock.expect(interceptors.onSend(record)).andReturn(record); interceptors.onSendError(EasyMock.eq(record), EasyMock.<TopicPartition>notNull(), EasyMock.<Exception>notNull()); EasyMock.expectLastCall(); MemberModifier.field(KafkaProducer.class, "interceptors").set(producer, interceptors); PowerMock.replay(metadata); EasyMock.replay(interceptors); producer.send(record); EasyMock.verify(interceptors); }
@Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
KafkaProducer implements Producer<K, V> { @Override public Future<RecordMetadata> send(ProducerRecord<K, V> record) { return send(record, null); } KafkaProducer(Map<String, Object> configs); KafkaProducer(Map<String, Object> configs, Serializer<K> keySerializer, Serializer<V> valueSerializer); KafkaProducer(Properties properties); KafkaProducer(Properties properties, Serializer<K> keySerializer, Serializer<V> valueSerializer); @SuppressWarnings({"unchecked", "deprecation"}) private KafkaProducer(ProducerConfig config, Serializer<K> keySerializer, Serializer<V> valueSerializer); void initTransactions(); void beginTransaction(); void sendOffsetsToTransaction(Map<TopicPartition, OffsetAndMetadata> offsets, String consumerGroupId); void commitTransaction(); void abortTransaction(); @Override Future<RecordMetadata> send(ProducerRecord<K, V> record); @Override // Send a message; the record is staged in the RecordAccumulator Future<RecordMetadata> send(ProducerRecord<K, V> record, Callback callback); @Override void flush(); @Override // Fetch partition info for the specified topic from Metadata List<PartitionInfo> partitionsFor(String topic); @Override Map<MetricName, ? extends Metric> metrics(); @Override void close(); @Override void close(long timeout, TimeUnit timeUnit); }
@Test public void testGetOrCreateListValue() { Map<String, List<String>> map = new HashMap<>(); List<String> fooList = KafkaAdminClient.getOrCreateListValue(map, "foo"); assertNotNull(fooList); fooList.add("a"); fooList.add("b"); List<String> fooList2 = KafkaAdminClient.getOrCreateListValue(map, "foo"); assertEquals(fooList, fooList2); assertTrue(fooList2.contains("a")); assertTrue(fooList2.contains("b")); List<String> barList = KafkaAdminClient.getOrCreateListValue(map, "bar"); assertNotNull(barList); assertTrue(barList.isEmpty()); }
static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) { List<V> list = map.get(key); if (list != null) return list; list = new LinkedList<>(); map.put(key, list); return list; }
KafkaAdminClient extends AdminClient { static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) { List<V> list = map.get(key); if (list != null) return list; list = new LinkedList<>(); map.put(key, list); return list; } }
KafkaAdminClient extends AdminClient { static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) { List<V> list = map.get(key); if (list != null) return list; list = new LinkedList<>(); map.put(key, list); return list; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) { List<V> list = map.get(key); if (list != null) return list; list = new LinkedList<>(); map.put(key, list); return list; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) { List<V> list = map.get(key); if (list != null) return list; list = new LinkedList<>(); map.put(key, list); return list; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
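getOrCreateListValue is a hand-rolled, pre-Java-8 equivalent of computeIfAbsent for list-valued maps. A self-contained stand-in showing the same contract the test verifies; the class name is illustrative, not part of the client.

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class ListValueSketch {
    // Same behavior as the focal method, expressed with computeIfAbsent.
    static <K, V> List<V> getOrCreateListValue(Map<K, List<V>> map, K key) {
        return map.computeIfAbsent(key, k -> new LinkedList<>());
    }

    public static void main(String[] args) {
        Map<String, List<String>> map = new HashMap<>();
        getOrCreateListValue(map, "foo").add("a");
        getOrCreateListValue(map, "foo").add("b");   // second lookup reuses the same list
        System.out.println(map);                     // {foo=[a, b]}
        System.out.println(getOrCreateListValue(map, "bar").isEmpty()); // true: fresh empty list
    }
}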
@Test public void testCalcTimeoutMsRemainingAsInt() { assertEquals(0, KafkaAdminClient.calcTimeoutMsRemainingAsInt(1000, 1000)); assertEquals(100, KafkaAdminClient.calcTimeoutMsRemainingAsInt(1000, 1100)); assertEquals(Integer.MAX_VALUE, KafkaAdminClient.calcTimeoutMsRemainingAsInt(0, Long.MAX_VALUE)); assertEquals(Integer.MIN_VALUE, KafkaAdminClient.calcTimeoutMsRemainingAsInt(Long.MAX_VALUE, 0)); }
static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) { long deltaMs = deadlineMs - now; if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE; else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE; return (int) deltaMs; }
KafkaAdminClient extends AdminClient { static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) { long deltaMs = deadlineMs - now; if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE; else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE; return (int) deltaMs; } }
KafkaAdminClient extends AdminClient { static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) { long deltaMs = deadlineMs - now; if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE; else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE; return (int) deltaMs; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) { long deltaMs = deadlineMs - now; if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE; else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE; return (int) deltaMs; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) { long deltaMs = deadlineMs - now; if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE; else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE; return (int) deltaMs; } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
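The helper computes deadline minus now and saturates at the int bounds so the narrowing cast cannot wrap; the test pins the zero, positive, and both saturated cases. A standalone copy (body taken from the focal method above) makes the overflow cases easy to replay.

public class TimeoutClampSketch {
    static int calcTimeoutMsRemainingAsInt(long now, long deadlineMs) {
        long deltaMs = deadlineMs - now;
        if (deltaMs > Integer.MAX_VALUE) deltaMs = Integer.MAX_VALUE;
        else if (deltaMs < Integer.MIN_VALUE) deltaMs = Integer.MIN_VALUE;
        return (int) deltaMs;
    }

    public static void main(String[] args) {
        System.out.println(calcTimeoutMsRemainingAsInt(1000, 1100));        // 100
        System.out.println(calcTimeoutMsRemainingAsInt(0, Long.MAX_VALUE)); // 2147483647 (saturated high)
        System.out.println(calcTimeoutMsRemainingAsInt(Long.MAX_VALUE, 0)); // -2147483648 (saturated low)
    }
}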
@Test public void testPrettyPrintException() { assertEquals("Null exception.", KafkaAdminClient.prettyPrintException(null)); assertEquals("TimeoutException", KafkaAdminClient.prettyPrintException(new TimeoutException())); assertEquals("TimeoutException: The foobar timed out.", KafkaAdminClient.prettyPrintException(new TimeoutException("The foobar timed out."))); }
static String prettyPrintException(Throwable throwable) { if (throwable == null) return "Null exception."; if (throwable.getMessage() != null) { return throwable.getClass().getSimpleName() + ": " + throwable.getMessage(); } return throwable.getClass().getSimpleName(); }
KafkaAdminClient extends AdminClient { static String prettyPrintException(Throwable throwable) { if (throwable == null) return "Null exception."; if (throwable.getMessage() != null) { return throwable.getClass().getSimpleName() + ": " + throwable.getMessage(); } return throwable.getClass().getSimpleName(); } }
KafkaAdminClient extends AdminClient { static String prettyPrintException(Throwable throwable) { if (throwable == null) return "Null exception."; if (throwable.getMessage() != null) { return throwable.getClass().getSimpleName() + ": " + throwable.getMessage(); } return throwable.getClass().getSimpleName(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { static String prettyPrintException(Throwable throwable) { if (throwable == null) return "Null exception."; if (throwable.getMessage() != null) { return throwable.getClass().getSimpleName() + ": " + throwable.getMessage(); } return throwable.getClass().getSimpleName(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { static String prettyPrintException(Throwable throwable) { if (throwable == null) return "Null exception."; if (throwable.getMessage() != null) { return throwable.getClass().getSimpleName() + ": " + throwable.getMessage(); } return throwable.getClass().getSimpleName(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
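prettyPrintException is a null-safe formatter with three branches: a fixed string for null, "SimpleName: message" when a message exists, and the bare class name otherwise. A small driver (helper body copied from the focal method) exercising each branch with assumed exception instances:

public class PrettyPrintSketch {
    static String prettyPrintException(Throwable throwable) {
        if (throwable == null) return "Null exception.";
        if (throwable.getMessage() != null)
            return throwable.getClass().getSimpleName() + ": " + throwable.getMessage();
        return throwable.getClass().getSimpleName();
    }

    public static void main(String[] args) {
        System.out.println(prettyPrintException(null));                            // Null exception.
        System.out.println(prettyPrintException(new IllegalStateException()));     // IllegalStateException
        System.out.println(prettyPrintException(new IllegalStateException("x")));  // IllegalStateException: x
    }
}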
@Test public void testGenerateClientId() { Set<String> ids = new HashSet<>(); for (int i = 0; i < 10; i++) { String id = KafkaAdminClient.generateClientId(newConfMap(AdminClientConfig.CLIENT_ID_CONFIG, "")); assertTrue("Got duplicate id " + id, !ids.contains(id)); ids.add(id); } assertEquals("myCustomId", KafkaAdminClient.generateClientId(newConfMap(AdminClientConfig.CLIENT_ID_CONFIG, "myCustomId"))); }
static String generateClientId(AdminClientConfig config) { String clientId = config.getString(AdminClientConfig.CLIENT_ID_CONFIG); if (!clientId.isEmpty()) return clientId; return "adminclient-" + ADMIN_CLIENT_ID_SEQUENCE.getAndIncrement(); }
KafkaAdminClient extends AdminClient { static String generateClientId(AdminClientConfig config) { String clientId = config.getString(AdminClientConfig.CLIENT_ID_CONFIG); if (!clientId.isEmpty()) return clientId; return "adminclient-" + ADMIN_CLIENT_ID_SEQUENCE.getAndIncrement(); } }
KafkaAdminClient extends AdminClient { static String generateClientId(AdminClientConfig config) { String clientId = config.getString(AdminClientConfig.CLIENT_ID_CONFIG); if (!clientId.isEmpty()) return clientId; return "adminclient-" + ADMIN_CLIENT_ID_SEQUENCE.getAndIncrement(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { static String generateClientId(AdminClientConfig config) { String clientId = config.getString(AdminClientConfig.CLIENT_ID_CONFIG); if (!clientId.isEmpty()) return clientId; return "adminclient-" + ADMIN_CLIENT_ID_SEQUENCE.getAndIncrement(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { static String generateClientId(AdminClientConfig config) { String clientId = config.getString(AdminClientConfig.CLIENT_ID_CONFIG); if (!clientId.isEmpty()) return clientId; return "adminclient-" + ADMIN_CLIENT_ID_SEQUENCE.getAndIncrement(); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
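An explicitly configured client.id always wins; otherwise an "adminclient-<n>" id is minted from a process-wide AtomicInteger, which is why the test never sees a duplicate. A self-contained stand-in using the same counter pattern (the config plumbing is replaced by a plain String parameter for brevity):

import java.util.concurrent.atomic.AtomicInteger;

public class ClientIdSketch {
    private static final AtomicInteger SEQUENCE = new AtomicInteger(1);

    static String generateClientId(String configuredId) {
        if (configuredId != null && !configuredId.isEmpty()) return configuredId;
        return "adminclient-" + SEQUENCE.getAndIncrement();
    }

    public static void main(String[] args) {
        System.out.println(generateClientId("myCustomId")); // myCustomId
        System.out.println(generateClientId(""));           // adminclient-1
        System.out.println(generateClientId(""));           // adminclient-2, never a duplicate
    }
}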
@Test public void testTimeoutWithoutMetadata() throws Exception { try (MockKafkaAdminClientEnv env = mockClientEnv(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "10")) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().setNode(new Node(0, "localhost", 8121)); env.kafkaClient().prepareResponse(new CreateTopicsResponse(Collections.singletonMap("myTopic", new ApiError(Errors.NONE, "")))); KafkaFuture<Void> future = env.adminClient().createTopics( Collections.singleton(new NewTopic("myTopic", Collections.singletonMap(Integer.valueOf(0), asList(new Integer[]{0, 1, 2})))), new CreateTopicsOptions().timeoutMs(1000)).all(); assertFutureError(future, TimeoutException.class); } }
@Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
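createTopics registers one future per distinct topic name, sends a CreateTopicsRequest to the controller, and completes each future from the per-topic error map; the timeout test drives the path where the call expires before any metadata arrives. A minimal caller-side sketch; the broker address, topic name, partition count, and replication factor are assumed values.

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.CreateTopicsOptions;
import org.apache.kafka.clients.admin.NewTopic;

public class CreateTopicSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed address
        try (AdminClient admin = AdminClient.create(props)) {
            NewTopic topic = new NewTopic("myTopic", 3, (short) 1); // 3 partitions, RF 1 (assumed)
            // all() resolves only once every per-topic future has completed.
            admin.createTopics(Collections.singleton(topic),
                    new CreateTopicsOptions().timeoutMs(10000)).all().get();
        }
    }
}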
@Test public void bytesToJson() throws IOException { JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.BYTES_SCHEMA, "test-string".getBytes())); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(ByteBuffer.wrap("test-string".getBytes()), ByteBuffer.wrap(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue())); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
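A minimal round-trip sketch (editorial, assuming the converter's standard "schemas.enable" configuration key): serialize a value with fromConnectData and read it back with toConnectData, the envelope path the bytesToJson test inspects.

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

public class JsonConverterRoundTrip {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // isKey = false: configure for record values, with schema envelopes enabled
        converter.configure(Collections.singletonMap("schemas.enable", "true"), false);
        byte[] serialized = converter.fromConnectData("demo-topic", Schema.STRING_SCHEMA, "hello");
        SchemaAndValue roundTripped = converter.toConnectData("demo-topic", serialized);
        System.out.println(roundTripped.value()); // prints: hello
    }
}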
@Test public void testCreateTopics() throws Exception { try (MockKafkaAdminClientEnv env = mockClientEnv()) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet()); env.kafkaClient().setNode(env.cluster().controller()); env.kafkaClient().prepareResponse(new CreateTopicsResponse(Collections.singletonMap("myTopic", new ApiError(Errors.NONE, "")))); KafkaFuture<Void> future = env.adminClient().createTopics( Collections.singleton(new NewTopic("myTopic", Collections.singletonMap(Integer.valueOf(0), asList(new Integer[]{0, 1, 2})))), new CreateTopicsOptions().timeoutMs(10000)).all(); future.get(); } }
@Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { @Override public CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options) { final Map<String, KafkaFutureImpl<Void>> topicFutures = new HashMap<>(newTopics.size()); final Map<String, CreateTopicsRequest.TopicDetails> topicsMap = new HashMap<>(newTopics.size()); for (NewTopic newTopic : newTopics) { if (topicFutures.get(newTopic.name()) == null) { topicFutures.put(newTopic.name(), new KafkaFutureImpl<Void>()); topicsMap.put(newTopic.name(), newTopic.convertToTopicDetails()); } } final long now = time.milliseconds(); runnable.call(new Call("createTopics", calcDeadlineMs(now, options.timeoutMs()), new ControllerNodeProvider()) { @Override public AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateTopicsRequest.Builder(topicsMap, timeoutMs, options.shouldValidateOnly()); } @Override public void handleResponse(AbstractResponse abstractResponse) { CreateTopicsResponse response = (CreateTopicsResponse) abstractResponse; for (Map.Entry<String, ApiError> entry : response.errors().entrySet()) { KafkaFutureImpl<Void> future = topicFutures.get(entry.getKey()); if (future == null) { log.warn("Server response mentioned unknown topic {}", entry.getKey()); } else { ApiException exception = entry.getValue().exception(); if (exception != null) { future.completeExceptionally(exception); } else { future.complete(null); } } } for (Map.Entry<String, KafkaFutureImpl<Void>> entry : topicFutures.entrySet()) { KafkaFutureImpl<Void> future = entry.getValue(); if (!future.isDone()) { future.completeExceptionally(new ApiException("The server response did not " + "contain a reference to node " + entry.getKey())); } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(topicFutures.values(), throwable); } }, now); return new CreateTopicsResult(new HashMap<String, KafkaFuture<Void>>(topicFutures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
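A hedged usage sketch mirroring the constructor testCreateTopics uses: create a topic with an explicit replica assignment and wait on all(). The broker address and broker ids are placeholders.

import static java.util.Arrays.asList;

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.CreateTopicsOptions;
import org.apache.kafka.clients.admin.NewTopic;

public class CreateTopicWithAssignment {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
        try (AdminClient admin = AdminClient.create(props)) {
            // partition 0 assigned to brokers 0, 1 and 2, as in the test above
            NewTopic topic = new NewTopic("myTopic", Collections.singletonMap(0, asList(0, 1, 2)));
            admin.createTopics(Collections.singleton(topic),
                    new CreateTopicsOptions().timeoutMs(10000)).all().get();
        }
    }
}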
@Test public void testDescribeAcls() throws Exception { try (MockKafkaAdminClientEnv env = mockClientEnv()) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet()); env.kafkaClient().setNode(env.cluster().controller()); env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, ApiError.NONE, asList(ACL1, ACL2))); assertCollectionIs(env.adminClient().describeAcls(FILTER1).values().get(), ACL1, ACL2); env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, ApiError.NONE, Collections.<AclBinding>emptySet())); assertTrue(env.adminClient().describeAcls(FILTER2).values().get().isEmpty()); env.kafkaClient().prepareResponse(new DescribeAclsResponse(0, new ApiError(Errors.SECURITY_DISABLED, "Security is disabled"), Collections.<AclBinding>emptySet())); assertFutureError(env.adminClient().describeAcls(FILTER2).values(), SecurityDisabledException.class); } }
@Override public DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options) { final long now = time.milliseconds(); final KafkaFutureImpl<Collection<AclBinding>> future = new KafkaFutureImpl<>(); runnable.call(new Call("describeAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DescribeAclsRequest.Builder(filter); } @Override void handleResponse(AbstractResponse abstractResponse) { DescribeAclsResponse response = (DescribeAclsResponse) abstractResponse; if (response.error().isFailure()) { future.completeExceptionally(response.error().exception()); } else { future.complete(response.acls()); } } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }, now); return new DescribeAclsResult(future); }
KafkaAdminClient extends AdminClient { @Override public DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options) { final long now = time.milliseconds(); final KafkaFutureImpl<Collection<AclBinding>> future = new KafkaFutureImpl<>(); runnable.call(new Call("describeAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DescribeAclsRequest.Builder(filter); } @Override void handleResponse(AbstractResponse abstractResponse) { DescribeAclsResponse response = (DescribeAclsResponse) abstractResponse; if (response.error().isFailure()) { future.completeExceptionally(response.error().exception()); } else { future.complete(response.acls()); } } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }, now); return new DescribeAclsResult(future); } }
KafkaAdminClient extends AdminClient { @Override public DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options) { final long now = time.milliseconds(); final KafkaFutureImpl<Collection<AclBinding>> future = new KafkaFutureImpl<>(); runnable.call(new Call("describeAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DescribeAclsRequest.Builder(filter); } @Override void handleResponse(AbstractResponse abstractResponse) { DescribeAclsResponse response = (DescribeAclsResponse) abstractResponse; if (response.error().isFailure()) { future.completeExceptionally(response.error().exception()); } else { future.complete(response.acls()); } } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }, now); return new DescribeAclsResult(future); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { @Override public DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options) { final long now = time.milliseconds(); final KafkaFutureImpl<Collection<AclBinding>> future = new KafkaFutureImpl<>(); runnable.call(new Call("describeAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DescribeAclsRequest.Builder(filter); } @Override void handleResponse(AbstractResponse abstractResponse) { DescribeAclsResponse response = (DescribeAclsResponse) abstractResponse; if (response.error().isFailure()) { future.completeExceptionally(response.error().exception()); } else { future.complete(response.acls()); } } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }, now); return new DescribeAclsResult(future); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { @Override public DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options) { final long now = time.milliseconds(); final KafkaFutureImpl<Collection<AclBinding>> future = new KafkaFutureImpl<>(); runnable.call(new Call("describeAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DescribeAclsRequest.Builder(filter); } @Override void handleResponse(AbstractResponse abstractResponse) { DescribeAclsResponse response = (DescribeAclsResponse) abstractResponse; if (response.error().isFailure()) { future.completeExceptionally(response.error().exception()); } else { future.complete(response.acls()); } } @Override void handleFailure(Throwable throwable) { future.completeExceptionally(throwable); } }, now); return new DescribeAclsResult(future); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
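An editorial sketch of calling describeAcls from client code, assuming AclBindingFilter.ANY as the match-everything filter and a placeholder broker address; a failure such as SecurityDisabledException surfaces through the returned future, as the test asserts.

import java.util.Collection;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclBindingFilter;

public class DescribeAclsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
        try (AdminClient admin = AdminClient.create(props)) {
            // values() completes with the matching bindings or fails exceptionally
            Collection<AclBinding> acls = admin.describeAcls(AclBindingFilter.ANY).values().get();
            acls.forEach(System.out::println);
        }
    }
}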
@Test public void testCreateAcls() throws Exception { try (MockKafkaAdminClientEnv env = mockClientEnv()) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet()); env.kafkaClient().setNode(env.cluster().controller()); env.kafkaClient().prepareResponse(new CreateAclsResponse(0, asList(new AclCreationResponse(ApiError.NONE), new AclCreationResponse(ApiError.NONE)))); CreateAclsResult results = env.adminClient().createAcls(asList(ACL1, ACL2)); assertCollectionIs(results.values().keySet(), ACL1, ACL2); for (KafkaFuture<Void> future : results.values().values()) future.get(); results.all().get(); env.kafkaClient().prepareResponse(new CreateAclsResponse(0, asList( new AclCreationResponse(new ApiError(Errors.SECURITY_DISABLED, "Security is disabled")), new AclCreationResponse(ApiError.NONE)) )); results = env.adminClient().createAcls(asList(ACL1, ACL2)); assertCollectionIs(results.values().keySet(), ACL1, ACL2); assertFutureError(results.values().get(ACL1), SecurityDisabledException.class); results.values().get(ACL2).get(); assertFutureError(results.all(), SecurityDisabledException.class); } }
@Override public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) { final long now = time.milliseconds(); final Map<AclBinding, KafkaFutureImpl<Void>> futures = new HashMap<>(); final List<AclCreation> aclCreations = new ArrayList<>(); for (AclBinding acl : acls) { if (futures.get(acl) == null) { KafkaFutureImpl<Void> future = new KafkaFutureImpl<>(); futures.put(acl, future); String indefinite = acl.toFilter().findIndefiniteField(); if (indefinite == null) { aclCreations.add(new AclCreation(acl)); } else { future.completeExceptionally(new InvalidRequestException("Invalid ACL creation: " + indefinite)); } } } runnable.call(new Call("createAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateAclsRequest.Builder(aclCreations); } @Override void handleResponse(AbstractResponse abstractResponse) { CreateAclsResponse response = (CreateAclsResponse) abstractResponse; List<AclCreationResponse> responses = response.aclCreationResponses(); Iterator<AclCreationResponse> iter = responses.iterator(); for (AclCreation aclCreation : aclCreations) { KafkaFutureImpl<Void> future = futures.get(aclCreation.acl()); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no creation result for the given ACL.")); } else { AclCreationResponse creation = iter.next(); if (creation.error().isFailure()) { future.completeExceptionally(creation.error().exception()); } else { future.complete(null); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new CreateAclsResult(new HashMap<AclBinding, KafkaFuture<Void>>(futures)); }
KafkaAdminClient extends AdminClient { @Override public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) { final long now = time.milliseconds(); final Map<AclBinding, KafkaFutureImpl<Void>> futures = new HashMap<>(); final List<AclCreation> aclCreations = new ArrayList<>(); for (AclBinding acl : acls) { if (futures.get(acl) == null) { KafkaFutureImpl<Void> future = new KafkaFutureImpl<>(); futures.put(acl, future); String indefinite = acl.toFilter().findIndefiniteField(); if (indefinite == null) { aclCreations.add(new AclCreation(acl)); } else { future.completeExceptionally(new InvalidRequestException("Invalid ACL creation: " + indefinite)); } } } runnable.call(new Call("createAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateAclsRequest.Builder(aclCreations); } @Override void handleResponse(AbstractResponse abstractResponse) { CreateAclsResponse response = (CreateAclsResponse) abstractResponse; List<AclCreationResponse> responses = response.aclCreationResponses(); Iterator<AclCreationResponse> iter = responses.iterator(); for (AclCreation aclCreation : aclCreations) { KafkaFutureImpl<Void> future = futures.get(aclCreation.acl()); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no creation result for the given ACL.")); } else { AclCreationResponse creation = iter.next(); if (creation.error().isFailure()) { future.completeExceptionally(creation.error().exception()); } else { future.complete(null); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new CreateAclsResult(new HashMap<AclBinding, KafkaFuture<Void>>(futures)); } }
KafkaAdminClient extends AdminClient { @Override public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) { final long now = time.milliseconds(); final Map<AclBinding, KafkaFutureImpl<Void>> futures = new HashMap<>(); final List<AclCreation> aclCreations = new ArrayList<>(); for (AclBinding acl : acls) { if (futures.get(acl) == null) { KafkaFutureImpl<Void> future = new KafkaFutureImpl<>(); futures.put(acl, future); String indefinite = acl.toFilter().findIndefiniteField(); if (indefinite == null) { aclCreations.add(new AclCreation(acl)); } else { future.completeExceptionally(new InvalidRequestException("Invalid ACL creation: " + indefinite)); } } } runnable.call(new Call("createAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateAclsRequest.Builder(aclCreations); } @Override void handleResponse(AbstractResponse abstractResponse) { CreateAclsResponse response = (CreateAclsResponse) abstractResponse; List<AclCreationResponse> responses = response.aclCreationResponses(); Iterator<AclCreationResponse> iter = responses.iterator(); for (AclCreation aclCreation : aclCreations) { KafkaFutureImpl<Void> future = futures.get(aclCreation.acl()); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no creation result for the given ACL.")); } else { AclCreationResponse creation = iter.next(); if (creation.error().isFailure()) { future.completeExceptionally(creation.error().exception()); } else { future.complete(null); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new CreateAclsResult(new HashMap<AclBinding, KafkaFuture<Void>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { @Override public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) { final long now = time.milliseconds(); final Map<AclBinding, KafkaFutureImpl<Void>> futures = new HashMap<>(); final List<AclCreation> aclCreations = new ArrayList<>(); for (AclBinding acl : acls) { if (futures.get(acl) == null) { KafkaFutureImpl<Void> future = new KafkaFutureImpl<>(); futures.put(acl, future); String indefinite = acl.toFilter().findIndefiniteField(); if (indefinite == null) { aclCreations.add(new AclCreation(acl)); } else { future.completeExceptionally(new InvalidRequestException("Invalid ACL creation: " + indefinite)); } } } runnable.call(new Call("createAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateAclsRequest.Builder(aclCreations); } @Override void handleResponse(AbstractResponse abstractResponse) { CreateAclsResponse response = (CreateAclsResponse) abstractResponse; List<AclCreationResponse> responses = response.aclCreationResponses(); Iterator<AclCreationResponse> iter = responses.iterator(); for (AclCreation aclCreation : aclCreations) { KafkaFutureImpl<Void> future = futures.get(aclCreation.acl()); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no creation result for the given ACL.")); } else { AclCreationResponse creation = iter.next(); if (creation.error().isFailure()) { future.completeExceptionally(creation.error().exception()); } else { future.complete(null); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new CreateAclsResult(new HashMap<AclBinding, KafkaFuture<Void>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { @Override public CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options) { final long now = time.milliseconds(); final Map<AclBinding, KafkaFutureImpl<Void>> futures = new HashMap<>(); final List<AclCreation> aclCreations = new ArrayList<>(); for (AclBinding acl : acls) { if (futures.get(acl) == null) { KafkaFutureImpl<Void> future = new KafkaFutureImpl<>(); futures.put(acl, future); String indefinite = acl.toFilter().findIndefiniteField(); if (indefinite == null) { aclCreations.add(new AclCreation(acl)); } else { future.completeExceptionally(new InvalidRequestException("Invalid ACL creation: " + indefinite)); } } } runnable.call(new Call("createAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new CreateAclsRequest.Builder(aclCreations); } @Override void handleResponse(AbstractResponse abstractResponse) { CreateAclsResponse response = (CreateAclsResponse) abstractResponse; List<AclCreationResponse> responses = response.aclCreationResponses(); Iterator<AclCreationResponse> iter = responses.iterator(); for (AclCreation aclCreation : aclCreations) { KafkaFutureImpl<Void> future = futures.get(aclCreation.acl()); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no creation result for the given ACL.")); } else { AclCreationResponse creation = iter.next(); if (creation.error().isFailure()) { future.completeExceptionally(creation.error().exception()); } else { future.complete(null); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new CreateAclsResult(new HashMap<AclBinding, KafkaFuture<Void>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
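A hedged sketch of building one AclBinding and submitting it through createAcls, using the Resource/AccessControlEntry classes of this API generation; the principal, host, and topic name are illustrative placeholders.

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.Resource;
import org.apache.kafka.common.resource.ResourceType;

public class CreateAclSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
        try (AdminClient admin = AdminClient.create(props)) {
            AclBinding binding = new AclBinding(
                    new Resource(ResourceType.TOPIC, "demo-topic"),   // placeholder topic
                    new AccessControlEntry("User:alice", "*",         // placeholder principal
                            AclOperation.READ, AclPermissionType.ALLOW));
            admin.createAcls(Collections.singleton(binding)).all().get();
        }
    }
}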
@Test public void testDeleteAcls() throws Exception { try (MockKafkaAdminClientEnv env = mockClientEnv()) { env.kafkaClient().setNodeApiVersions(NodeApiVersions.create()); env.kafkaClient().prepareMetadataUpdate(env.cluster(), Collections.<String>emptySet()); env.kafkaClient().setNode(env.cluster().controller()); env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList( new AclFilterResponse(asList(new AclDeletionResult(ACL1), new AclDeletionResult(ACL2))), new AclFilterResponse(new ApiError(Errors.SECURITY_DISABLED, "No security"), Collections.<AclDeletionResult>emptySet())))); DeleteAclsResult results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2)); Map<AclBindingFilter, KafkaFuture<FilterResults>> filterResults = results.values(); FilterResults filter1Results = filterResults.get(FILTER1).get(); assertEquals(null, filter1Results.values().get(0).exception()); assertEquals(ACL1, filter1Results.values().get(0).binding()); assertEquals(null, filter1Results.values().get(1).exception()); assertEquals(ACL2, filter1Results.values().get(1).binding()); assertFutureError(filterResults.get(FILTER2), SecurityDisabledException.class); assertFutureError(results.all(), SecurityDisabledException.class); env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList( new AclFilterResponse(asList(new AclDeletionResult(ACL1), new AclDeletionResult(new ApiError(Errors.SECURITY_DISABLED, "No security"), ACL2))), new AclFilterResponse(Collections.<AclDeletionResult>emptySet())))); results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2)); assertTrue(results.values().get(FILTER2).get().values().isEmpty()); assertFutureError(results.all(), SecurityDisabledException.class); env.kafkaClient().prepareResponse(new DeleteAclsResponse(0, asList( new AclFilterResponse(asList(new AclDeletionResult(ACL1))), new AclFilterResponse(asList(new AclDeletionResult(ACL2)))))); results = env.adminClient().deleteAcls(asList(FILTER1, FILTER2)); Collection<AclBinding> deleted = results.all().get(); assertCollectionIs(deleted, ACL1, ACL2); } }
@Override public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) { final long now = time.milliseconds(); final Map<AclBindingFilter, KafkaFutureImpl<FilterResults>> futures = new HashMap<>(); final List<AclBindingFilter> filterList = new ArrayList<>(); for (AclBindingFilter filter : filters) { if (futures.get(filter) == null) { filterList.add(filter); futures.put(filter, new KafkaFutureImpl<FilterResults>()); } } runnable.call(new Call("deleteAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DeleteAclsRequest.Builder(filterList); } @Override void handleResponse(AbstractResponse abstractResponse) { DeleteAclsResponse response = (DeleteAclsResponse) abstractResponse; List<AclFilterResponse> responses = response.responses(); Iterator<AclFilterResponse> iter = responses.iterator(); for (AclBindingFilter filter : filterList) { KafkaFutureImpl<FilterResults> future = futures.get(filter); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no deletion result for the given filter.")); } else { AclFilterResponse deletion = iter.next(); if (deletion.error().isFailure()) { future.completeExceptionally(deletion.error().exception()); } else { List<FilterResult> filterResults = new ArrayList<>(); for (AclDeletionResult deletionResult : deletion.deletions()) { filterResults.add(new FilterResult(deletionResult.acl(), deletionResult.error().exception())); } future.complete(new FilterResults(filterResults)); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new DeleteAclsResult(new HashMap<AclBindingFilter, KafkaFuture<FilterResults>>(futures)); }
KafkaAdminClient extends AdminClient { @Override public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) { final long now = time.milliseconds(); final Map<AclBindingFilter, KafkaFutureImpl<FilterResults>> futures = new HashMap<>(); final List<AclBindingFilter> filterList = new ArrayList<>(); for (AclBindingFilter filter : filters) { if (futures.get(filter) == null) { filterList.add(filter); futures.put(filter, new KafkaFutureImpl<FilterResults>()); } } runnable.call(new Call("deleteAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DeleteAclsRequest.Builder(filterList); } @Override void handleResponse(AbstractResponse abstractResponse) { DeleteAclsResponse response = (DeleteAclsResponse) abstractResponse; List<AclFilterResponse> responses = response.responses(); Iterator<AclFilterResponse> iter = responses.iterator(); for (AclBindingFilter filter : filterList) { KafkaFutureImpl<FilterResults> future = futures.get(filter); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no deletion result for the given filter.")); } else { AclFilterResponse deletion = iter.next(); if (deletion.error().isFailure()) { future.completeExceptionally(deletion.error().exception()); } else { List<FilterResult> filterResults = new ArrayList<>(); for (AclDeletionResult deletionResult : deletion.deletions()) { filterResults.add(new FilterResult(deletionResult.acl(), deletionResult.error().exception())); } future.complete(new FilterResults(filterResults)); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new DeleteAclsResult(new HashMap<AclBindingFilter, KafkaFuture<FilterResults>>(futures)); } }
KafkaAdminClient extends AdminClient { @Override public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) { final long now = time.milliseconds(); final Map<AclBindingFilter, KafkaFutureImpl<FilterResults>> futures = new HashMap<>(); final List<AclBindingFilter> filterList = new ArrayList<>(); for (AclBindingFilter filter : filters) { if (futures.get(filter) == null) { filterList.add(filter); futures.put(filter, new KafkaFutureImpl<FilterResults>()); } } runnable.call(new Call("deleteAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DeleteAclsRequest.Builder(filterList); } @Override void handleResponse(AbstractResponse abstractResponse) { DeleteAclsResponse response = (DeleteAclsResponse) abstractResponse; List<AclFilterResponse> responses = response.responses(); Iterator<AclFilterResponse> iter = responses.iterator(); for (AclBindingFilter filter : filterList) { KafkaFutureImpl<FilterResults> future = futures.get(filter); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no deletion result for the given filter.")); } else { AclFilterResponse deletion = iter.next(); if (deletion.error().isFailure()) { future.completeExceptionally(deletion.error().exception()); } else { List<FilterResult> filterResults = new ArrayList<>(); for (AclDeletionResult deletionResult : deletion.deletions()) { filterResults.add(new FilterResult(deletionResult.acl(), deletionResult.error().exception())); } future.complete(new FilterResults(filterResults)); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new DeleteAclsResult(new HashMap<AclBindingFilter, KafkaFuture<FilterResults>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); }
KafkaAdminClient extends AdminClient { @Override public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) { final long now = time.milliseconds(); final Map<AclBindingFilter, KafkaFutureImpl<FilterResults>> futures = new HashMap<>(); final List<AclBindingFilter> filterList = new ArrayList<>(); for (AclBindingFilter filter : filters) { if (futures.get(filter) == null) { filterList.add(filter); futures.put(filter, new KafkaFutureImpl<FilterResults>()); } } runnable.call(new Call("deleteAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DeleteAclsRequest.Builder(filterList); } @Override void handleResponse(AbstractResponse abstractResponse) { DeleteAclsResponse response = (DeleteAclsResponse) abstractResponse; List<AclFilterResponse> responses = response.responses(); Iterator<AclFilterResponse> iter = responses.iterator(); for (AclBindingFilter filter : filterList) { KafkaFutureImpl<FilterResults> future = futures.get(filter); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no deletion result for the given filter.")); } else { AclFilterResponse deletion = iter.next(); if (deletion.error().isFailure()) { future.completeExceptionally(deletion.error().exception()); } else { List<FilterResult> filterResults = new ArrayList<>(); for (AclDeletionResult deletionResult : deletion.deletions()) { filterResults.add(new FilterResult(deletionResult.acl(), deletionResult.error().exception())); } future.complete(new FilterResults(filterResults)); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new DeleteAclsResult(new HashMap<AclBindingFilter, KafkaFuture<FilterResults>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
KafkaAdminClient extends AdminClient { @Override public DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options) { final long now = time.milliseconds(); final Map<AclBindingFilter, KafkaFutureImpl<FilterResults>> futures = new HashMap<>(); final List<AclBindingFilter> filterList = new ArrayList<>(); for (AclBindingFilter filter : filters) { if (futures.get(filter) == null) { filterList.add(filter); futures.put(filter, new KafkaFutureImpl<FilterResults>()); } } runnable.call(new Call("deleteAcls", calcDeadlineMs(now, options.timeoutMs()), new LeastLoadedNodeProvider()) { @Override AbstractRequest.Builder createRequest(int timeoutMs) { return new DeleteAclsRequest.Builder(filterList); } @Override void handleResponse(AbstractResponse abstractResponse) { DeleteAclsResponse response = (DeleteAclsResponse) abstractResponse; List<AclFilterResponse> responses = response.responses(); Iterator<AclFilterResponse> iter = responses.iterator(); for (AclBindingFilter filter : filterList) { KafkaFutureImpl<FilterResults> future = futures.get(filter); if (!iter.hasNext()) { future.completeExceptionally(new UnknownServerException( "The broker reported no deletion result for the given filter.")); } else { AclFilterResponse deletion = iter.next(); if (deletion.error().isFailure()) { future.completeExceptionally(deletion.error().exception()); } else { List<FilterResult> filterResults = new ArrayList<>(); for (AclDeletionResult deletionResult : deletion.deletions()) { filterResults.add(new FilterResult(deletionResult.acl(), deletionResult.error().exception())); } future.complete(new FilterResults(filterResults)); } } } } @Override void handleFailure(Throwable throwable) { completeAllExceptionally(futures.values(), throwable); } }, now); return new DeleteAclsResult(new HashMap<AclBindingFilter, KafkaFuture<FilterResults>>(futures)); } private KafkaAdminClient(AdminClientConfig config, String clientId, Time time, Metadata metadata, Metrics metrics, KafkaClient client, TimeoutProcessorFactory timeoutProcessorFactory); @Override void close(long duration, TimeUnit unit); @Override CreateTopicsResult createTopics(final Collection<NewTopic> newTopics, final CreateTopicsOptions options); @Override DeleteTopicsResult deleteTopics(final Collection<String> topicNames, DeleteTopicsOptions options); @Override ListTopicsResult listTopics(final ListTopicsOptions options); @Override DescribeTopicsResult describeTopics(final Collection<String> topicNames, DescribeTopicsOptions options); @Override DescribeClusterResult describeCluster(DescribeClusterOptions options); @Override DescribeAclsResult describeAcls(final AclBindingFilter filter, DescribeAclsOptions options); @Override CreateAclsResult createAcls(Collection<AclBinding> acls, CreateAclsOptions options); @Override DeleteAclsResult deleteAcls(Collection<AclBindingFilter> filters, DeleteAclsOptions options); @Override DescribeConfigsResult describeConfigs(Collection<ConfigResource> configResources, final DescribeConfigsOptions options); @Override AlterConfigsResult alterConfigs(Map<ConfigResource, Config> configs, final AlterConfigsOptions options); }
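A short editorial sketch of the happy path testDeleteAcls ends with: delete the bindings matching a filter and read the removed bindings from all(). AclBindingFilter.ANY and the broker address are assumptions.

import java.util.Collection;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclBindingFilter;

public class DeleteAclsSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address
        try (AdminClient admin = AdminClient.create(props)) {
            // all() aggregates every per-filter FilterResults into the deleted bindings
            Collection<AclBinding> deleted =
                    admin.deleteAcls(Collections.singleton(AclBindingFilter.ANY)).all().get();
            deleted.forEach(binding -> System.out.println("deleted: " + binding));
        }
    }
}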
@Test public void testJmxRegistration() throws Exception { Metrics metrics = new Metrics(); try { metrics.addReporter(new JmxReporter()); Sensor sensor = metrics.sensor("kafka.requests"); sensor.add(metrics.metricName("pack.bean1.avg", "grp1"), new Avg()); sensor.add(metrics.metricName("pack.bean2.total", "grp2"), new Total()); Sensor sensor2 = metrics.sensor("kafka.blah"); sensor2.add(metrics.metricName("pack.bean1.some", "grp1"), new Total()); sensor2.add(metrics.metricName("pack.bean2.some", "grp1"), new Total()); } finally { metrics.close(); } }
public void close() { synchronized (LOCK) { for (KafkaMbean mbean : this.mbeans.values()) unregister(mbean); } }
JmxReporter implements MetricsReporter { public void close() { synchronized (LOCK) { for (KafkaMbean mbean : this.mbeans.values()) unregister(mbean); } } }
JmxReporter implements MetricsReporter { public void close() { synchronized (LOCK) { for (KafkaMbean mbean : this.mbeans.values()) unregister(mbean); } } JmxReporter(); JmxReporter(String prefix); }
JmxReporter implements MetricsReporter { public void close() { synchronized (LOCK) { for (KafkaMbean mbean : this.mbeans.values()) unregister(mbean); } } JmxReporter(); JmxReporter(String prefix); @Override void configure(Map<String, ?> configs); @Override void init(List<KafkaMetric> metrics); @Override void metricChange(KafkaMetric metric); @Override void metricRemoval(KafkaMetric metric); void close(); }
JmxReporter implements MetricsReporter { public void close() { synchronized (LOCK) { for (KafkaMbean mbean : this.mbeans.values()) unregister(mbean); } } JmxReporter(); JmxReporter(String prefix); @Override void configure(Map<String, ?> configs); @Override void init(List<KafkaMetric> metrics); @Override void metricChange(KafkaMetric metric); @Override void metricRemoval(KafkaMetric metric); void close(); }
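A small lifecycle sketch of what testJmxRegistration exercises: attach a JmxReporter, record through a sensor, and let Metrics.close() drive the reporter's close(), which unregisters the MBeans. The metric and group names are illustrative.

import org.apache.kafka.common.metrics.JmxReporter;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;

public class JmxReporterLifecycle {
    public static void main(String[] args) {
        Metrics metrics = new Metrics();
        try {
            metrics.addReporter(new JmxReporter("my.app")); // optional MBean name prefix
            Sensor requests = metrics.sensor("requests");
            requests.add(metrics.metricName("request-latency-avg", "request-metrics"), new Avg());
            requests.record(42.0); // the Avg is now observable over JMX
        } finally {
            metrics.close(); // closes reporters; JmxReporter.close() unregisters its mbeans
        }
    }
}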
@Test public void testMetricName() { MetricName n1 = metrics.metricName("name", "group", "description", "key1", "value1", "key2", "value2"); Map<String, String> tags = new HashMap<String, String>(); tags.put("key1", "value1"); tags.put("key2", "value2"); MetricName n2 = metrics.metricName("name", "group", "description", tags); assertEquals("metric names created in two different ways should be equal", n1, n2); try { metrics.metricName("name", "group", "description", "key1"); fail("Creating MetricName with an odd number of keyValue should fail"); } catch (IllegalArgumentException e) { } }
public MetricName metricName(String name, String group, String description, Map<String, String> tags) { Map<String, String> combinedTag = new LinkedHashMap<>(config.tags()); combinedTag.putAll(tags); return new MetricName(name, group, description, combinedTag); }
Metrics implements Closeable { public MetricName metricName(String name, String group, String description, Map<String, String> tags) { Map<String, String> combinedTag = new LinkedHashMap<>(config.tags()); combinedTag.putAll(tags); return new MetricName(name, group, description, combinedTag); } }
Metrics implements Closeable { public MetricName metricName(String name, String group, String description, Map<String, String> tags) { Map<String, String> combinedTag = new LinkedHashMap<>(config.tags()); combinedTag.putAll(tags); return new MetricName(name, group, description, combinedTag); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); }
Metrics implements Closeable { public MetricName metricName(String name, String group, String description, Map<String, String> tags) { Map<String, String> combinedTag = new LinkedHashMap<>(config.tags()); combinedTag.putAll(tags); return new MetricName(name, group, description, combinedTag); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
Metrics implements Closeable { public MetricName metricName(String name, String group, String description, Map<String, String> tags) { Map<String, String> combinedTag = new LinkedHashMap<>(config.tags()); combinedTag.putAll(tags); return new MetricName(name, group, description, combinedTag); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
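A minimal jshell-style sketch of how these tags combine (the tag names and values are illustrative, not from the source above): the per-metric map is applied last via putAll, so it wins on duplicate keys.

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;

Map<String, String> configTags = new LinkedHashMap<>();
configTags.put("client-id", "producer-1");                      // registry-wide tag (illustrative)
Metrics metrics = new Metrics(new MetricConfig().tags(configTags));
MetricName name = metrics.metricName("record-send-rate", "producer-metrics",
        "Average records sent per second",
        Collections.singletonMap("topic", "orders"));            // per-metric tag (illustrative)
// name.tags() now holds both client-id and topic; a per-metric "client-id"
// entry would have overridden the registry-wide one.
metrics.close();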
@Test(expected = IllegalArgumentException.class) public void testBadSensorHierarchy() { Sensor p = metrics.sensor("parent"); Sensor c1 = metrics.sensor("child1", p); Sensor c2 = metrics.sensor("child2", p); metrics.sensor("gc", c1, c2); }
public Sensor sensor(String name) { return this.sensor(name, Sensor.RecordingLevel.INFO); }
Metrics implements Closeable { public Sensor sensor(String name) { return this.sensor(name, Sensor.RecordingLevel.INFO); } }
Metrics implements Closeable { public Sensor sensor(String name) { return this.sensor(name, Sensor.RecordingLevel.INFO); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); }
Metrics implements Closeable { public Sensor sensor(String name) { return this.sensor(name, Sensor.RecordingLevel.INFO); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
Metrics implements Closeable { public Sensor sensor(String name) { return this.sensor(name, Sensor.RecordingLevel.INFO); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
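A short sketch of the overload chain (sensor names are illustrative; as I understand Metrics.sensor, a repeated call with the same name returns the already-registered instance rather than creating a new one): sensor(name) registers the sensor at RecordingLevel.INFO.

import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;

Metrics metrics = new Metrics();
Sensor infoSensor = metrics.sensor("request-latency");          // implicitly RecordingLevel.INFO
Sensor debugSensor = metrics.sensor("debug-latency", Sensor.RecordingLevel.DEBUG);
// getSensor looks up without creating; it returns null for unknown names.
assert metrics.getSensor("request-latency") == infoSensor;
assert metrics.getSensor("no-such-sensor") == null;
metrics.close();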
@Test public void testRemoveSensor() { int size = metrics.metrics().size(); Sensor parent1 = metrics.sensor("test.parent1"); parent1.add(metrics.metricName("test.parent1.count", "grp1"), new Count()); Sensor parent2 = metrics.sensor("test.parent2"); parent2.add(metrics.metricName("test.parent2.count", "grp1"), new Count()); Sensor child1 = metrics.sensor("test.child1", parent1, parent2); child1.add(metrics.metricName("test.child1.count", "grp1"), new Count()); Sensor child2 = metrics.sensor("test.child2", parent2); child2.add(metrics.metricName("test.child2.count", "grp1"), new Count()); Sensor grandChild1 = metrics.sensor("test.gchild2", child2); grandChild1.add(metrics.metricName("test.gchild2.count", "grp1"), new Count()); Sensor sensor = metrics.getSensor("test.parent1"); assertNotNull(sensor); metrics.removeSensor("test.parent1"); assertNull(metrics.getSensor("test.parent1")); assertNull(metrics.metrics().get(metrics.metricName("test.parent1.count", "grp1"))); assertNull(metrics.getSensor("test.child1")); assertNull(metrics.childrenSensors().get(sensor)); assertNull(metrics.metrics().get(metrics.metricName("test.child1.count", "grp1"))); sensor = metrics.getSensor("test.gchild2"); assertNotNull(sensor); metrics.removeSensor("test.gchild2"); assertNull(metrics.getSensor("test.gchild2")); assertNull(metrics.childrenSensors().get(sensor)); assertNull(metrics.metrics().get(metrics.metricName("test.gchild2.count", "grp1"))); sensor = metrics.getSensor("test.child2"); assertNotNull(sensor); metrics.removeSensor("test.child2"); assertNull(metrics.getSensor("test.child2")); assertNull(metrics.childrenSensors().get(sensor)); assertNull(metrics.metrics().get(metrics.metricName("test.child2.count", "grp1"))); sensor = metrics.getSensor("test.parent2"); assertNotNull(sensor); metrics.removeSensor("test.parent2"); assertNull(metrics.getSensor("test.parent2")); assertNull(metrics.childrenSensors().get(sensor)); assertNull(metrics.metrics().get(metrics.metricName("test.parent2.count", "grp1"))); assertEquals(size, metrics.metrics().size()); }
public void removeSensor(String name) { Sensor sensor = sensors.get(name); if (sensor != null) { List<Sensor> childSensors = null; synchronized (sensor) { synchronized (this) { if (sensors.remove(name, sensor)) { for (KafkaMetric metric : sensor.metrics()) removeMetric(metric.metricName()); log.debug("Removed sensor with name {}", name); childSensors = childrenSensors.remove(sensor); } } } if (childSensors != null) { for (Sensor childSensor : childSensors) removeSensor(childSensor.name()); } } }
Metrics implements Closeable { public void removeSensor(String name) { Sensor sensor = sensors.get(name); if (sensor != null) { List<Sensor> childSensors = null; synchronized (sensor) { synchronized (this) { if (sensors.remove(name, sensor)) { for (KafkaMetric metric : sensor.metrics()) removeMetric(metric.metricName()); log.debug("Removed sensor with name {}", name); childSensors = childrenSensors.remove(sensor); } } } if (childSensors != null) { for (Sensor childSensor : childSensors) removeSensor(childSensor.name()); } } } }
Metrics implements Closeable { public void removeSensor(String name) { Sensor sensor = sensors.get(name); if (sensor != null) { List<Sensor> childSensors = null; synchronized (sensor) { synchronized (this) { if (sensors.remove(name, sensor)) { for (KafkaMetric metric : sensor.metrics()) removeMetric(metric.metricName()); log.debug("Removed sensor with name {}", name); childSensors = childrenSensors.remove(sensor); } } } if (childSensors != null) { for (Sensor childSensor : childSensors) removeSensor(childSensor.name()); } } } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); }
Metrics implements Closeable { public void removeSensor(String name) { Sensor sensor = sensors.get(name); if (sensor != null) { List<Sensor> childSensors = null; synchronized (sensor) { synchronized (this) { if (sensors.remove(name, sensor)) { for (KafkaMetric metric : sensor.metrics()) removeMetric(metric.metricName()); log.debug("Removed sensor with name {}", name); childSensors = childrenSensors.remove(sensor); } } } if (childSensors != null) { for (Sensor childSensor : childSensors) removeSensor(childSensor.name()); } } } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
Metrics implements Closeable { public void removeSensor(String name) { Sensor sensor = sensors.get(name); if (sensor != null) { List<Sensor> childSensors = null; synchronized (sensor) { synchronized (this) { if (sensors.remove(name, sensor)) { for (KafkaMetric metric : sensor.metrics()) removeMetric(metric.metricName()); log.debug("Removed sensor with name {}", name); childSensors = childrenSensors.remove(sensor); } } } if (childSensors != null) { for (Sensor childSensor : childSensors) removeSensor(childSensor.name()); } } } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
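A condensed sketch of the cascade that testRemoveSensor above exercises: removing a sensor also removes its registered metrics and, recursively, any sensor created with it as a parent.

import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Count;

Metrics metrics = new Metrics();
Sensor parent = metrics.sensor("parent");
parent.add(metrics.metricName("parent.count", "grp1"), new Count());
Sensor child = metrics.sensor("child", parent);                  // child depends on parent
child.add(metrics.metricName("child.count", "grp1"), new Count());
metrics.removeSensor("parent");
// The parent, the dependent child, and both Count metrics are now gone.
assert metrics.getSensor("parent") == null;
assert metrics.getSensor("child") == null;
metrics.close();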
@Test public void testRemoveMetric() { int size = metrics.metrics().size(); metrics.addMetric(metrics.metricName("test1", "grp1"), new Count()); metrics.addMetric(metrics.metricName("test2", "grp1"), new Count()); assertNotNull(metrics.removeMetric(metrics.metricName("test1", "grp1"))); assertNull(metrics.metrics().get(metrics.metricName("test1", "grp1"))); assertNotNull(metrics.metrics().get(metrics.metricName("test2", "grp1"))); assertNotNull(metrics.removeMetric(metrics.metricName("test2", "grp1"))); assertNull(metrics.metrics().get(metrics.metricName("test2", "grp1"))); assertEquals(size, metrics.metrics().size()); }
public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) reporter.metricRemoval(metric); } return metric; }
Metrics implements Closeable { public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) reporter.metricRemoval(metric); } return metric; } }
Metrics implements Closeable { public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) reporter.metricRemoval(metric); } return metric; } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); }
Metrics implements Closeable { public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) reporter.metricRemoval(metric); } return metric; } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
Metrics implements Closeable { public synchronized KafkaMetric removeMetric(MetricName metricName) { KafkaMetric metric = this.metrics.remove(metricName); if (metric != null) { for (MetricsReporter reporter : reporters) reporter.metricRemoval(metric); } return metric; } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
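A minimal sketch: removeMetric returns the KafkaMetric it removed, or null when the name was never registered, and each registered reporter receives a metricRemoval callback.

import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.KafkaMetric;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.stats.Count;

Metrics metrics = new Metrics();
MetricName name = metrics.metricName("test1", "grp1");
metrics.addMetric(name, new Count());
KafkaMetric removed = metrics.removeMetric(name);   // reporters see metricRemoval(removed)
assert removed != null;
assert metrics.metric(name) == null;                // lookup after removal returns null
assert metrics.removeMetric(name) == null;          // second removal is a no-op
metrics.close();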
@Test public void testMetricInstances() { MetricName n1 = metrics.metricInstance(SampleMetrics.METRIC1, "key1", "value1", "key2", "value2"); Map<String, String> tags = new HashMap<String, String>(); tags.put("key1", "value1"); tags.put("key2", "value2"); MetricName n2 = metrics.metricInstance(SampleMetrics.METRIC2, tags); assertEquals("metric names created in two different ways should be equal", n1, n2); try { metrics.metricInstance(SampleMetrics.METRIC1, "key1"); fail("Creating MetricName with an odd number of keyValue should fail"); } catch (IllegalArgumentException e) { } Map<String, String> parentTagsWithValues = new HashMap<>(); parentTagsWithValues.put("parent-tag", "parent-tag-value"); Map<String, String> childTagsWithValues = new HashMap<>(); childTagsWithValues.put("child-tag", "child-tag-value"); try (Metrics inherited = new Metrics(new MetricConfig().tags(parentTagsWithValues), Arrays.asList((MetricsReporter) new JmxReporter()), time, true)) { MetricName inheritedMetric = inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, childTagsWithValues); Map<String, String> filledOutTags = inheritedMetric.tags(); assertEquals("parent-tag should be set properly", filledOutTags.get("parent-tag"), "parent-tag-value"); assertEquals("child-tag should be set properly", filledOutTags.get("child-tag"), "child-tag-value"); try { inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, parentTagsWithValues); fail("Creating MetricName should fail if the child metrics are not defined at runtime"); } catch (IllegalArgumentException e) { } try { Map<String, String> runtimeTags = new HashMap<>(); runtimeTags.put("child-tag", "child-tag-value"); runtimeTags.put("tag-not-in-template", "unexpected-value"); inherited.metricInstance(SampleMetrics.METRIC_WITH_INHERITED_TAGS, runtimeTags); fail("Creating MetricName should fail if there is a tag at runtime that is not in the template"); } catch (IllegalArgumentException e) { } } }
public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, getTags(keyValue)); }
Metrics implements Closeable { public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, getTags(keyValue)); } }
Metrics implements Closeable { public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, getTags(keyValue)); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); }
Metrics implements Closeable { public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, getTags(keyValue)); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
Metrics implements Closeable { public MetricName metricInstance(MetricNameTemplate template, String... keyValue) { return metricInstance(template, getTags(keyValue)); } Metrics(); Metrics(Time time); Metrics(MetricConfig defaultConfig, Time time); Metrics(MetricConfig defaultConfig); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time); Metrics(MetricConfig defaultConfig, List<MetricsReporter> reporters, Time time, boolean enableExpiration); MetricName metricName(String name, String group, String description, Map<String, String> tags); MetricName metricName(String name, String group, String description); MetricName metricName(String name, String group); MetricName metricName(String name, String group, String description, String... keyValue); MetricName metricName(String name, String group, Map<String, String> tags); static String toHtmlTable(String domain, List<MetricNameTemplate> allMetrics); MetricConfig config(); Sensor getSensor(String name); Sensor sensor(String name); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel); Sensor sensor(String name, Sensor... parents); Sensor sensor(String name, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor.RecordingLevel recordingLevel, Sensor... parents); synchronized Sensor sensor(String name, MetricConfig config, long inactiveSensorExpirationTimeSeconds, Sensor... parents); void removeSensor(String name); void addMetric(MetricName metricName, Measurable measurable); synchronized void addMetric(MetricName metricName, MetricConfig config, Measurable measurable); synchronized KafkaMetric removeMetric(MetricName metricName); synchronized void addReporter(MetricsReporter reporter); Map<MetricName, KafkaMetric> metrics(); List<MetricsReporter> reporters(); KafkaMetric metric(MetricName metricName); MetricName metricInstance(MetricNameTemplate template, String... keyValue); MetricName metricInstance(MetricNameTemplate template, Map<String, String> tags); @Override void close(); }
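A sketch of the template flow; the template below is illustrative (the test above uses predefined SampleMetrics templates): a MetricNameTemplate fixes name, group, description, and the expected tag keys, and metricInstance fills in the values at runtime.

import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.MetricNameTemplate;
import org.apache.kafka.common.metrics.Metrics;

MetricNameTemplate template = new MetricNameTemplate(
        "record-rate", "sample-group", "Records per second", "topic");  // one expected tag key
Metrics metrics = new Metrics();
MetricName name = metrics.metricInstance(template, "topic", "orders");
// An odd number of key/value strings, a missing template tag, or an extra tag
// not declared in the template raises IllegalArgumentException.
metrics.close();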
@Test public void stringToJson() { JsonNode converted = parse(converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, "test-string")); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"string\", \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals("test-string", converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).textValue()); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
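A sketch of the path behind stringToJson, assuming the standard "schemas.enable" config key: with schemas enabled, fromConnectData wraps the value in the schema/payload envelope that validateEnvelope checks.

import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.json.JsonConverter;

JsonConverter converter = new JsonConverter();
converter.configure(Collections.singletonMap("schemas.enable", true), false); // isKey = false
byte[] bytes = converter.fromConnectData("my-topic", Schema.STRING_SCHEMA, "test-string");
// bytes decodes to: {"schema":{"type":"string","optional":false},"payload":"test-string"}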
@Test public void testRecordLevelEnum() { Sensor.RecordingLevel configLevel = Sensor.RecordingLevel.INFO; assertTrue(Sensor.RecordingLevel.INFO.shouldRecord(configLevel.id)); assertFalse(Sensor.RecordingLevel.DEBUG.shouldRecord(configLevel.id)); configLevel = Sensor.RecordingLevel.DEBUG; assertTrue(Sensor.RecordingLevel.INFO.shouldRecord(configLevel.id)); assertTrue(Sensor.RecordingLevel.DEBUG.shouldRecord(configLevel.id)); assertEquals(Sensor.RecordingLevel.valueOf(Sensor.RecordingLevel.DEBUG.toString()), Sensor.RecordingLevel.DEBUG); assertEquals(Sensor.RecordingLevel.valueOf(Sensor.RecordingLevel.INFO.toString()), Sensor.RecordingLevel.INFO); }
public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); String name(); void record(); boolean shouldRecord(); void record(double value); void record(double value, long timeMs); void record(double value, long timeMs, boolean checkQuotas); void checkQuotas(); void checkQuotas(long timeMs); void add(CompoundStat stat); synchronized void add(CompoundStat stat, MetricConfig config); void add(MetricName metricName, MeasurableStat stat); synchronized void add(MetricName metricName, MeasurableStat stat, MetricConfig config); boolean hasExpired(); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); String name(); void record(); boolean shouldRecord(); void record(double value); void record(double value, long timeMs); void record(double value, long timeMs, boolean checkQuotas); void checkQuotas(); void checkQuotas(long timeMs); void add(CompoundStat stat); synchronized void add(CompoundStat stat, MetricConfig config); void add(MetricName metricName, MeasurableStat stat); synchronized void add(MetricName metricName, MeasurableStat stat, MetricConfig config); boolean hasExpired(); }
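The level semantics behind shouldRecord, restated as a sketch: a sensor records when its own level is enabled under the configured level, so INFO sensors record under any config while DEBUG sensors record only when the config level is DEBUG.

import org.apache.kafka.common.metrics.Sensor.RecordingLevel;

RecordingLevel configLevel = RecordingLevel.INFO;
assert RecordingLevel.INFO.shouldRecord(configLevel.id);     // recorded
assert !RecordingLevel.DEBUG.shouldRecord(configLevel.id);   // filtered out
configLevel = RecordingLevel.DEBUG;
assert RecordingLevel.DEBUG.shouldRecord(configLevel.id);    // now recorded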
@Test public void testShouldRecord() { MetricConfig debugConfig = new MetricConfig().recordLevel(Sensor.RecordingLevel.DEBUG); MetricConfig infoConfig = new MetricConfig().recordLevel(Sensor.RecordingLevel.INFO); Sensor infoSensor = new Sensor(null, "infoSensor", null, debugConfig, new SystemTime(), 0, Sensor.RecordingLevel.INFO); assertTrue(infoSensor.shouldRecord()); infoSensor = new Sensor(null, "infoSensor", null, debugConfig, new SystemTime(), 0, Sensor.RecordingLevel.DEBUG); assertTrue(infoSensor.shouldRecord()); Sensor debugSensor = new Sensor(null, "debugSensor", null, infoConfig, new SystemTime(), 0, Sensor.RecordingLevel.INFO); assertTrue(debugSensor.shouldRecord()); debugSensor = new Sensor(null, "debugSensor", null, infoConfig, new SystemTime(), 0, Sensor.RecordingLevel.DEBUG); assertFalse(debugSensor.shouldRecord()); }
public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); String name(); void record(); boolean shouldRecord(); void record(double value); void record(double value, long timeMs); void record(double value, long timeMs, boolean checkQuotas); void checkQuotas(); void checkQuotas(long timeMs); void add(CompoundStat stat); synchronized void add(CompoundStat stat, MetricConfig config); void add(MetricName metricName, MeasurableStat stat); synchronized void add(MetricName metricName, MeasurableStat stat, MetricConfig config); boolean hasExpired(); }
Sensor { public boolean shouldRecord() { return this.recordingLevel.shouldRecord(config.recordLevel().id); } Sensor(Metrics registry, String name, Sensor[] parents, MetricConfig config, Time time, long inactiveSensorExpirationTimeSeconds, RecordingLevel recordingLevel); String name(); void record(); boolean shouldRecord(); void record(double value); void record(double value, long timeMs); void record(double value, long timeMs, boolean checkQuotas); void checkQuotas(); void checkQuotas(long timeMs); void add(CompoundStat stat); synchronized void add(CompoundStat stat, MetricConfig config); void add(MetricName metricName, MeasurableStat stat); synchronized void add(MetricName metricName, MeasurableStat stat, MetricConfig config); boolean hasExpired(); }
@Test public void testHistogram() { BinScheme scheme = new ConstantBinScheme(12, -5, 5); Histogram hist = new Histogram(scheme); for (int i = -5; i < 5; i++) hist.record(i); for (int i = 0; i < 10; i++) assertEquals(scheme.fromBin(i + 1), hist.value(i / 10.0 + EPS), EPS); }
public Histogram(BinScheme binScheme) { this.hist = new float[binScheme.bins()]; this.count = 0.0f; this.binScheme = binScheme; }
Histogram { public Histogram(BinScheme binScheme) { this.hist = new float[binScheme.bins()]; this.count = 0.0f; this.binScheme = binScheme; } }
Histogram { public Histogram(BinScheme binScheme) { this.hist = new float[binScheme.bins()]; this.count = 0.0f; this.binScheme = binScheme; } Histogram(BinScheme binScheme); }
Histogram { public Histogram(BinScheme binScheme) { this.hist = new float[binScheme.bins()]; this.count = 0.0f; this.binScheme = binScheme; } Histogram(BinScheme binScheme); void record(double value); double value(double quantile); float[] counts(); void clear(); @Override String toString(); }
Histogram { public Histogram(BinScheme binScheme) { this.hist = new float[binScheme.bins()]; this.count = 0.0f; this.binScheme = binScheme; } Histogram(BinScheme binScheme); void record(double value); double value(double quantile); float[] counts(); void clear(); @Override String toString(); }
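A sketch of the bin scheme used by testHistogram (treating the two edge bins as under/overflow buckets is my reading of this implementation): ConstantBinScheme splits [min, max] into equal-width bins, and Histogram.value(q) walks the accumulated counts to approximate the q-quantile.

import org.apache.kafka.common.metrics.stats.Histogram;
import org.apache.kafka.common.metrics.stats.Histogram.BinScheme;
import org.apache.kafka.common.metrics.stats.Histogram.ConstantBinScheme;

BinScheme scheme = new ConstantBinScheme(12, -5, 5);   // 12 bins over [-5, 5]
Histogram hist = new Histogram(scheme);
for (int i = -5; i < 5; i++)
    hist.record(i);
double median = hist.value(0.5);   // roughly the middle of the recorded range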
@Test(expected = IOException.class) public void testNoRouteToHost() throws Exception { selector.connect("0", new InetSocketAddress("some.invalid.hostname.foo.bar.local", server.port), BUFFER_SIZE, BUFFER_SIZE); }
@Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* prefix for the metric group */ Map<String, String> metricTags, /* tags used when creating MetricNames */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* prefix for the metric group */ Map<String, String> metricTags, /* tags used when creating MetricNames */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and stores it in the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys, delegating to the KafkaChannel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* prefix for the metric group */ Map<String, String> metricTags, /* tags used when creating MetricNames */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and stores it in the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys, delegating to the KafkaChannel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); static final long NO_IDLE_TIMEOUT_MS; }
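A sketch of driving connect to completion; the node id and address are illustrative, and the ChannelBuilder is taken as a parameter because its construction and configuration vary across Kafka versions: connect only initiates a non-blocking TCP connect, and the channel becomes usable once poll handles OP_CONNECT readiness.

import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.network.ChannelBuilder;
import org.apache.kafka.common.network.Selectable;
import org.apache.kafka.common.network.Selector;
import org.apache.kafka.common.utils.SystemTime;

void connectAndWait(ChannelBuilder channelBuilder) throws IOException {
    Selector selector = new Selector(5000L, new Metrics(), new SystemTime(), "demo", channelBuilder);
    selector.connect("node-0", new InetSocketAddress("localhost", 9092),
            Selectable.USE_DEFAULT_BUFFER_SIZE, Selectable.USE_DEFAULT_BUFFER_SIZE);
    while (!selector.connected().contains("node-0"))
        selector.poll(100L);   // completes the handshake; connected() then reports the id
    selector.close();
}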
@Test public void testLargeMessageSequence() throws Exception { int bufferSize = 512 * 1024; String node = "0"; int reqs = 50; InetSocketAddress addr = new InetSocketAddress("localhost", server.port); connect(node, addr); String requestPrefix = TestUtils.randomString(bufferSize); sendAndReceive(node, requestPrefix, 0, reqs); }
@Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and adds it to the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys by invoking the channel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); }
Selector implements Selectable, AutoCloseable { @Override public void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize) throws IOException { if (this.channels.containsKey(id)) throw new IllegalStateException("There is already a connection for id " + id); SocketChannel socketChannel = SocketChannel.open(); socketChannel.configureBlocking(false); Socket socket = socketChannel.socket(); socket.setKeepAlive(true); if (sendBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setSendBufferSize(sendBufferSize); if (receiveBufferSize != Selectable.USE_DEFAULT_BUFFER_SIZE) socket.setReceiveBufferSize(receiveBufferSize); socket.setTcpNoDelay(true); boolean connected; try { connected = socketChannel.connect(address); } catch (UnresolvedAddressException e) { socketChannel.close(); throw new IOException("Can't resolve address: " + address, e); } catch (IOException e) { socketChannel.close(); throw e; } SelectionKey key = socketChannel.register(nioSelector, SelectionKey.OP_CONNECT); KafkaChannel channel; try { channel = channelBuilder.buildChannel(id, key, maxReceiveSize); } catch (Exception e) { try { socketChannel.close(); } finally { key.cancel(); } throw new IOException("Channel could not be created for socket " + socketChannel, e); } key.attach(channel); this.channels.put(id, channel); if (connected) { log.debug("Immediately connected to node {}", channel.id()); immediatelyConnectedKeys.add(key); key.interestOps(0); } } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and adds it to the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys by invoking the channel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); static final long NO_IDLE_TIMEOUT_MS; }
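For readers unfamiliar with the NIO idiom above: connect() on a non-blocking SocketChannel may return false, in which case the channel is registered for OP_CONNECT and the connection is finished later via finishConnect() once the selector signals readiness; the immediately-connected case is the branch at the end of the method. Below is a minimal standalone sketch of that pattern, not Kafka code; the address localhost:9092 and the class name are illustrative, and java.nio.channels.Selector is written fully qualified to avoid clashing with Kafka's own Selector.

import java.net.InetSocketAddress;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;

public class NonBlockingConnectSketch {
    public static void main(String[] args) throws Exception {
        java.nio.channels.Selector nioSelector = java.nio.channels.Selector.open();
        SocketChannel channel = SocketChannel.open();
        channel.configureBlocking(false);
        // May complete immediately (loopback) or return false and finish asynchronously.
        boolean connected = channel.connect(new InetSocketAddress("localhost", 9092));
        if (!connected) {
            SelectionKey key = channel.register(nioSelector, SelectionKey.OP_CONNECT);
            nioSelector.select(5000); // wait for the connect event
            if (key.isConnectable() && channel.finishConnect())
                key.interestOps(SelectionKey.OP_READ); // connection established, switch to reads
        }
        channel.close();
        nioSelector.close();
    }
}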
@Test public void testMute() throws Exception { blockingConnect("0"); blockingConnect("1"); selector.send(createSend("0", "hello")); selector.send(createSend("1", "hi")); selector.mute("1"); while (selector.completedReceives().isEmpty()) selector.poll(5); assertEquals("We should have only one response", 1, selector.completedReceives().size()); assertEquals("The response should not be from the muted node", "0", selector.completedReceives().get(0).source()); selector.unmute("1"); do { selector.poll(5); } while (selector.completedReceives().isEmpty()); assertEquals("We should have only one response", 1, selector.completedReceives().size()); assertEquals("The response should be from the previously muted node", "1", selector.completedReceives().get(0).source()); }
@Override public void mute(String id) { KafkaChannel channel = channelOrFail(id, true); mute(channel); }
Selector implements Selectable, AutoCloseable { @Override public void mute(String id) { KafkaChannel channel = channelOrFail(id, true); mute(channel); } }
Selector implements Selectable, AutoCloseable { @Override public void mute(String id) { KafkaChannel channel = channelOrFail(id, true); mute(channel); } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); }
Selector implements Selectable, AutoCloseable { @Override public void mute(String id) { KafkaChannel channel = channelOrFail(id, true); mute(channel); } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and adds it to the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys by invoking the channel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); }
Selector implements Selectable, AutoCloseable { @Override public void mute(String id) { KafkaChannel channel = channelOrFail(id, true); mute(channel); } Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, /* metric group prefix */ Map<String, String> metricTags, /* tags used when creating MetricName instances */ boolean metricsPerConnection, boolean recordTimePerConnection, ChannelBuilder channelBuilder); Selector(int maxReceiveSize, long connectionMaxIdleMs, Metrics metrics, Time time, String metricGrpPrefix, Map<String, String> metricTags, boolean metricsPerConnection, ChannelBuilder channelBuilder); Selector(long connectionMaxIdleMS, Metrics metrics, Time time, String metricGrpPrefix, ChannelBuilder channelBuilder); @Override /* creates a KafkaChannel and adds it to the channels map */ void connect(String id, InetSocketAddress address, int sendBufferSize, int receiveBufferSize); void register(String id, SocketChannel socketChannel); @Override void wakeup(); @Override void close(); void send(Send send); @Override /* on each poll, reads and writes according to the ready selection keys by invoking the channel's read and write */ void poll(long timeout); @Override List<Send> completedSends(); @Override List<NetworkReceive> completedReceives(); @Override Map<String, ChannelState> disconnected(); @Override List<String> connected(); @Override void mute(String id); @Override void unmute(String id); @Override void muteAll(); @Override void unmuteAll(); void close(String id); @Override boolean isChannelReady(String id); List<KafkaChannel> channels(); KafkaChannel channel(String id); KafkaChannel closingChannel(String id); Set<SelectionKey> keys(); static final long NO_IDLE_TIMEOUT_MS; }
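Conceptually, muting a channel amounts to dropping read interest on its selection key so that a subsequent poll() no longer surfaces receives from that connection, which is exactly what the testMute assertions above rely on. A hedged sketch of that idea on raw NIO follows; these helper names are illustrative, not Kafka's internal API.

import java.nio.channels.SelectionKey;

final class MuteSketch {
    // Stop delivering reads for this key without touching other interest flags.
    static void mute(SelectionKey key) {
        key.interestOps(key.interestOps() & ~SelectionKey.OP_READ);
    }
    // Resume read delivery.
    static void unmute(SelectionKey key) {
        key.interestOps(key.interestOps() | SelectionKey.OP_READ);
    }
}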
@Test public void testClientEndpointNotValidated() throws Exception { String node = "0"; clientCertStores = new CertStores(false, "non-existent.com"); serverCertStores = new CertStores(true, "localhost"); sslServerConfigs = serverCertStores.getTrustingConfig(clientCertStores); sslClientConfigs = clientCertStores.getTrustingConfig(serverCertStores); SslChannelBuilder serverChannelBuilder = new SslChannelBuilder(Mode.SERVER) { @Override protected SslTransportLayer buildTransportLayer(SslFactory sslFactory, String id, SelectionKey key, String host) throws IOException { SocketChannel socketChannel = (SocketChannel) key.channel(); SSLEngine sslEngine = sslFactory.createSslEngine(host, socketChannel.socket().getPort()); SSLParameters sslParams = sslEngine.getSSLParameters(); sslParams.setEndpointIdentificationAlgorithm("HTTPS"); sslEngine.setSSLParameters(sslParams); TestSslTransportLayer transportLayer = new TestSslTransportLayer(id, key, sslEngine, BUFFER_SIZE, BUFFER_SIZE, BUFFER_SIZE); transportLayer.startHandshake(); return transportLayer; } }; serverChannelBuilder.configure(sslServerConfigs); server = new NioEchoServer(ListenerName.forSecurityProtocol(SecurityProtocol.SSL), SecurityProtocol.SSL, new TestSecurityConfig(sslServerConfigs), "localhost", serverChannelBuilder); server.start(); createSelector(sslClientConfigs); InetSocketAddress addr = new InetSocketAddress("localhost", server.port()); selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE); NetworkTestUtils.checkClientConnection(selector, node, 100, 10); }
protected void startHandshake() throws IOException { this.netReadBuffer = ByteBuffer.allocate(netReadBufferSize()); this.netWriteBuffer = ByteBuffer.allocate(netWriteBufferSize()); this.appReadBuffer = ByteBuffer.allocate(applicationBufferSize()); netWriteBuffer.position(0); netWriteBuffer.limit(0); netReadBuffer.position(0); netReadBuffer.limit(0); handshakeComplete = false; closing = false; sslEngine.beginHandshake(); handshakeStatus = sslEngine.getHandshakeStatus(); }
SslTransportLayer implements TransportLayer { protected void startHandshake() throws IOException { this.netReadBuffer = ByteBuffer.allocate(netReadBufferSize()); this.netWriteBuffer = ByteBuffer.allocate(netWriteBufferSize()); this.appReadBuffer = ByteBuffer.allocate(applicationBufferSize()); netWriteBuffer.position(0); netWriteBuffer.limit(0); netReadBuffer.position(0); netReadBuffer.limit(0); handshakeComplete = false; closing = false; sslEngine.beginHandshake(); handshakeStatus = sslEngine.getHandshakeStatus(); } }
SslTransportLayer implements TransportLayer { protected void startHandshake() throws IOException { this.netReadBuffer = ByteBuffer.allocate(netReadBufferSize()); this.netWriteBuffer = ByteBuffer.allocate(netWriteBufferSize()); this.appReadBuffer = ByteBuffer.allocate(applicationBufferSize()); netWriteBuffer.position(0); netWriteBuffer.limit(0); netReadBuffer.position(0); netReadBuffer.limit(0); handshakeComplete = false; closing = false; sslEngine.beginHandshake(); handshakeStatus = sslEngine.getHandshakeStatus(); } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); }
SslTransportLayer implements TransportLayer { protected void startHandshake() throws IOException { this.netReadBuffer = ByteBuffer.allocate(netReadBufferSize()); this.netWriteBuffer = ByteBuffer.allocate(netWriteBufferSize()); this.appReadBuffer = ByteBuffer.allocate(applicationBufferSize()); netWriteBuffer.position(0); netWriteBuffer.limit(0); netReadBuffer.position(0); netReadBuffer.limit(0); handshakeComplete = false; closing = false; sslEngine.beginHandshake(); handshakeStatus = sslEngine.getHandshakeStatus(); } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
SslTransportLayer implements TransportLayer { protected void startHandshake() throws IOException { this.netReadBuffer = ByteBuffer.allocate(netReadBufferSize()); this.netWriteBuffer = ByteBuffer.allocate(netWriteBufferSize()); this.appReadBuffer = ByteBuffer.allocate(applicationBufferSize()); netWriteBuffer.position(0); netWriteBuffer.limit(0); netReadBuffer.position(0); netReadBuffer.limit(0); handshakeComplete = false; closing = false; sslEngine.beginHandshake(); handshakeStatus = sslEngine.getHandshakeStatus(); } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
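startHandshake() sizes its three buffers from helpers (netReadBufferSize(), netWriteBufferSize(), applicationBufferSize()) that are not shown here; a plausible reading, sketched below, is that they derive from the engine's SSLSession packet and application buffer sizes. The host, port, and class name are illustrative assumptions, and the zero limits mirror the position/limit resets in the method above.

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLSession;
import java.nio.ByteBuffer;

public class HandshakeBufferSketch {
    public static void main(String[] args) throws Exception {
        SSLEngine engine = SSLContext.getDefault().createSSLEngine("localhost", 9093);
        engine.setUseClientMode(true);
        SSLSession session = engine.getSession();
        // Network-side buffers hold TLS records; the app-side buffer holds plaintext.
        ByteBuffer netRead = ByteBuffer.allocate(session.getPacketBufferSize());
        ByteBuffer netWrite = ByteBuffer.allocate(session.getPacketBufferSize());
        ByteBuffer appRead = ByteBuffer.allocate(session.getApplicationBufferSize());
        netWrite.limit(0); // nothing staged for writing yet
        netRead.limit(0);  // nothing received yet
        engine.beginHandshake();
        System.out.println("net=" + netRead.capacity() + " app=" + appRead.capacity()
                + " status=" + engine.getHandshakeStatus());
    }
}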
@Test public void testListenerConfigOverride() throws Exception { String node = "0"; ListenerName clientListenerName = new ListenerName("client"); sslServerConfigs.put(SslConfigs.SSL_CLIENT_AUTH_CONFIG, "required"); sslServerConfigs.put(clientListenerName.configPrefix() + SslConfigs.SSL_CLIENT_AUTH_CONFIG, "none"); server = createEchoServer(SecurityProtocol.SSL); InetSocketAddress addr = new InetSocketAddress("localhost", server.port()); createSelector(sslClientConfigs); selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE); NetworkTestUtils.checkClientConnection(selector, node, 100, 10); selector.close(); sslClientConfigs.remove(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG); sslClientConfigs.remove(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG); sslClientConfigs.remove(SslConfigs.SSL_KEY_PASSWORD_CONFIG); createSelector(sslClientConfigs); selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE); NetworkTestUtils.waitForChannelClose(selector, node, ChannelState.AUTHENTICATE); selector.close(); server.close(); server = createEchoServer(clientListenerName, SecurityProtocol.SSL); addr = new InetSocketAddress("localhost", server.port()); createSelector(sslClientConfigs); selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE); NetworkTestUtils.checkClientConnection(selector, node, 100, 10); }
@Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
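The close() above implements the TLS shutdown sequence: flush pending data, call closeOutbound(), wrap an empty buffer to produce the close_notify record, verify the engine reports CLOSED, and flush that record before tearing down the socket and key. A compact sketch of just the wrap step, assuming an engine whose handshake has already completed; the helper name is hypothetical.

import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import java.io.IOException;
import java.nio.ByteBuffer;

final class CloseNotifySketch {
    // Produce the close_notify bytes after closeOutbound(); caller flushes them to the socket.
    static ByteBuffer buildCloseMessage(SSLEngine sslEngine, int packetBufferSize) throws IOException {
        sslEngine.closeOutbound();
        ByteBuffer empty = ByteBuffer.allocate(0);
        ByteBuffer netWrite = ByteBuffer.allocate(packetBufferSize);
        SSLEngineResult result = sslEngine.wrap(empty, netWrite);
        if (result.getStatus() != SSLEngineResult.Status.CLOSED)
            throw new IOException("Expected CLOSED, got " + result.getStatus());
        netWrite.flip(); // ready to be written out
        return netWrite;
    }
}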
@Test public void testNetworkThreadTimeRecorded() throws Exception { selector.close(); this.selector = new Selector(NetworkReceive.UNLIMITED, 5000, new Metrics(), Time.SYSTEM, "MetricGroup", new HashMap<String, String>(), false, true, channelBuilder); String node = "0"; server = createEchoServer(SecurityProtocol.SSL); InetSocketAddress addr = new InetSocketAddress("localhost", server.port()); selector.connect(node, addr, BUFFER_SIZE, BUFFER_SIZE); String message = TestUtils.randomString(10 * 1024); NetworkTestUtils.waitForChannelReady(selector, node); KafkaChannel channel = selector.channel(node); assertTrue("SSL handshake time not recorded", channel.getAndResetNetworkThreadTimeNanos() > 0); assertEquals("Time not reset", 0, channel.getAndResetNetworkThreadTimeNanos()); selector.mute(node); selector.send(new NetworkSend(node, ByteBuffer.wrap(message.getBytes()))); while (selector.completedSends().isEmpty()) { selector.poll(100L); } assertTrue("Send time not recorded", channel.getAndResetNetworkThreadTimeNanos() > 0); assertEquals("Time not reset", 0, channel.getAndResetNetworkThreadTimeNanos()); selector.unmute(node); while (selector.completedReceives().isEmpty()) { selector.poll(100L); } assertTrue("Receive time not recorded", channel.getAndResetNetworkThreadTimeNanos() > 0); }
@Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
SslTransportLayer implements TransportLayer { @Override public void close() throws IOException { if (closing) return; closing = true; sslEngine.closeOutbound(); try { if (isConnected()) { if (!flush(netWriteBuffer)) { throw new IOException("Remaining data in the network buffer, can't send SSL close message."); } netWriteBuffer.clear(); SSLEngineResult wrapResult = sslEngine.wrap(emptyBuf, netWriteBuffer); if (wrapResult.getStatus() != SSLEngineResult.Status.CLOSED) { throw new IOException("Unexpected status returned by SSLEngine.wrap, expected CLOSED, received " + wrapResult.getStatus() + ". Will not send close message to peer."); } netWriteBuffer.flip(); flush(netWriteBuffer); } } catch (IOException ie) { log.warn("Failed to send SSL Close message ", ie); } finally { try { socketChannel.socket().close(); socketChannel.close(); } finally { key.attach(null); key.cancel(); } } } SslTransportLayer(String channelId, SelectionKey key, SSLEngine sslEngine, boolean enableRenegotiation); static SslTransportLayer create(String channelId, SelectionKey key, SSLEngine sslEngine); @Override boolean ready(); @Override boolean finishConnect(); @Override void disconnect(); @Override SocketChannel socketChannel(); @Override boolean isOpen(); @Override boolean isConnected(); @Override void close(); @Override boolean hasPendingWrites(); @Override void handshake(); @Override int read(ByteBuffer dst); @Override long read(ByteBuffer[] dsts); @Override long read(ByteBuffer[] dsts, int offset, int length); @Override int write(ByteBuffer src); @Override long write(ByteBuffer[] srcs, int offset, int length); @Override long write(ByteBuffer[] srcs); Principal peerPrincipal(); SSLSession sslSession(); @Override void addInterestOps(int ops); @Override void removeInterestOps(int ops); @Override boolean isMute(); @Override long transferFrom(FileChannel fileChannel, long position, long count); }
@Test public void testMatching() throws Exception { assertTrue(ACL1.equals(ACL1)); final AclBinding acl1Copy = new AclBinding( new Resource(ResourceType.TOPIC, "mytopic"), new AccessControlEntry("User:ANONYMOUS", "", AclOperation.ALL, AclPermissionType.ALLOW)); assertTrue(ACL1.equals(acl1Copy)); assertTrue(acl1Copy.equals(ACL1)); assertTrue(ACL2.equals(ACL2)); assertFalse(ACL1.equals(ACL2)); assertFalse(ACL2.equals(ACL1)); assertTrue(AclBindingFilter.ANY.matches(ACL1)); assertFalse(AclBindingFilter.ANY.equals(ACL1)); assertTrue(AclBindingFilter.ANY.matches(ACL2)); assertFalse(AclBindingFilter.ANY.equals(ACL2)); assertTrue(AclBindingFilter.ANY.matches(ACL3)); assertFalse(AclBindingFilter.ANY.equals(ACL3)); assertTrue(AclBindingFilter.ANY.equals(AclBindingFilter.ANY)); assertTrue(ANY_ANONYMOUS.matches(ACL1)); assertFalse(ANY_ANONYMOUS.equals(ACL1)); assertFalse(ANY_ANONYMOUS.matches(ACL2)); assertFalse(ANY_ANONYMOUS.equals(ACL2)); assertTrue(ANY_ANONYMOUS.matches(ACL3)); assertFalse(ANY_ANONYMOUS.equals(ACL3)); assertFalse(ANY_DENY.matches(ACL1)); assertFalse(ANY_DENY.matches(ACL2)); assertTrue(ANY_DENY.matches(ACL3)); assertTrue(ANY_MYTOPIC.matches(ACL1)); assertTrue(ANY_MYTOPIC.matches(ACL2)); assertFalse(ANY_MYTOPIC.matches(ACL3)); assertTrue(ANY_ANONYMOUS.matches(UNKNOWN_ACL)); assertTrue(ANY_DENY.matches(UNKNOWN_ACL)); assertTrue(UNKNOWN_ACL.equals(UNKNOWN_ACL)); assertFalse(ANY_MYTOPIC.matches(UNKNOWN_ACL)); }
@Override public boolean equals(Object o) { if (!(o instanceof AclBinding)) return false; AclBinding other = (AclBinding) o; return resource.equals(other.resource) && entry.equals(other.entry); }
AclBinding { @Override public boolean equals(Object o) { if (!(o instanceof AclBinding)) return false; AclBinding other = (AclBinding) o; return resource.equals(other.resource) && entry.equals(other.entry); } }
AclBinding { @Override public boolean equals(Object o) { if (!(o instanceof AclBinding)) return false; AclBinding other = (AclBinding) o; return resource.equals(other.resource) && entry.equals(other.entry); } AclBinding(Resource resource, AccessControlEntry entry); }
AclBinding { @Override public boolean equals(Object o) { if (!(o instanceof AclBinding)) return false; AclBinding other = (AclBinding) o; return resource.equals(other.resource) && entry.equals(other.entry); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
AclBinding { @Override public boolean equals(Object o) { if (!(o instanceof AclBinding)) return false; AclBinding other = (AclBinding) o; return resource.equals(other.resource) && entry.equals(other.entry); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
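AclBinding.equals delegates component-wise to its resource and entry, and the hashCode listed in the class signature must be derived from the same two fields for the equals/hashCode contract to hold; the testMatching assertions above (equality vs. filter matching) depend on this. A generic sketch of that composite value-object pattern follows; the Pair class is illustrative, not Kafka code.

import java.util.Objects;

final class Pair {
    private final Object resource;
    private final Object entry;
    Pair(Object resource, Object entry) {
        this.resource = Objects.requireNonNull(resource);
        this.entry = Objects.requireNonNull(entry);
    }
    @Override public boolean equals(Object o) {
        if (!(o instanceof Pair)) return false;
        Pair other = (Pair) o;
        return resource.equals(other.resource) && entry.equals(other.entry);
    }
    @Override public int hashCode() {
        return Objects.hash(resource, entry); // must agree with equals
    }
}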
@Test public void arrayToJson() { Schema int32Array = SchemaBuilder.array(Schema.INT32_SCHEMA).build(); JsonNode converted = parse(converter.fromConnectData(TOPIC, int32Array, Arrays.asList(1, 2, 3))); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"array\", \"items\": { \"type\": \"int32\", \"optional\": false }, \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add(2).add(3), converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME)); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
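With schemas enabled, fromConnectData wraps each record in an envelope carrying a schema field and a payload field, which is what arrayToJson asserts through ENVELOPE_SCHEMA_FIELD_NAME and ENVELOPE_PAYLOAD_FIELD_NAME. Below is a hedged sketch of the bytes produced for the int32-array case, built with plain Jackson (which the converter itself depends on); the literal field names "schema" and "payload" are assumptions here.

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class EnvelopeSketch {
    public static void main(String[] args) throws Exception {
        JsonNodeFactory f = JsonNodeFactory.instance;
        ObjectNode schema = f.objectNode();
        schema.put("type", "array");
        schema.set("items", f.objectNode().put("type", "int32").put("optional", false));
        schema.put("optional", false);
        ObjectNode envelope = f.objectNode();
        envelope.set("schema", schema);                       // the schema half of the envelope
        envelope.set("payload", f.arrayNode().add(1).add(2).add(3)); // the data half
        byte[] bytes = new ObjectMapper().writeValueAsBytes(envelope);
        System.out.println(new String(bytes)); // {"schema":{...},"payload":[1,2,3]}
    }
}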
@Test public void testUnknowns() throws Exception { assertFalse(ACL1.isUnknown()); assertFalse(ACL2.isUnknown()); assertFalse(ACL3.isUnknown()); assertFalse(ANY_ANONYMOUS.isUnknown()); assertFalse(ANY_DENY.isUnknown()); assertFalse(ANY_MYTOPIC.isUnknown()); assertTrue(UNKNOWN_ACL.isUnknown()); }
public boolean isUnknown() { return resource.isUnknown() || entry.isUnknown(); }
AclBinding { public boolean isUnknown() { return resource.isUnknown() || entry.isUnknown(); } }
AclBinding { public boolean isUnknown() { return resource.isUnknown() || entry.isUnknown(); } AclBinding(Resource resource, AccessControlEntry entry); }
AclBinding { public boolean isUnknown() { return resource.isUnknown() || entry.isUnknown(); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
AclBinding { public boolean isUnknown() { return resource.isUnknown() || entry.isUnknown(); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
@Test public void testMatchesAtMostOne() throws Exception { assertEquals(null, ACL1.toFilter().findIndefiniteField()); assertEquals(null, ACL2.toFilter().findIndefiniteField()); assertEquals(null, ACL3.toFilter().findIndefiniteField()); assertFalse(ANY_ANONYMOUS.matchesAtMostOne()); assertFalse(ANY_DENY.matchesAtMostOne()); assertFalse(ANY_MYTOPIC.matchesAtMostOne()); }
public AclBindingFilter toFilter() { return new AclBindingFilter(resource.toFilter(), entry.toFilter()); }
AclBinding { public AclBindingFilter toFilter() { return new AclBindingFilter(resource.toFilter(), entry.toFilter()); } }
AclBinding { public AclBindingFilter toFilter() { return new AclBindingFilter(resource.toFilter(), entry.toFilter()); } AclBinding(Resource resource, AccessControlEntry entry); }
AclBinding { public AclBindingFilter toFilter() { return new AclBindingFilter(resource.toFilter(), entry.toFilter()); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
AclBinding { public AclBindingFilter toFilter() { return new AclBindingFilter(resource.toFilter(), entry.toFilter()); } AclBinding(Resource resource, AccessControlEntry entry); boolean isUnknown(); Resource resource(); final AccessControlEntry entry(); AclBindingFilter toFilter(); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); }
@Test(expected = IllegalArgumentException.class) public void testLoadForServerWithWrongListenerName() throws IOException { writeConfiguration("Server", "test.LoginModule required;"); JaasContext.load(JaasContext.Type.SERVER, new ListenerName("plaintext"), Collections.<String, Object>emptyMap()); }
public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs); String name(); Type type(); Configuration configuration(); List<AppConfigurationEntry> configurationEntries(); String configEntryOption(String key, String loginModuleName); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs); String name(); Type type(); Configuration configuration(); List<AppConfigurationEntry> configurationEntries(); String configEntryOption(String key, String loginModuleName); }
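For the SERVER case, load() first builds a listener-scoped context name by lower-casing the listener name and prefixing it to the global context name, presumably falling back to the global name when no listener-scoped entry exists; that is why the mismatched listener name in testLoadForServerWithWrongListenerName fails. A sketch of the name derivation; using "KafkaServer" as the global name is an assumption here.

import java.util.Locale;

final class JaasContextNameSketch {
    static String listenerContextName(String listenerName, String globalContextName) {
        return listenerName.toLowerCase(Locale.ROOT) + "." + globalContextName;
    }
    public static void main(String[] args) {
        // Prints "plaintext.KafkaServer": the name looked up before the global fallback.
        System.out.println(listenerContextName("PLAINTEXT", "KafkaServer"));
    }
}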
@Test(expected = IllegalArgumentException.class) public void testLoadForClientWithListenerName() { JaasContext.load(JaasContext.Type.CLIENT, new ListenerName("foo"), Collections.<String, Object>emptyMap()); }
public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs); String name(); Type type(); Configuration configuration(); List<AppConfigurationEntry> configurationEntries(); String configEntryOption(String key, String loginModuleName); }
JaasContext { public static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs) { String listenerContextName; String globalContextName; switch (contextType) { case CLIENT: if (listenerName != null) throw new IllegalArgumentException("listenerName should be null for CLIENT"); globalContextName = GLOBAL_CONTEXT_NAME_CLIENT; listenerContextName = null; break; case SERVER: if (listenerName == null) throw new IllegalArgumentException("listenerName should not be null for SERVER"); globalContextName = GLOBAL_CONTEXT_NAME_SERVER; listenerContextName = listenerName.value().toLowerCase(Locale.ROOT) + "." + GLOBAL_CONTEXT_NAME_SERVER; break; default: throw new IllegalArgumentException("Unexpected context type " + contextType); } return load(contextType, listenerContextName, globalContextName, configs); } JaasContext(String name, Type type, Configuration configuration); static JaasContext load(JaasContext.Type contextType, ListenerName listenerName, Map<String, ?> configs); String name(); Type type(); Configuration configuration(); List<AppConfigurationEntry> configurationEntries(); String configEntryOption(String key, String loginModuleName); }
@Test public void testSslFactoryWithoutPasswordConfiguration() throws Exception { File trustStoreFile = File.createTempFile("truststore", ".jks"); Map<String, Object> serverSslConfig = TestSslUtils.createSslConfig(false, true, Mode.SERVER, trustStoreFile, "server"); serverSslConfig.remove(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG); SslFactory sslFactory = new SslFactory(Mode.SERVER); try { sslFactory.configure(serverSslConfig); } catch (Exception e) { fail("An exception was thrown when configuring the truststore without a password: " + e); } }
@Override public void configure(Map<String, ?> configs) throws KafkaException { this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG); this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG); @SuppressWarnings("unchecked") List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG); if (cipherSuitesList != null) this.cipherSuites = cipherSuitesList.toArray(new String[cipherSuitesList.size()]); @SuppressWarnings("unchecked") List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); if (enabledProtocolsList != null) this.enabledProtocols = enabledProtocolsList.toArray(new String[enabledProtocolsList.size()]); String endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentification != null) this.endpointIdentification = endpointIdentification; String secureRandomImplementation = (String) configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); if (secureRandomImplementation != null) { try { this.secureRandomImplementation = SecureRandom.getInstance(secureRandomImplementation); } catch (GeneralSecurityException e) { throw new KafkaException(e); } } String clientAuthConfig = clientAuthConfigOverride; if (clientAuthConfig == null) clientAuthConfig = (String) configs.get(SslConfigs.SSL_CLIENT_AUTH_CONFIG); if (clientAuthConfig != null) { if (clientAuthConfig.equals("required")) this.needClientAuth = true; else if (clientAuthConfig.equals("requested")) this.wantClientAuth = true; } this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); try { this.sslContext = createSSLContext(); } catch (Exception e) { throw new KafkaException(e); } }
SslFactory implements Configurable { @Override public void configure(Map<String, ?> configs) throws KafkaException { this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG); this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG); @SuppressWarnings("unchecked") List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG); if (cipherSuitesList != null) this.cipherSuites = cipherSuitesList.toArray(new String[cipherSuitesList.size()]); @SuppressWarnings("unchecked") List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); if (enabledProtocolsList != null) this.enabledProtocols = enabledProtocolsList.toArray(new String[enabledProtocolsList.size()]); String endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentification != null) this.endpointIdentification = endpointIdentification; String secureRandomImplementation = (String) configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); if (secureRandomImplementation != null) { try { this.secureRandomImplementation = SecureRandom.getInstance(secureRandomImplementation); } catch (GeneralSecurityException e) { throw new KafkaException(e); } } String clientAuthConfig = clientAuthConfigOverride; if (clientAuthConfig == null) clientAuthConfig = (String) configs.get(SslConfigs.SSL_CLIENT_AUTH_CONFIG); if (clientAuthConfig != null) { if (clientAuthConfig.equals("required")) this.needClientAuth = true; else if (clientAuthConfig.equals("requested")) this.wantClientAuth = true; } this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); try { this.sslContext = createSSLContext(); } catch (Exception e) { throw new KafkaException(e); } } }
SslFactory implements Configurable { @Override public void configure(Map<String, ?> configs) throws KafkaException { this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG); this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG); @SuppressWarnings("unchecked") List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG); if (cipherSuitesList != null) this.cipherSuites = cipherSuitesList.toArray(new String[cipherSuitesList.size()]); @SuppressWarnings("unchecked") List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); if (enabledProtocolsList != null) this.enabledProtocols = enabledProtocolsList.toArray(new String[enabledProtocolsList.size()]); String endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentification != null) this.endpointIdentification = endpointIdentification; String secureRandomImplementation = (String) configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); if (secureRandomImplementation != null) { try { this.secureRandomImplementation = SecureRandom.getInstance(secureRandomImplementation); } catch (GeneralSecurityException e) { throw new KafkaException(e); } } String clientAuthConfig = clientAuthConfigOverride; if (clientAuthConfig == null) clientAuthConfig = (String) configs.get(SslConfigs.SSL_CLIENT_AUTH_CONFIG); if (clientAuthConfig != null) { if (clientAuthConfig.equals("required")) this.needClientAuth = true; else if (clientAuthConfig.equals("requested")) this.wantClientAuth = true; } this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); try { this.sslContext = createSSLContext(); } catch (Exception e) { throw new KafkaException(e); } } SslFactory(Mode mode); SslFactory(Mode mode, String clientAuthConfigOverride); }
SslFactory implements Configurable { @Override public void configure(Map<String, ?> configs) throws KafkaException { this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG); this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG); @SuppressWarnings("unchecked") List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG); if (cipherSuitesList != null) this.cipherSuites = cipherSuitesList.toArray(new String[cipherSuitesList.size()]); @SuppressWarnings("unchecked") List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); if (enabledProtocolsList != null) this.enabledProtocols = enabledProtocolsList.toArray(new String[enabledProtocolsList.size()]); String endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentification != null) this.endpointIdentification = endpointIdentification; String secureRandomImplementation = (String) configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); if (secureRandomImplementation != null) { try { this.secureRandomImplementation = SecureRandom.getInstance(secureRandomImplementation); } catch (GeneralSecurityException e) { throw new KafkaException(e); } } String clientAuthConfig = clientAuthConfigOverride; if (clientAuthConfig == null) clientAuthConfig = (String) configs.get(SslConfigs.SSL_CLIENT_AUTH_CONFIG); if (clientAuthConfig != null) { if (clientAuthConfig.equals("required")) this.needClientAuth = true; else if (clientAuthConfig.equals("requested")) this.wantClientAuth = true; } this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); try { this.sslContext = createSSLContext(); } catch (Exception e) { throw new KafkaException(e); } } SslFactory(Mode mode); SslFactory(Mode mode, String clientAuthConfigOverride); @Override void configure(Map<String, ?> configs); SSLEngine createSslEngine(String peerHost, int peerPort); SSLContext sslContext(); }
SslFactory implements Configurable { @Override public void configure(Map<String, ?> configs) throws KafkaException { this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG); this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG); @SuppressWarnings("unchecked") List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG); if (cipherSuitesList != null) this.cipherSuites = cipherSuitesList.toArray(new String[cipherSuitesList.size()]); @SuppressWarnings("unchecked") List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG); if (enabledProtocolsList != null) this.enabledProtocols = enabledProtocolsList.toArray(new String[enabledProtocolsList.size()]); String endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG); if (endpointIdentification != null) this.endpointIdentification = endpointIdentification; String secureRandomImplementation = (String) configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG); if (secureRandomImplementation != null) { try { this.secureRandomImplementation = SecureRandom.getInstance(secureRandomImplementation); } catch (GeneralSecurityException e) { throw new KafkaException(e); } } String clientAuthConfig = clientAuthConfigOverride; if (clientAuthConfig == null) clientAuthConfig = (String) configs.get(SslConfigs.SSL_CLIENT_AUTH_CONFIG); if (clientAuthConfig != null) { if (clientAuthConfig.equals("required")) this.needClientAuth = true; else if (clientAuthConfig.equals("requested")) this.wantClientAuth = true; } this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG); this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG); createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG), (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG), (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG), (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); try { this.sslContext = createSSLContext(); } catch (Exception e) { throw new KafkaException(e); } } SslFactory(Mode mode); SslFactory(Mode mode, String clientAuthConfigOverride); @Override void configure(Map<String, ?> configs); SSLEngine createSslEngine(String peerHost, int peerPort); SSLContext sslContext(); }
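A minimal usage sketch (not part of the source above, placeholder paths and passwords): configure() reads the ssl.* keys from the supplied map and builds the SSLContext eagerly, so a misconfigured store fails at configure time rather than on first connection. Assumes the usual Kafka imports (org.apache.kafka.common.security.ssl.SslFactory, org.apache.kafka.common.network.Mode, org.apache.kafka.common.config.SslConfigs, org.apache.kafka.common.config.types.Password, javax.net.ssl.SSLEngine).

// Hypothetical values; only SslConfigs keys that configure() actually reads are set.
Map<String, Object> configs = new HashMap<>();
configs.put(SslConfigs.SSL_PROTOCOL_CONFIG, "TLSv1.2");
configs.put(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "JKS");
configs.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/path/to/truststore.jks"); // placeholder path
configs.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, new Password("changeit"));  // placeholder password

SslFactory factory = new SslFactory(Mode.CLIENT);
factory.configure(configs);              // throws KafkaException if the truststore cannot be loaded
SSLEngine engine = factory.createSslEngine("broker.example.com", 9093);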
@Test public void testParse() throws IOException { List<String> rules = new ArrayList<>(Arrays.asList( "RULE:[1:$1](App\\..*)s/App\\.(.*)/$1/g", "RULE:[2:$1](App\\..*)s/App\\.(.*)/$1/g", "DEFAULT" )); KerberosShortNamer shortNamer = KerberosShortNamer.fromUnparsedRules("REALM.COM", rules); KerberosName name = KerberosName.parse("App.service-name/[email protected]"); assertEquals("App.service-name", name.serviceName()); assertEquals("example.com", name.hostName()); assertEquals("REALM.COM", name.realm()); assertEquals("service-name", shortNamer.shortName(name)); name = KerberosName.parse("[email protected]"); assertEquals("App.service-name", name.serviceName()); assertNull(name.hostName()); assertEquals("REALM.COM", name.realm()); assertEquals("service-name", shortNamer.shortName(name)); name = KerberosName.parse("user/[email protected]"); assertEquals("user", name.serviceName()); assertEquals("host", name.hostName()); assertEquals("REALM.COM", name.realm()); assertEquals("user", shortNamer.shortName(name)); }
public static KerberosName parse(String principalName) { Matcher match = NAME_PARSER.matcher(principalName); if (!match.matches()) { if (principalName.contains("@")) { throw new IllegalArgumentException("Malformed Kerberos name: " + principalName); } else { return new KerberosName(principalName, null, null); } } else { return new KerberosName(match.group(1), match.group(3), match.group(4)); } }
KerberosName { public static KerberosName parse(String principalName) { Matcher match = NAME_PARSER.matcher(principalName); if (!match.matches()) { if (principalName.contains("@")) { throw new IllegalArgumentException("Malformed Kerberos name: " + principalName); } else { return new KerberosName(principalName, null, null); } } else { return new KerberosName(match.group(1), match.group(3), match.group(4)); } } }
KerberosName { public static KerberosName parse(String principalName) { Matcher match = NAME_PARSER.matcher(principalName); if (!match.matches()) { if (principalName.contains("@")) { throw new IllegalArgumentException("Malformed Kerberos name: " + principalName); } else { return new KerberosName(principalName, null, null); } } else { return new KerberosName(match.group(1), match.group(3), match.group(4)); } } KerberosName(String serviceName, String hostName, String realm); }
KerberosName { public static KerberosName parse(String principalName) { Matcher match = NAME_PARSER.matcher(principalName); if (!match.matches()) { if (principalName.contains("@")) { throw new IllegalArgumentException("Malformed Kerberos name: " + principalName); } else { return new KerberosName(principalName, null, null); } } else { return new KerberosName(match.group(1), match.group(3), match.group(4)); } } KerberosName(String serviceName, String hostName, String realm); static KerberosName parse(String principalName); @Override String toString(); String serviceName(); String hostName(); String realm(); }
KerberosName { public static KerberosName parse(String principalName) { Matcher match = NAME_PARSER.matcher(principalName); if (!match.matches()) { if (principalName.contains("@")) { throw new IllegalArgumentException("Malformed Kerberos name: " + principalName); } else { return new KerberosName(principalName, null, null); } } else { return new KerberosName(match.group(1), match.group(3), match.group(4)); } } KerberosName(String serviceName, String hostName, String realm); static KerberosName parse(String principalName); @Override String toString(); String serviceName(); String hostName(); String realm(); }
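An illustrative sketch of the three branches in KerberosName.parse (the example principals are invented): a full service/host@realm principal matches the parser, a string without '@' is treated as a bare service name, and a string containing '@' that does not match the pattern is rejected.

// assumes: import org.apache.kafka.common.security.kerberos.KerberosName;
KerberosName full = KerberosName.parse("kafka/[email protected]");
// full.serviceName() == "kafka", full.hostName() == "host1", full.realm() == "EXAMPLE.COM"

KerberosName bare = KerberosName.parse("kafka");
// no '@' and no pattern match: serviceName() == "kafka", hostName() and realm() are null

KerberosName.parse("bad@name@REALM");    // contains '@' but cannot be parsed: IllegalArgumentException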
@Test public void generateCredential() { ScramCredential credential1 = formatter.generateCredential("password", 4096); ScramCredential credential2 = formatter.generateCredential("password", 4096); assertNotEquals(ScramCredentialUtils.credentialToString(credential1), ScramCredentialUtils.credentialToString(credential2)); }
public static String credentialToString(ScramCredential credential) { return String.format("%s=%s,%s=%s,%s=%s,%s=%d", SALT, DatatypeConverter.printBase64Binary(credential.salt()), STORED_KEY, DatatypeConverter.printBase64Binary(credential.storedKey()), SERVER_KEY, DatatypeConverter.printBase64Binary(credential.serverKey()), ITERATIONS, credential.iterations()); }
ScramCredentialUtils { public static String credentialToString(ScramCredential credential) { return String.format("%s=%s,%s=%s,%s=%s,%s=%d", SALT, DatatypeConverter.printBase64Binary(credential.salt()), STORED_KEY, DatatypeConverter.printBase64Binary(credential.storedKey()), SERVER_KEY, DatatypeConverter.printBase64Binary(credential.serverKey()), ITERATIONS, credential.iterations()); } }
ScramCredentialUtils { public static String credentialToString(ScramCredential credential) { return String.format("%s=%s,%s=%s,%s=%s,%s=%d", SALT, DatatypeConverter.printBase64Binary(credential.salt()), STORED_KEY, DatatypeConverter.printBase64Binary(credential.storedKey()), SERVER_KEY, DatatypeConverter.printBase64Binary(credential.serverKey()), ITERATIONS, credential.iterations()); } }
ScramCredentialUtils { public static String credentialToString(ScramCredential credential) { return String.format("%s=%s,%s=%s,%s=%s,%s=%d", SALT, DatatypeConverter.printBase64Binary(credential.salt()), STORED_KEY, DatatypeConverter.printBase64Binary(credential.storedKey()), SERVER_KEY, DatatypeConverter.printBase64Binary(credential.serverKey()), ITERATIONS, credential.iterations()); } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
ScramCredentialUtils { public static String credentialToString(ScramCredential credential) { return String.format("%s=%s,%s=%s,%s=%s,%s=%d", SALT, DatatypeConverter.printBase64Binary(credential.salt()), STORED_KEY, DatatypeConverter.printBase64Binary(credential.storedKey()), SERVER_KEY, DatatypeConverter.printBase64Binary(credential.serverKey()), ITERATIONS, credential.iterations()); } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
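A hedged round-trip sketch for the credential string format: the key names come from the class's own SALT, STORED_KEY, SERVER_KEY and ITERATIONS constants, and the shape shown in the comment is indicative only. ScramFormatter's constructor can throw NoSuchAlgorithmException, so the snippet assumes it runs where that exception is handled.

// assumes: import org.apache.kafka.common.security.scram.*;
ScramFormatter formatter = new ScramFormatter(ScramMechanism.SCRAM_SHA_256);
ScramCredential credential = formatter.generateCredential("password", 4096);
String encoded = ScramCredentialUtils.credentialToString(credential);
// roughly: salt=<base64>,stored_key=<base64>,server_key=<base64>,iterations=4096
ScramCredential decoded = ScramCredentialUtils.credentialFromString(encoded);
// decoded carries the same salt, keys and iteration count as credential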
@Test(expected = IllegalArgumentException.class) public void invalidCredential() { ScramCredentialUtils.credentialFromString("abc"); }
public static ScramCredential credentialFromString(String str) { Properties props = toProps(str); if (props.size() != 4 || !props.containsKey(SALT) || !props.containsKey(STORED_KEY) || !props.containsKey(SERVER_KEY) || !props.containsKey(ITERATIONS)) { throw new IllegalArgumentException("Credentials not valid: " + str); } byte[] salt = DatatypeConverter.parseBase64Binary(props.getProperty(SALT)); byte[] storedKey = DatatypeConverter.parseBase64Binary(props.getProperty(STORED_KEY)); byte[] serverKey = DatatypeConverter.parseBase64Binary(props.getProperty(SERVER_KEY)); int iterations = Integer.parseInt(props.getProperty(ITERATIONS)); return new ScramCredential(salt, storedKey, serverKey, iterations); }
ScramCredentialUtils { public static ScramCredential credentialFromString(String str) { Properties props = toProps(str); if (props.size() != 4 || !props.containsKey(SALT) || !props.containsKey(STORED_KEY) || !props.containsKey(SERVER_KEY) || !props.containsKey(ITERATIONS)) { throw new IllegalArgumentException("Credentials not valid: " + str); } byte[] salt = DatatypeConverter.parseBase64Binary(props.getProperty(SALT)); byte[] storedKey = DatatypeConverter.parseBase64Binary(props.getProperty(STORED_KEY)); byte[] serverKey = DatatypeConverter.parseBase64Binary(props.getProperty(SERVER_KEY)); int iterations = Integer.parseInt(props.getProperty(ITERATIONS)); return new ScramCredential(salt, storedKey, serverKey, iterations); } }
ScramCredentialUtils { public static ScramCredential credentialFromString(String str) { Properties props = toProps(str); if (props.size() != 4 || !props.containsKey(SALT) || !props.containsKey(STORED_KEY) || !props.containsKey(SERVER_KEY) || !props.containsKey(ITERATIONS)) { throw new IllegalArgumentException("Credentials not valid: " + str); } byte[] salt = DatatypeConverter.parseBase64Binary(props.getProperty(SALT)); byte[] storedKey = DatatypeConverter.parseBase64Binary(props.getProperty(STORED_KEY)); byte[] serverKey = DatatypeConverter.parseBase64Binary(props.getProperty(SERVER_KEY)); int iterations = Integer.parseInt(props.getProperty(ITERATIONS)); return new ScramCredential(salt, storedKey, serverKey, iterations); } }
ScramCredentialUtils { public static ScramCredential credentialFromString(String str) { Properties props = toProps(str); if (props.size() != 4 || !props.containsKey(SALT) || !props.containsKey(STORED_KEY) || !props.containsKey(SERVER_KEY) || !props.containsKey(ITERATIONS)) { throw new IllegalArgumentException("Credentials not valid: " + str); } byte[] salt = DatatypeConverter.parseBase64Binary(props.getProperty(SALT)); byte[] storedKey = DatatypeConverter.parseBase64Binary(props.getProperty(STORED_KEY)); byte[] serverKey = DatatypeConverter.parseBase64Binary(props.getProperty(SERVER_KEY)); int iterations = Integer.parseInt(props.getProperty(ITERATIONS)); return new ScramCredential(salt, storedKey, serverKey, iterations); } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
ScramCredentialUtils { public static ScramCredential credentialFromString(String str) { Properties props = toProps(str); if (props.size() != 4 || !props.containsKey(SALT) || !props.containsKey(STORED_KEY) || !props.containsKey(SERVER_KEY) || !props.containsKey(ITERATIONS)) { throw new IllegalArgumentException("Credentials not valid: " + str); } byte[] salt = DatatypeConverter.parseBase64Binary(props.getProperty(SALT)); byte[] storedKey = DatatypeConverter.parseBase64Binary(props.getProperty(STORED_KEY)); byte[] serverKey = DatatypeConverter.parseBase64Binary(props.getProperty(SERVER_KEY)); int iterations = Integer.parseInt(props.getProperty(ITERATIONS)); return new ScramCredential(salt, storedKey, serverKey, iterations); } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
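The parser fails fast on anything that is not exactly the four expected key=value pairs, which is what the invalidCredential test above exercises; a sketch of the error path:

ScramCredentialUtils.credentialFromString("abc");
// throws IllegalArgumentException("Credentials not valid: abc"): "abc" yields no usable
// key=value pairs, so the size/containsKey guard rejects it before any Base64 decoding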
@Test public void scramCredentialCache() throws Exception { CredentialCache cache = new CredentialCache(); ScramCredentialUtils.createCache(cache, Arrays.asList("SCRAM-SHA-512", "PLAIN")); assertNotNull("Cache not created for enabled mechanism", cache.cache(ScramMechanism.SCRAM_SHA_512.mechanismName(), ScramCredential.class)); assertNull("Cache created for disabled mechanism", cache.cache(ScramMechanism.SCRAM_SHA_256.mechanismName(), ScramCredential.class)); CredentialCache.Cache<ScramCredential> sha512Cache = cache.cache(ScramMechanism.SCRAM_SHA_512.mechanismName(), ScramCredential.class); ScramFormatter formatter = new ScramFormatter(ScramMechanism.SCRAM_SHA_512); ScramCredential credentialA = formatter.generateCredential("password", 4096); sha512Cache.put("userA", credentialA); assertEquals(credentialA, sha512Cache.get("userA")); assertNull("Invalid user credential", sha512Cache.get("userB")); }
public static void createCache(CredentialCache cache, Collection<String> enabledMechanisms) { for (String mechanism : ScramMechanism.mechanismNames()) { if (enabledMechanisms.contains(mechanism)) cache.createCache(mechanism, ScramCredential.class); } }
ScramCredentialUtils { public static void createCache(CredentialCache cache, Collection<String> enabledMechanisms) { for (String mechanism : ScramMechanism.mechanismNames()) { if (enabledMechanisms.contains(mechanism)) cache.createCache(mechanism, ScramCredential.class); } } }
ScramCredentialUtils { public static void createCache(CredentialCache cache, Collection<String> enabledMechanisms) { for (String mechanism : ScramMechanism.mechanismNames()) { if (enabledMechanisms.contains(mechanism)) cache.createCache(mechanism, ScramCredential.class); } } }
ScramCredentialUtils { public static void createCache(CredentialCache cache, Collection<String> enabledMechanisms) { for (String mechanism : ScramMechanism.mechanismNames()) { if (enabledMechanisms.contains(mechanism)) cache.createCache(mechanism, ScramCredential.class); } } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
ScramCredentialUtils { public static void createCache(CredentialCache cache, Collection<String> enabledMechanisms) { for (String mechanism : ScramMechanism.mechanismNames()) { if (enabledMechanisms.contains(mechanism)) cache.createCache(mechanism, ScramCredential.class); } } static String credentialToString(ScramCredential credential); static ScramCredential credentialFromString(String str); static void createCache(CredentialCache cache, Collection<String> enabledMechanisms); }
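A sketch mirroring the scramCredentialCache test: createCache only creates per-mechanism caches for names that are both SCRAM mechanisms and listed as enabled, so a non-SCRAM entry such as "PLAIN" is silently skipped.

// assumes: import org.apache.kafka.common.security.authenticator.CredentialCache;
CredentialCache cache = new CredentialCache();
ScramCredentialUtils.createCache(cache, Arrays.asList("SCRAM-SHA-256", "PLAIN"));
CredentialCache.Cache<ScramCredential> sha256 =
    cache.cache(ScramMechanism.SCRAM_SHA_256.mechanismName(), ScramCredential.class); // non-null
CredentialCache.Cache<ScramCredential> sha512 =
    cache.cache(ScramMechanism.SCRAM_SHA_512.mechanismName(), ScramCredential.class); // null: not enabled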
@Test public void mapToJsonStringKeys() { Schema stringIntMap = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build(); Map<String, Integer> input = new HashMap<>(); input.put("key1", 12); input.put("key2", 15); JsonNode converted = parse(converter.fromConnectData(TOPIC, stringIntMap, input)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"string\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(JsonNodeFactory.instance.objectNode().put("key1", 12).put("key2", 15), converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME)); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
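A hedged usage sketch for the converter ("schemas.enable" is the standard JsonConverter setting that drives the enableSchemas branch above; the topic name and payload are placeholders):

// assumes: import org.apache.kafka.connect.json.JsonConverter; import org.apache.kafka.connect.data.Schema;
JsonConverter converter = new JsonConverter();
Map<String, Object> config = new HashMap<>();
config.put("schemas.enable", true);
converter.configure(config, false);          // false: configure as a value converter
byte[] bytes = converter.fromConnectData("my-topic", Schema.STRING_SCHEMA, "hello");
// with schemas enabled, the serialized JSON is an envelope: {"schema": {...}, "payload": "hello"}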
@Test public void testEqualsAndHashCode() { String name = "KafkaUser"; KafkaPrincipal principal1 = new KafkaPrincipal(KafkaPrincipal.USER_TYPE, name); KafkaPrincipal principal2 = new KafkaPrincipal(KafkaPrincipal.USER_TYPE, name); Assert.assertEquals(principal1.hashCode(), principal2.hashCode()); Assert.assertEquals(principal1, principal2); }
@Override public int hashCode() { int result = principalType.hashCode(); result = 31 * result + name.hashCode(); return result; }
KafkaPrincipal implements Principal { @Override public int hashCode() { int result = principalType.hashCode(); result = 31 * result + name.hashCode(); return result; } }
KafkaPrincipal implements Principal { @Override public int hashCode() { int result = principalType.hashCode(); result = 31 * result + name.hashCode(); return result; } KafkaPrincipal(String principalType, String name); }
KafkaPrincipal implements Principal { @Override public int hashCode() { int result = principalType.hashCode(); result = 31 * result + name.hashCode(); return result; } KafkaPrincipal(String principalType, String name); static KafkaPrincipal fromString(String str); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); @Override String getName(); String getPrincipalType(); }
KafkaPrincipal implements Principal { @Override public int hashCode() { int result = principalType.hashCode(); result = 31 * result + name.hashCode(); return result; } KafkaPrincipal(String principalType, String name); static KafkaPrincipal fromString(String str); @Override String toString(); @Override boolean equals(Object o); @Override int hashCode(); @Override String getName(); String getPrincipalType(); static final String SEPARATOR; static final String USER_TYPE; final static KafkaPrincipal ANONYMOUS; }
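An illustrative consequence of the equals/hashCode pair (example names are invented): equal principal type and name give equal hash codes, so KafkaPrincipal works as a hash-based collection key, and fromString round-trips the "User:name" form built from USER_TYPE and SEPARATOR.

// assumes: import org.apache.kafka.common.security.auth.KafkaPrincipal;
KafkaPrincipal a = new KafkaPrincipal(KafkaPrincipal.USER_TYPE, "alice");
KafkaPrincipal b = KafkaPrincipal.fromString("User:alice");
// a.equals(b) && a.hashCode() == b.hashCode()
Set<KafkaPrincipal> allowed = new HashSet<>();
allowed.add(a);
boolean permitted = allowed.contains(b);     // true: hash and equality agree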
@Test(expected = ConfigException.class) public void testInvalidDefault() { new ConfigDef().define("a", Type.INT, "hello", Importance.HIGH, "docs"); }
public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
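A sketch of why the testInvalidDefault* tests in this section expect ConfigException at define time: define(...) parses and validates the default value immediately, so a bad default fails when the ConfigDef is built rather than when a config is later parsed.

// assumes the same ConfigDef, Type, Importance and Range imports as the tests above
ConfigDef def = new ConfigDef()
    .define("retries", Type.INT, 3, Range.between(0, 10), Importance.HIGH, "bounded retry count");
// By contrast, new ConfigDef().define("retries", Type.INT, "hello", Importance.HIGH, "docs")
// throws ConfigException at define time: "hello" cannot be parsed as an INT default.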
@Test(expected = ConfigException.class) public void testDefinedTwice() { new ConfigDef().define("a", Type.STRING, Importance.HIGH, "docs").define("a", Type.INT, Importance.HIGH, "docs"); }
public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
@Test(expected = ConfigException.class) public void testInvalidDefaultRange() { new ConfigDef().define("name", Type.INT, -1, Range.between(0, 10), Importance.HIGH, "docs"); }
public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
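A short contrast of the two built-in validators exercised by the range and string tests here (names and docs are placeholders): Range.between bounds numeric values inclusively, while ValidString.in restricts a string to an explicit allow-list; both are checked against the default at define time.

// assumes the same Type, Importance, Range and ValidString imports as the tests above
new ConfigDef()
    .define("size", Type.INT, 5, Range.between(0, 10), Importance.HIGH, "bounded int")        // 5 is within [0, 10]
    .define("mode", Type.STRING, "valid", ValidString.in("valid", "values"),
            Importance.HIGH, "enumerated string");                                            // "valid" is in the allow-list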
@Test(expected = ConfigException.class) public void testInvalidDefaultString() { new ConfigDef().define("name", Type.STRING, "bad", ValidString.in("valid", "values"), Importance.HIGH, "docs"); }
public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public ConfigDef define(ConfigKey key) { if (configKeys.containsKey(key.name)) { throw new ConfigException("Configuration " + key.name + " is defined twice."); } if (key.group != null && !groups.contains(key.group)) { groups.add(key.group); } configKeys.put(key.name, key); return this; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object 
value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
@Test public void testSslPasswords() { ConfigDef def = new ConfigDef(); SslConfigs.addClientSslSupport(def); Properties props = new Properties(); props.put(SslConfigs.SSL_KEY_PASSWORD_CONFIG, "key_password"); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "keystore_password"); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "truststore_password"); Map<String, Object> vals = def.parse(props); assertEquals(new Password("key_password"), vals.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG)); assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG).toString()); assertEquals(new Password("keystore_password"), vals.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG)); assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG).toString()); assertEquals(new Password("truststore_password"), vals.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG)); assertEquals(Password.HIDDEN, vals.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG).toString()); }
public Map<String, Object> parse(Map<?, ?> props) { List<String> undefinedConfigKeys = undefinedDependentConfigs(); if (!undefinedConfigKeys.isEmpty()) { String joined = Utils.join(undefinedConfigKeys, ","); throw new ConfigException("Some configurations referenced as dependents are not defined: " + joined); } Map<String, Object> values = new HashMap<>(); for (ConfigKey key : configKeys.values()) values.put(key.name, parseValue(key, props.get(key.name), props.containsKey(key.name))); return values; }
ConfigDef { public Map<String, Object> parse(Map<?, ?> props) { List<String> undefinedConfigKeys = undefinedDependentConfigs(); if (!undefinedConfigKeys.isEmpty()) { String joined = Utils.join(undefinedConfigKeys, ","); throw new ConfigException("Some configurations referenced as dependents are not defined: " + joined); } Map<String, Object> values = new HashMap<>(); for (ConfigKey key : configKeys.values()) values.put(key.name, parseValue(key, props.get(key.name), props.containsKey(key.name))); return values; } }
ConfigDef { public Map<String, Object> parse(Map<?, ?> props) { List<String> undefinedConfigKeys = undefinedDependentConfigs(); if (!undefinedConfigKeys.isEmpty()) { String joined = Utils.join(undefinedConfigKeys, ","); throw new ConfigException("Some configurations referenced as dependents are not defined: " + joined); } Map<String, Object> values = new HashMap<>(); for (ConfigKey key : configKeys.values()) values.put(key.name, parseValue(key, props.get(key.name), props.containsKey(key.name))); return values; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public Map<String, Object> parse(Map<?, ?> props) { List<String> undefinedConfigKeys = undefinedDependentConfigs(); if (!undefinedConfigKeys.isEmpty()) { String joined = Utils.join(undefinedConfigKeys, ","); throw new ConfigException("Some configurations referenced as dependents are not defined: " + joined); } Map<String, Object> values = new HashMap<>(); for (ConfigKey key : configKeys.values()) values.put(key.name, parseValue(key, props.get(key.name), props.containsKey(key.name))); return values; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef
withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public Map<String, Object> parse(Map<?, ?> props) { List<String> undefinedConfigKeys = undefinedDependentConfigs(); if (!undefinedConfigKeys.isEmpty()) { String joined = Utils.join(undefinedConfigKeys, ","); throw new ConfigException("Some configurations referenced as dependents are not defined: " + joined); } Map<String, Object> values = new HashMap<>(); for (ConfigKey key : configKeys.values()) values.put(key.name, parseValue(key, props.get(key.name), props.containsKey(key.name))); return values; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef
withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
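testSslPasswords depends on parse() wrapping PASSWORD-typed values in Password, so that logging a parsed value prints Password.HIDDEN rather than the secret. A minimal sketch of the same mechanism with a single hypothetical "secret" key (not one of the SSL configs above):

import java.util.Map;
import java.util.Properties;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.types.Password;

public class ParsePasswordSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef()
                .define("secret", Type.PASSWORD, Importance.HIGH, "A secret value.");
        Properties props = new Properties();
        props.put("secret", "hunter2");
        Map<String, Object> parsed = def.parse(props);
        Password p = (Password) parsed.get("secret");
        System.out.println(p);         // prints "[hidden]" (Password.HIDDEN), not the secret
        System.out.println(p.value()); // the raw value is still retrievable on demand
    }
}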
@Test public void testParseForValidate() { Map<String, Object> expectedParsed = new HashMap<>(); expectedParsed.put("a", 1); expectedParsed.put("b", null); expectedParsed.put("c", null); expectedParsed.put("d", 10); Map<String, ConfigValue> expected = new HashMap<>(); String errorMessageB = "Missing required configuration \"b\" which has no default value."; String errorMessageC = "Missing required configuration \"c\" which has no default value."; ConfigValue configA = new ConfigValue("a", 1, Collections.<Object>emptyList(), Collections.<String>emptyList()); ConfigValue configB = new ConfigValue("b", null, Collections.<Object>emptyList(), Arrays.asList(errorMessageB, errorMessageB)); ConfigValue configC = new ConfigValue("c", null, Collections.<Object>emptyList(), Arrays.asList(errorMessageC)); ConfigValue configD = new ConfigValue("d", 10, Collections.<Object>emptyList(), Collections.<String>emptyList()); expected.put("a", configA); expected.put("b", configB); expected.put("c", configC); expected.put("d", configD); ConfigDef def = new ConfigDef() .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c"), new IntegerRecommender(false)) .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true)) .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true)) .define("d", Type.INT, Importance.HIGH, "docs", "group", 4, Width.SHORT, "d", Arrays.asList("b"), new IntegerRecommender(false)); Map<String, String> props = new HashMap<>(); props.put("a", "1"); props.put("d", "10"); Map<String, ConfigValue> configValues = new HashMap<>(); for (String name : def.configKeys().keySet()) { configValues.put(name, new ConfigValue(name)); } Map<String, Object> parsed = def.parseForValidate(props, configValues); assertEquals(expectedParsed, parsed); assertEquals(expected, configValues); }
Map<String, Object> parseForValidate(Map<String, String> props, Map<String, ConfigValue> configValues) { Map<String, Object> parsed = new HashMap<>(); Set<String> configsWithNoParent = getConfigsWithNoParent(); for (String name: configsWithNoParent) { parseForValidate(name, props, parsed, configValues); } return parsed; }
ConfigDef { Map<String, Object> parseForValidate(Map<String, String> props, Map<String, ConfigValue> configValues) { Map<String, Object> parsed = new HashMap<>(); Set<String> configsWithNoParent = getConfigsWithNoParent(); for (String name: configsWithNoParent) { parseForValidate(name, props, parsed, configValues); } return parsed; } }
ConfigDef { Map<String, Object> parseForValidate(Map<String, String> props, Map<String, ConfigValue> configValues) { Map<String, Object> parsed = new HashMap<>(); Set<String> configsWithNoParent = getConfigsWithNoParent(); for (String name: configsWithNoParent) { parseForValidate(name, props, parsed, configValues); } return parsed; } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { Map<String, Object> parseForValidate(Map<String, String> props, Map<String, ConfigValue> configValues) { Map<String, Object> parsed = new HashMap<>(); Set<String> configsWithNoParent = getConfigsWithNoParent(); for (String name: configsWithNoParent) { parseForValidate(name, props, parsed, configValues); } return parsed; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object 
parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { Map<String, Object> parseForValidate(Map<String, String> props, Map<String, ConfigValue> configValues) { Map<String, Object> parsed = new HashMap<>(); Set<String> configsWithNoParent = getConfigsWithNoParent(); for (String name: configsWithNoParent) { parseForValidate(name, props, parsed, configValues); } return parsed; } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object 
parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
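The IntegerRecommender used by testParseForValidate above (and testValidate below) is referenced but its body is not included in this document. Note that parseForValidate itself only resolves values and missing-config errors; recommenders run later, during validate(). A plausible stand-in implementing the ConfigDef.Recommender contract, written so the recommended-value assertions in testValidate would line up ([1, 2, 3] for "a"/"d", [4, 5] for "b"/"c") — an assumption, not the original class:

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;

// Sketch of a Recommender analogous to the tests' IntegerRecommender.
public class IntegerRecommenderSketch implements ConfigDef.Recommender {
    private final boolean visible;

    public IntegerRecommenderSketch(boolean visible) {
        this.visible = visible;
    }

    @Override
    public List<Object> validValues(String name, Map<String, Object> parsedConfig) {
        // Values chosen to match the expectations asserted in testValidate.
        return "b".equals(name) || "c".equals(name)
                ? Arrays.<Object>asList(4, 5)
                : Arrays.<Object>asList(1, 2, 3);
    }

    @Override
    public boolean visible(String name, Map<String, Object> parsedConfig) {
        return visible;
    }
}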
@Test public void testValidate() { Map<String, ConfigValue> expected = new HashMap<>(); String errorMessageB = "Missing required configuration \"b\" which has no default value."; String errorMessageC = "Missing required configuration \"c\" which has no default value."; ConfigValue configA = new ConfigValue("a", 1, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList()); ConfigValue configB = new ConfigValue("b", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageB, errorMessageB)); ConfigValue configC = new ConfigValue("c", null, Arrays.<Object>asList(4, 5), Arrays.asList(errorMessageC)); ConfigValue configD = new ConfigValue("d", 10, Arrays.<Object>asList(1, 2, 3), Collections.<String>emptyList()); expected.put("a", configA); expected.put("b", configB); expected.put("c", configC); expected.put("d", configD); ConfigDef def = new ConfigDef() .define("a", Type.INT, Importance.HIGH, "docs", "group", 1, Width.SHORT, "a", Arrays.asList("b", "c"), new IntegerRecommender(false)) .define("b", Type.INT, Importance.HIGH, "docs", "group", 2, Width.SHORT, "b", new IntegerRecommender(true)) .define("c", Type.INT, Importance.HIGH, "docs", "group", 3, Width.SHORT, "c", new IntegerRecommender(true)) .define("d", Type.INT, Importance.HIGH, "docs", "group", 4, Width.SHORT, "d", Arrays.asList("b"), new IntegerRecommender(false)); Map<String, String> props = new HashMap<>(); props.put("a", "1"); props.put("d", "10"); List<ConfigValue> configs = def.validate(props); for (ConfigValue config : configs) { String name = config.name(); ConfigValue expectedConfig = expected.get(name); assertEquals(expectedConfig, config); } }
public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); }
ConfigDef { public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); } }
ConfigDef { public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String 
keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public List<ConfigValue> validate(Map<String, String> props) { return new ArrayList<>(validateAll(props).values()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String 
keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
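testValidate exercises the public validate() entry point, which delegates to validateAll() and returns one ConfigValue per key, collecting "missing required configuration" errors instead of throwing the way parse() does. A minimal sketch (config names are illustrative):

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigValue;

public class ValidateSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef()
                .define("a", Type.INT, Importance.HIGH, "docs")
                .define("b", Type.INT, Importance.HIGH, "docs"); // required: no default
        Map<String, String> props = new HashMap<>();
        props.put("a", "1"); // "b" is intentionally left out
        List<ConfigValue> results = def.validate(props);
        for (ConfigValue cv : results) {
            // "b" carries a "Missing required configuration" error; "a" has none.
            System.out.println(cv.name() + " -> value=" + cv.value()
                    + ", errors=" + cv.errorMessages());
        }
    }
}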
@Test public void testNames() { final ConfigDef configDef = new ConfigDef() .define("a", Type.STRING, Importance.LOW, "docs") .define("b", Type.STRING, Importance.LOW, "docs"); Set<String> names = configDef.names(); assertEquals(new HashSet<>(Arrays.asList("a", "b")), names); try { names.add("new"); fail(); } catch (UnsupportedOperationException e) { } }
public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); }
ConfigDef { public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); } }
ConfigDef { public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, 
final int startingOrd, final ConfigDef child); }
ConfigDef { public Set<String> names() { return Collections.unmodifiableSet(configKeys.keySet()); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, 
final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
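testNames asserts that names() hands back a read-only view; Collections.unmodifiableSet makes any mutation attempt throw. A compact sketch of that contract:

import java.util.Set;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class NamesSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef()
                .define("a", Type.STRING, Importance.LOW, "docs");
        Set<String> names = def.names();
        try {
            names.add("b"); // the returned set is an unmodifiable view
        } catch (UnsupportedOperationException e) {
            System.out.println("names() is read-only, as the test above expects");
        }
    }
}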
@Test public void toRst() { final ConfigDef def = new ConfigDef() .define("opt1", Type.STRING, "a", ValidString.in("a", "b", "c"), Importance.HIGH, "docs1") .define("opt2", Type.INT, Importance.MEDIUM, "docs2") .define("opt3", Type.LIST, Arrays.asList("a", "b"), Importance.LOW, "docs3"); final String expectedRst = "" + "``opt2``\n" + " docs2\n" + "\n" + " * Type: int\n" + " * Importance: medium\n" + "\n" + "``opt1``\n" + " docs1\n" + "\n" + " * Type: string\n" + " * Default: a\n" + " * Valid Values: [a, b, c]\n" + " * Importance: high\n" + "\n" + "``opt3``\n" + " docs3\n" + "\n" + " * Type: list\n" + " * Default: a,b\n" + " * Importance: low\n" + "\n"; assertEquals(expectedRst, def.toRst()); }
public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); }
ConfigDef { public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); } }
ConfigDef { public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); 
String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public String toRst() { StringBuilder b = new StringBuilder(); for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } getConfigKeyRst(key, b); b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); 
String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
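The expected RST in the toRst() test lists opt2 first even though it was defined second: sortedConfigs() orders required keys (those without a default value) ahead of keys that have one. A small sketch that reproduces this ordering (class name is illustrative):

import java.util.Arrays;

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;

public class ToRstSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef()
                .define("opt1", Type.STRING, "a", Importance.HIGH, "docs1")   // has a default
                .define("opt2", Type.INT, Importance.MEDIUM, "docs2")         // required, no default
                .define("opt3", Type.LIST, Arrays.asList("a", "b"), Importance.LOW, "docs3");
        // Required keys come first, so "opt2" leads the output even though
        // it was defined after "opt1".
        System.out.print(def.toRst());
    }
}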
@Test public void mapToJsonNonStringKeys() { Schema intIntMap = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build(); Map<Integer, Integer> input = new HashMap<>(); input.put(1, 12); input.put(2, 15); JsonNode converted = parse(converter.fromConnectData(TOPIC, intIntMap, input)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"map\", \"keys\": { \"type\" : \"int32\", \"optional\": false }, \"values\": { \"type\" : \"int32\", \"optional\": false }, \"optional\": false }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertTrue(converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).isArray()); ArrayNode payload = (ArrayNode) converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME); assertEquals(2, payload.size()); Set<JsonNode> payloadEntries = new HashSet<>(); for (JsonNode elem : payload) payloadEntries.add(elem); assertEquals(new HashSet<>(Arrays.asList(JsonNodeFactory.instance.arrayNode().add(1).add(12), JsonNodeFactory.instance.arrayNode().add(2).add(15))), payloadEntries ); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
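mapToJsonNonStringKeys runs fromConnectData() with schemas enabled, so the bytes carry a schema/payload envelope. The converse setting is often useful too; a minimal sketch with "schemas.enable" turned off (the topic name is illustrative):

import java.nio.charset.StandardCharsets;
import java.util.Collections;

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.json.JsonConverter;

public class FromConnectDataSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // "schemas.enable" = false selects convertToJsonWithoutEnvelope above.
        converter.configure(Collections.singletonMap("schemas.enable", "false"), false);
        byte[] bytes = converter.fromConnectData("my-topic", Schema.STRING_SCHEMA, "hello");
        // Prints "hello" as a bare JSON string (quotes included), no envelope.
        System.out.println(new String(bytes, StandardCharsets.UTF_8));
    }
}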
@Test public void toEnrichedRst() { final ConfigDef def = new ConfigDef() .define("opt1.of.group1", Type.STRING, "a", ValidString.in("a", "b", "c"), Importance.HIGH, "Doc doc.", "Group One", 0, Width.NONE, "..", Collections.<String>emptyList()) .define("opt2.of.group1", Type.INT, ConfigDef.NO_DEFAULT_VALUE, Importance.MEDIUM, "Doc doc doc.", "Group One", 1, Width.NONE, "..", Arrays.asList("some.option1", "some.option2")) .define("opt2.of.group2", Type.BOOLEAN, false, Importance.HIGH, "Doc doc doc doc.", "Group Two", 1, Width.NONE, "..", Collections.<String>emptyList()) .define("opt1.of.group2", Type.BOOLEAN, false, Importance.HIGH, "Doc doc doc doc doc.", "Group Two", 0, Width.NONE, "..", Collections.singletonList("some.option")) .define("poor.opt", Type.STRING, "foo", Importance.HIGH, "Doc doc doc doc."); final String expectedRst = "" + "``poor.opt``\n" + " Doc doc doc doc.\n" + "\n" + " * Type: string\n" + " * Default: foo\n" + " * Importance: high\n" + "\n" + "Group One\n" + "^^^^^^^^^\n" + "\n" + "``opt1.of.group1``\n" + " Doc doc.\n" + "\n" + " * Type: string\n" + " * Default: a\n" + " * Valid Values: [a, b, c]\n" + " * Importance: high\n" + "\n" + "``opt2.of.group1``\n" + " Doc doc doc.\n" + "\n" + " * Type: int\n" + " * Importance: medium\n" + " * Dependents: ``some.option1``, ``some.option2``\n" + "\n" + "Group Two\n" + "^^^^^^^^^\n" + "\n" + "``opt1.of.group2``\n" + " Doc doc doc doc doc.\n" + "\n" + " * Type: boolean\n" + " * Default: false\n" + " * Importance: high\n" + " * Dependents: ``some.option``\n" + "\n" + "``opt2.of.group2``\n" + " Doc doc doc doc.\n" + "\n" + " * Type: boolean\n" + " * Default: false\n" + " * Importance: high\n" + "\n"; assertEquals(expectedRst, def.toEnrichedRst()); }
public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); }
ConfigDef { public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); } }
ConfigDef { public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, 
Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public String toEnrichedRst() { StringBuilder b = new StringBuilder(); String lastKeyGroupName = ""; for (ConfigKey key : sortedConfigs()) { if (key.internalConfig) { continue; } if (key.group != null) { if (!lastKeyGroupName.equalsIgnoreCase(key.group)) { b.append(key.group).append("\n"); char[] underLine = new char[key.group.length()]; Arrays.fill(underLine, '^'); b.append(new String(underLine)).append("\n\n"); } lastKeyGroupName = key.group; } getConfigKeyRst(key, b); if (key.dependents != null && key.dependents.size() > 0) { int j = 0; b.append(" * Dependents: "); for (String dependent : key.dependents) { b.append("``"); b.append(dependent); if (++j == key.dependents.size()) b.append("``"); else b.append("``, "); } b.append("\n"); } b.append("\n"); } return b.toString(); } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, 
Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
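A minimal driver for toEnrichedRst (illustrative only; the key and group names are made up). As the test above verifies, ungrouped keys are emitted first, each group then gets a '^'-underlined heading, and a "Dependents" bullet is appended when a key declares dependents:

import java.util.Collections;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Width;

public class EnrichedRstSketch {
    public static void main(String[] args) {
        ConfigDef def = new ConfigDef()
            // No group: rendered before any group heading.
            .define("plain.opt", Type.STRING, "foo", Importance.HIGH, "An ungrouped option.")
            // Grouped, with one dependent: rendered under a "Group One" heading.
            .define("grouped.opt", Type.INT, ConfigDef.NO_DEFAULT_VALUE, Importance.MEDIUM,
                    "A grouped option.", "Group One", 0, Width.NONE, "Grouped Opt",
                    Collections.singletonList("other.opt"));
        System.out.print(def.toEnrichedRst());
    }
}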
@Test public void testConvertValueToStringBoolean() { assertEquals("true", ConfigDef.convertToString(true, Type.BOOLEAN)); assertNull(ConfigDef.convertToString(null, Type.BOOLEAN)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
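The tests in this corpus exercise the scalar branches of convertToString; the LIST and CLASS branches behave as sketched below (illustrative class name). Lists are joined with commas via Utils.join, and classes render as their fully qualified name:

import java.util.Arrays;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;

public class ListAndClassSketch {
    public static void main(String[] args) {
        // LIST: elements joined with "," and no surrounding brackets.
        System.out.println(ConfigDef.convertToString(Arrays.asList("a", "b", "c"), Type.LIST)); // a,b,c
        // CLASS: Class.getName(), i.e. the fully qualified name.
        System.out.println(ConfigDef.convertToString(String.class, Type.CLASS)); // java.lang.String
    }
}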
@Test public void testConvertValueToStringShort() { assertEquals("32767", ConfigDef.convertToString(Short.MAX_VALUE, Type.SHORT)); assertNull(ConfigDef.convertToString(null, Type.SHORT)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
@Test public void testConvertValueToStringInt() { assertEquals("2147483647", ConfigDef.convertToString(Integer.MAX_VALUE, Type.INT)); assertNull(ConfigDef.convertToString(null, Type.INT)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
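One more branch worth noting (a sketch, assuming the standard org.apache.kafka.common.config.types.Password type): PASSWORD values also go through toString(), and Password.toString() masks the secret, so the rendered string is "[hidden]" rather than the raw value:

import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.types.Password;

public class PasswordToStringSketch {
    public static void main(String[] args) {
        // Password.toString() masks the secret, so the PASSWORD branch of
        // convertToString never leaks the raw value.
        System.out.println(ConfigDef.convertToString(new Password("s3cret"), Type.PASSWORD)); // [hidden]
    }
}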
@Test public void testConvertValueToStringLong() { assertEquals("9223372036854775807", ConfigDef.convertToString(Long.MAX_VALUE, Type.LONG)); assertNull(ConfigDef.convertToString(null, Type.LONG)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
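The two early returns in the focal method are easy to miss: a null parsed value short-circuits before the type is consulted, and a null type falls back to plain toString(). A small illustrative sketch:

import org.apache.kafka.common.config.ConfigDef;

public class NullGuardsSketch {
    public static void main(String[] args) {
        // A null value short-circuits before the type is even inspected.
        System.out.println(ConfigDef.convertToString(null, null)); // null
        // A null type falls back to Object.toString() on the parsed value.
        System.out.println(ConfigDef.convertToString(42, null));   // 42
    }
}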
@Test public void testConvertValueToStringDouble() { assertEquals("3.125", ConfigDef.convertToString(3.125, Type.DOUBLE)); assertNull(ConfigDef.convertToString(null, Type.DOUBLE)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); 
ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
@Test public void testConvertValueToStringString() { assertEquals("foobar", ConfigDef.convertToString("foobar", Type.STRING)); assertNull(ConfigDef.convertToString(null, Type.STRING)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
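Note: the test above checks that convertToString is effectively the inverse of parseType for Type.STRING. A minimal, self-contained sketch of that round trip (the wrapper class and the "my.config" key name are illustrative; parseType and convertToString are the static ConfigDef methods listed in this record):

    import org.apache.kafka.common.config.ConfigDef;
    import org.apache.kafka.common.config.ConfigDef.Type;

    public class StringRoundTripSketch {
        public static void main(String[] args) {
            // parseType validates and coerces a raw value; convertToString renders it back.
            Object parsed = ConfigDef.parseType("my.config", "foobar", Type.STRING);
            System.out.println(ConfigDef.convertToString(parsed, Type.STRING)); // foobar
            // A null parsed value renders as null, not the string "null".
            System.out.println(ConfigDef.convertToString(null, Type.STRING));   // null
        }
    }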
@Test public void testConvertValueToStringPassword() { assertEquals(Password.HIDDEN, ConfigDef.convertToString(new Password("foobar"), Type.PASSWORD)); assertEquals("foobar", ConfigDef.convertToString("foobar", Type.PASSWORD)); assertNull(ConfigDef.convertToString(null, Type.PASSWORD)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
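Note: the test above relies on Password overriding toString() to return the masked constant Password.HIDDEN, so convertToString never leaks secrets for Type.PASSWORD. A minimal sketch, assuming the org.apache.kafka.common.config.types.Password type exposes the raw secret via a value() accessor (the wrapper class is illustrative):

    import org.apache.kafka.common.config.types.Password;

    public class PasswordMaskingSketch {
        public static void main(String[] args) {
            Password secret = new Password("foobar");
            System.out.println(secret);         // [hidden] -- toString() is masked
            System.out.println(secret.value()); // foobar   -- only via the explicit accessor
        }
    }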
@Test public void testConvertValueToStringList() { assertEquals("a,bc,d", ConfigDef.convertToString(Arrays.asList("a", "bc", "d"), Type.LIST)); assertNull(ConfigDef.convertToString(null, Type.LIST)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
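Note: for Type.LIST, convertToString joins the elements with "," via Utils.join, with no surrounding brackets or quoting, which is why the test expects "a,bc,d". A minimal sketch (wrapper class is illustrative). One caveat worth noting: elements that themselves contain commas would not survive a parse round trip, since parseType for LIST splits on commas:

    import java.util.Arrays;
    import org.apache.kafka.common.config.ConfigDef;
    import org.apache.kafka.common.config.ConfigDef.Type;

    public class ListRenderingSketch {
        public static void main(String[] args) {
            // Elements are comma-joined: no brackets, no escaping.
            String joined = ConfigDef.convertToString(Arrays.asList("a", "bc", "d"), Type.LIST);
            System.out.println(joined); // a,bc,d
        }
    }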
@Test public void testConvertValueToStringClass() throws ClassNotFoundException { String actual = ConfigDef.convertToString(ConfigDefTest.class, Type.CLASS); assertEquals("org.apache.kafka.common.config.ConfigDefTest", actual); assertEquals(ConfigDefTest.class, Class.forName(actual)); assertNull(ConfigDef.convertToString(null, Type.CLASS)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
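Note: for Type.CLASS, convertToString uses Class.getName(), i.e. the binary name, which is exactly what Class.forName() can load back -- the round-trip property the test asserts. A minimal sketch using a JDK class (wrapper class is illustrative):

    import org.apache.kafka.common.config.ConfigDef;
    import org.apache.kafka.common.config.ConfigDef.Type;

    public class ClassRenderingSketch {
        public static void main(String[] args) throws ClassNotFoundException {
            String name = ConfigDef.convertToString(java.util.ArrayList.class, Type.CLASS);
            System.out.println(name);                                              // java.util.ArrayList
            System.out.println(Class.forName(name) == java.util.ArrayList.class); // true
        }
    }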
@Test public void structToJson() { Schema schema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).field("field3", Schema.STRING_SCHEMA).field("field4", Schema.BOOLEAN_SCHEMA).build(); Struct input = new Struct(schema).put("field1", true).put("field2", "string2").put("field3", "string3").put("field4", false); JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, input)); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"struct\", \"optional\": false, \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\", \"optional\": false }, { \"field\": \"field2\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field3\", \"type\": \"string\", \"optional\": false }, { \"field\": \"field4\", \"type\": \"boolean\", \"optional\": false }] }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertEquals(JsonNodeFactory.instance.objectNode() .put("field1", true) .put("field2", "string2") .put("field3", "string3") .put("field4", false), converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME)); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
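Note: the test above exercises fromConnectData with the schema envelope enabled. A minimal usage sketch; the "schemas.enable" config key is an assumption inferred from the enableSchemas field shown above, and the wrapper class and topic name are illustrative:

    import java.nio.charset.StandardCharsets;
    import java.util.Collections;
    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.SchemaBuilder;
    import org.apache.kafka.connect.data.Struct;
    import org.apache.kafka.connect.json.JsonConverter;

    public class StructSerializationSketch {
        public static void main(String[] args) {
            JsonConverter converter = new JsonConverter();
            // Second argument marks this as a value (not key) converter.
            converter.configure(Collections.singletonMap("schemas.enable", "true"), false);

            Schema schema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).build();
            Struct value = new Struct(schema).put("field1", true);

            byte[] serialized = converter.fromConnectData("my-topic", schema, value);
            // With schemas enabled the output is the envelope:
            // {"schema": {...}, "payload": {"field1": true}}
            System.out.println(new String(serialized, StandardCharsets.UTF_8));
        }
    }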
@Test public void testConvertValueToStringNestedClass() throws ClassNotFoundException { String actual = ConfigDef.convertToString(NestedClass.class, Type.CLASS); assertEquals("org.apache.kafka.common.config.ConfigDefTest$NestedClass", actual); assertEquals(NestedClass.class, Class.forName(actual)); }
public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); }
ConfigDef { public static String convertToString(Object parsedValue, Type type) { if (parsedValue == null) { return null; } if (type == null) { return parsedValue.toString(); } switch (type) { case BOOLEAN: case SHORT: case INT: case LONG: case DOUBLE: case STRING: case PASSWORD: return parsedValue.toString(); case LIST: List<?> valueList = (List<?>) parsedValue; return Utils.join(valueList, ","); case CLASS: Class<?> clazz = (Class<?>) parsedValue; return clazz.getName(); default: throw new IllegalStateException("Unknown type."); } } ConfigDef(); ConfigDef(ConfigDef base); Set<String> names(); ConfigDef define(ConfigKey key); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, List<String> dependents); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName, Recommender recommender); ConfigDef define(String name, Type type, Importance importance, String documentation, String group, int orderInGroup, Width width, String displayName); ConfigDef define(String name, Type type, Object defaultValue, Validator validator, Importance importance, String documentation); ConfigDef define(String name, Type type, Object defaultValue, Importance importance, String documentation); ConfigDef define(String name, Type type, Importance importance, String documentation); ConfigDef defineInternal(final String name, final Type type, final Object defaultValue, final Importance importance); Map<String, ConfigKey> configKeys(); List<String> groups(); ConfigDef withClientSslSupport(); ConfigDef withClientSaslSupport(); Map<String, Object> parse(Map<?, ?> props); List<ConfigValue> validate(Map<String, String> props); Map<String, ConfigValue> validateAll(Map<String, String> props); static Object parseType(String name, Object value, Type type); static String convertToString(Object parsedValue, Type type); String toHtmlTable(); String toRst(); String toEnrichedRst(); void embed(final String keyPrefix, final String groupPrefix, final int startingOrd, final ConfigDef child); static final Object NO_DEFAULT_VALUE; }
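Note: the test above pins down that binary names separate nested classes with '$' rather than '.', since only the binary name round-trips through Class.forName(). A minimal JDK-only sketch (class names are illustrative):

    public class NestedNameSketch {
        static class Inner { }

        public static void main(String[] args) throws ClassNotFoundException {
            System.out.println(Inner.class.getName());          // NestedNameSketch$Inner
            System.out.println(Inner.class.getCanonicalName()); // NestedNameSketch.Inner
            Class.forName(Inner.class.getName());               // loads fine
            // Class.forName(Inner.class.getCanonicalName());   // would throw ClassNotFoundException
        }
    }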
@Test public void testOriginalsWithPrefix() { Properties props = new Properties(); props.put("foo.bar", "abc"); props.put("setting", "def"); TestConfig config = new TestConfig(props); Map<String, Object> originalsWithPrefix = config.originalsWithPrefix("foo."); assertTrue(config.unused().contains("foo.bar")); originalsWithPrefix.get("bar"); assertFalse(config.unused().contains("foo.bar")); Map<String, Object> expected = new HashMap<>(); expected.put("bar", "abc"); assertEquals(expected, originalsWithPrefix); }
public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; }
AbstractConfig { public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; } }
AbstractConfig { public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); }
AbstractConfig { public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
AbstractConfig { public Map<String, Object> originalsWithPrefix(String prefix) { Map<String, Object> result = new RecordingMap<>(prefix, false); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) result.put(entry.getKey().substring(prefix.length()), entry.getValue()); } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
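The focal method strips a prefix from matching keys. A minimal standalone sketch of that logic follows, using a plain HashMap; the real method returns a RecordingMap so that reads are tracked for unused-key reporting, which this sketch deliberately omits.

import java.util.HashMap;
import java.util.Map;

public class PrefixStripSketch {
    // Mirrors originalsWithPrefix(): keep only entries whose key starts with
    // the prefix, and strip the prefix from the returned key.
    static Map<String, Object> withPrefix(Map<String, Object> originals, String prefix) {
        Map<String, Object> result = new HashMap<>();
        for (Map.Entry<String, Object> e : originals.entrySet()) {
            // The length check rejects a key that is exactly the prefix
            // with nothing after it.
            if (e.getKey().startsWith(prefix) && e.getKey().length() > prefix.length())
                result.put(e.getKey().substring(prefix.length()), e.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Object> originals = new HashMap<>();
        originals.put("foo.bar", "abc");
        originals.put("setting", "def");
        System.out.println(withPrefix(originals, "foo.")); // {bar=abc}
    }
}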
@Test public void testValuesWithPrefixOverride() { String prefix = "prefix."; Properties props = new Properties(); props.put("sasl.mechanism", "PLAIN"); props.put("prefix.sasl.mechanism", "GSSAPI"); props.put("prefix.sasl.kerberos.kinit.cmd", "/usr/bin/kinit2"); props.put("prefix.ssl.truststore.location", "my location"); props.put("sasl.kerberos.service.name", "service name"); props.put("ssl.keymanager.algorithm", "algorithm"); TestSecurityConfig config = new TestSecurityConfig(props); Map<String, Object> valuesWithPrefixOverride = config.valuesWithPrefixOverride(prefix); assertTrue(config.unused().contains("prefix.sasl.mechanism")); assertTrue(config.unused().contains("sasl.mechanism")); assertEquals("GSSAPI", valuesWithPrefixOverride.get("sasl.mechanism")); assertFalse(config.unused().contains("sasl.mechanism")); assertFalse(config.unused().contains("prefix.sasl.mechanism")); assertTrue(config.unused().contains("prefix.sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("sasl.kerberos.kinit.cmd")); assertEquals("/usr/bin/kinit2", valuesWithPrefixOverride.get("sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("sasl.kerberos.kinit.cmd")); assertFalse(config.unused().contains("prefix.sasl.kerberos.kinit.cmd")); assertTrue(config.unused().contains("prefix.ssl.truststore.location")); assertFalse(config.unused().contains("ssl.truststore.location")); assertEquals("my location", valuesWithPrefixOverride.get("ssl.truststore.location")); assertFalse(config.unused().contains("ssl.truststore.location")); assertFalse(config.unused().contains("prefix.ssl.truststore.location")); assertTrue(config.unused().contains("ssl.keymanager.algorithm")); assertEquals("algorithm", valuesWithPrefixOverride.get("ssl.keymanager.algorithm")); assertFalse(config.unused().contains("ssl.keymanager.algorithm")); assertTrue(config.unused().contains("sasl.kerberos.service.name")); assertEquals("service name", valuesWithPrefixOverride.get("sasl.kerberos.service.name")); assertFalse(config.unused().contains("sasl.kerberos.service.name")); assertFalse(config.unused().contains("sasl.kerberos.min.time.before.relogin")); assertEquals(SaslConfigs.DEFAULT_KERBEROS_MIN_TIME_BEFORE_RELOGIN, valuesWithPrefixOverride.get("sasl.kerberos.min.time.before.relogin")); assertFalse(config.unused().contains("sasl.kerberos.min.time.before.relogin")); assertFalse(config.unused().contains("ssl.key.password")); assertNull(valuesWithPrefixOverride.get("ssl.key.password")); assertFalse(config.unused().contains("ssl.key.password")); }
public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; }
AbstractConfig { public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; } }
AbstractConfig { public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); }
AbstractConfig { public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
AbstractConfig { public Map<String, Object> valuesWithPrefixOverride(String prefix) { Map<String, Object> result = new RecordingMap<>(values(), prefix, true); for (Map.Entry<String, ?> entry : originals.entrySet()) { if (entry.getKey().startsWith(prefix) && entry.getKey().length() > prefix.length()) { String keyWithNoPrefix = entry.getKey().substring(prefix.length()); ConfigDef.ConfigKey configKey = definition.configKeys().get(keyWithNoPrefix); if (configKey != null) result.put(keyWithNoPrefix, definition.parseValue(configKey, entry.getValue(), true)); } } return result; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
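valuesWithPrefixOverride() layers prefixed originals on top of the parsed base values, which is why "prefix.sasl.mechanism" wins over "sasl.mechanism" in the test above. A simplified sketch of that precedence, omitting the parseValue() type coercion and the known-key check against the ConfigDef:

import java.util.HashMap;
import java.util.Map;

public class PrefixOverrideSketch {
    // Start from the base values, then let any "prefix.<key>" entry in the
    // originals replace the unprefixed value for <key>.
    static Map<String, Object> withPrefixOverride(Map<String, Object> values,
                                                  Map<String, Object> originals,
                                                  String prefix) {
        Map<String, Object> result = new HashMap<>(values);
        for (Map.Entry<String, Object> e : originals.entrySet()) {
            if (e.getKey().startsWith(prefix) && e.getKey().length() > prefix.length())
                result.put(e.getKey().substring(prefix.length()), e.getValue());
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, Object> values = new HashMap<>();
        values.put("sasl.mechanism", "PLAIN");
        Map<String, Object> originals = new HashMap<>();
        originals.put("prefix.sasl.mechanism", "GSSAPI");
        // {sasl.mechanism=GSSAPI}: the prefixed original wins
        System.out.println(withPrefixOverride(values, originals, "prefix."));
    }
}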
@Test public void testUnused() { Properties props = new Properties(); String configValue = "org.apache.kafka.common.config.AbstractConfigTest$ConfiguredFakeMetricsReporter"; props.put(TestConfig.METRIC_REPORTER_CLASSES_CONFIG, configValue); props.put(FakeMetricsReporterConfig.EXTRA_CONFIG, "my_value"); TestConfig config = new TestConfig(props); assertTrue("metric.extra_config should be marked unused before getConfiguredInstances is called", config.unused().contains(FakeMetricsReporterConfig.EXTRA_CONFIG)); config.getConfiguredInstances(TestConfig.METRIC_REPORTER_CLASSES_CONFIG, MetricsReporter.class); assertTrue("All defined configurations should be marked as used", config.unused().isEmpty()); }
public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; }
AbstractConfig { public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; } }
AbstractConfig { public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); }
AbstractConfig { public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
AbstractConfig { public Set<String> unused() { Set<String> keys = new HashSet<>(originals.keySet()); keys.removeAll(used); return keys; } @SuppressWarnings("unchecked") AbstractConfig(ConfigDef definition, Map<?, ?> originals, boolean doLog); AbstractConfig(ConfigDef definition, Map<?, ?> originals); void ignore(String key); Short getShort(String key); Integer getInt(String key); Long getLong(String key); Double getDouble(String key); @SuppressWarnings("unchecked") List<String> getList(String key); Boolean getBoolean(String key); String getString(String key); ConfigDef.Type typeOf(String key); Password getPassword(String key); Class<?> getClass(String key); Set<String> unused(); Map<String, Object> originals(); Map<String, String> originalsStrings(); Map<String, Object> originalsWithPrefix(String prefix); Map<String, Object> valuesWithPrefixOverride(String prefix); Map<String, ?> values(); void logUnused(); T getConfiguredInstance(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t); List<T> getConfiguredInstances(String key, Class<T> t, Map<String, Object> configOverrides); @Override boolean equals(Object o); @Override int hashCode(); }
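unused() is simply the original key set minus every key that was ever read. A self-contained sketch of the underlying idea, a map that records gets, analogous in spirit to the RecordingMap that AbstractConfig uses internally:

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class UsageTrackingSketch {
    // A map that remembers which keys were read, so unused() can be
    // computed as originals minus used.
    static class RecordingMap<V> extends HashMap<String, V> {
        final Set<String> used = new HashSet<>();
        @Override public V get(Object key) {
            if (key instanceof String) used.add((String) key);
            return super.get(key);
        }
        Set<String> unused() {
            Set<String> keys = new HashSet<>(keySet());
            keys.removeAll(used);
            return keys;
        }
    }

    public static void main(String[] args) {
        RecordingMap<String> config = new RecordingMap<>();
        config.put("a", "1");
        config.put("b", "2");
        config.get("a");                      // marks "a" as used
        System.out.println(config.unused());  // [b]
    }
}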
@Test public void testBootstrap() { String ipAddress = "140.211.11.105"; String hostName = "www.example.com"; Cluster cluster = Cluster.bootstrap(Arrays.asList( new InetSocketAddress(ipAddress, 9002), new InetSocketAddress(hostName, 9002) )); Set<String> expectedHosts = Utils.mkSet(ipAddress, hostName); Set<String> actualHosts = new HashSet<>(); for (Node node : cluster.nodes()) actualHosts.add(node.host()); assertEquals(expectedHosts, actualHosts); }
public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); }
Cluster { public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); } }
Cluster { public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); } Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics); Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); private Cluster(String clusterId, boolean isBootstrapConfigured, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); }
Cluster { public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); } Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics); Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); private Cluster(String clusterId, boolean isBootstrapConfigured, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); static Cluster empty(); static Cluster bootstrap(List<InetSocketAddress> addresses); Cluster withPartitions(Map<TopicPartition, PartitionInfo> partitions); List<Node> nodes(); Node nodeById(int id); Node leaderFor(TopicPartition topicPartition); PartitionInfo partition(TopicPartition topicPartition); List<PartitionInfo> partitionsForTopic(String topic); Integer partitionCountForTopic(String topic); List<PartitionInfo> availablePartitionsForTopic(String topic); List<PartitionInfo> partitionsForNode(int nodeId); Set<String> topics(); Set<String> unauthorizedTopics(); Set<String> internalTopics(); boolean isBootstrapConfigured(); ClusterResource clusterResource(); Node controller(); @Override String toString(); }
Cluster { public static Cluster bootstrap(List<InetSocketAddress> addresses) { List<Node> nodes = new ArrayList<>(); int nodeId = -1; for (InetSocketAddress address : addresses) nodes.add(new Node(nodeId--, address.getHostString(), address.getPort())); return new Cluster(null, true, nodes, new ArrayList<PartitionInfo>(0), Collections.<String>emptySet(), Collections.<String>emptySet(), null); } Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics); Cluster(String clusterId, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); private Cluster(String clusterId, boolean isBootstrapConfigured, Collection<Node> nodes, Collection<PartitionInfo> partitions, Set<String> unauthorizedTopics, Set<String> internalTopics, Node controller); static Cluster empty(); static Cluster bootstrap(List<InetSocketAddress> addresses); Cluster withPartitions(Map<TopicPartition, PartitionInfo> partitions); List<Node> nodes(); Node nodeById(int id); Node leaderFor(TopicPartition topicPartition); PartitionInfo partition(TopicPartition topicPartition); List<PartitionInfo> partitionsForTopic(String topic); Integer partitionCountForTopic(String topic); List<PartitionInfo> availablePartitionsForTopic(String topic); List<PartitionInfo> partitionsForNode(int nodeId); Set<String> topics(); Set<String> unauthorizedTopics(); Set<String> internalTopics(); boolean isBootstrapConfigured(); ClusterResource clusterResource(); Node controller(); @Override String toString(); }
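A short usage sketch of Cluster.bootstrap() as exercised by the test above. Note the synthetic negative node ids (-1, -2, ...), assigned because the real broker ids are unknown until the first metadata response arrives:

import org.apache.kafka.common.Cluster;
import org.apache.kafka.common.Node;

import java.net.InetSocketAddress;
import java.util.Arrays;

public class BootstrapSketch {
    public static void main(String[] args) {
        Cluster cluster = Cluster.bootstrap(Arrays.asList(
            new InetSocketAddress("140.211.11.105", 9002),
            new InetSocketAddress("www.example.com", 9002)));
        // Prints the placeholder ids alongside the configured endpoints.
        for (Node node : cluster.nodes())
            System.out.println(node.id() + " -> " + node.host() + ":" + node.port());
        System.out.println(cluster.isBootstrapConfigured()); // true
    }
}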
@Test public void testToString() { String topic = "sample"; int partition = 0; Node leader = new Node(0, "localhost", 9092); Node r1 = new Node(1, "localhost", 9093); Node r2 = new Node(2, "localhost", 9094); Node[] replicas = new Node[] {leader, r1, r2}; Node[] inSyncReplicas = new Node[] {leader, r1, r2}; PartitionInfo partitionInfo = new PartitionInfo(topic, partition, leader, replicas, inSyncReplicas); String expected = String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader.idString(), "[0,1,2]", "[0,1,2]"); Assert.assertEquals(expected, partitionInfo.toString()); }
@Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); }
PartitionInfo { @Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); } }
PartitionInfo { @Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); } PartitionInfo(String topic, int partition, Node leader, Node[] replicas, Node[] inSyncReplicas); }
PartitionInfo { @Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); } PartitionInfo(String topic, int partition, Node leader, Node[] replicas, Node[] inSyncReplicas); String topic(); int partition(); Node leader(); Node[] replicas(); Node[] inSyncReplicas(); @Override String toString(); }
PartitionInfo { @Override public String toString() { return String.format("Partition(topic = %s, partition = %d, leader = %s, replicas = %s, isr = %s)", topic, partition, leader == null ? "none" : leader.idString(), formatNodeIds(replicas), formatNodeIds(inSyncReplicas)); } PartitionInfo(String topic, int partition, Node leader, Node[] replicas, Node[] inSyncReplicas); String topic(); int partition(); Node leader(); Node[] replicas(); Node[] inSyncReplicas(); @Override String toString(); }
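A usage sketch for PartitionInfo.toString(), including the leaderless case that the focal method handles with "none" but the test above does not cover:

import org.apache.kafka.common.Node;
import org.apache.kafka.common.PartitionInfo;

public class PartitionInfoSketch {
    public static void main(String[] args) {
        Node leader = new Node(0, "localhost", 9092);
        Node r1 = new Node(1, "localhost", 9093);
        Node[] replicas = new Node[] {leader, r1};
        PartitionInfo info = new PartitionInfo("sample", 0, leader, replicas, replicas);
        // Partition(topic = sample, partition = 0, leader = 0, replicas = [0,1], isr = [0,1])
        System.out.println(info);

        // A leaderless partition prints "none" instead of a node id.
        System.out.println(new PartitionInfo("sample", 1, null, replicas, replicas));
    }
}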
@Test public void testAdd() { Headers headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); Header header = headers.iterator().next(); assertHeader("key", "value", header); headers.add(new RecordHeader("key2", "value2".getBytes())); assertHeader("key2", "value2", headers.lastHeader("key2")); assertEquals(2, getCount(headers)); }
@Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; }
RecordHeaders implements Headers { @Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; } }
RecordHeaders implements Headers { @Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); }
RecordHeaders implements Headers { @Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
RecordHeaders implements Headers { @Override public Headers add(Header header) throws IllegalStateException { canWrite(); headers.add(header); return this; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
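Usage sketch for RecordHeaders, showing duplicate keys, lastHeader(), and the read-only switch; after setReadOnly(), further add() calls fail the canWrite() check with an IllegalStateException:

import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

import java.nio.charset.StandardCharsets;

public class HeadersAddSketch {
    public static void main(String[] args) {
        RecordHeaders headers = new RecordHeaders();
        headers.add(new RecordHeader("key", "v1".getBytes(StandardCharsets.UTF_8)));
        headers.add("key", "v2".getBytes(StandardCharsets.UTF_8)); // convenience overload

        // lastHeader() returns the most recently added value for the key.
        Header last = headers.lastHeader("key");
        System.out.println(new String(last.value(), StandardCharsets.UTF_8)); // v2

        // From here on, add() would throw IllegalStateException.
        headers.setReadOnly();
    }
}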
@Test public void testHeaders() throws IOException { RecordHeaders headers = new RecordHeaders(); headers.add(new RecordHeader("key", "value".getBytes())); headers.add(new RecordHeader("key1", "key1value".getBytes())); headers.add(new RecordHeader("key", "value2".getBytes())); headers.add(new RecordHeader("key2", "key2value".getBytes())); Iterator<Header> keyHeaders = headers.headers("key").iterator(); assertHeader("key", "value", keyHeaders.next()); assertHeader("key", "value2", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); keyHeaders = headers.headers("key1").iterator(); assertHeader("key1", "key1value", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); keyHeaders = headers.headers("key2").iterator(); assertHeader("key2", "key2value", keyHeaders.next()); assertFalse(keyHeaders.hasNext()); }
@Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; }
RecordHeaders implements Headers { @Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; } }
RecordHeaders implements Headers { @Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); }
RecordHeaders implements Headers { @Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
RecordHeaders implements Headers { @Override public Iterable<Header> headers(final String key) { checkKey(key); return new Iterable<Header>() { @Override public Iterator<Header> iterator() { return new FilterByKeyIterator(headers.iterator(), key); } }; } RecordHeaders(); RecordHeaders(Header[] headers); RecordHeaders(Iterable<Header> headers); @Override Headers add(Header header); @Override Headers add(String key, byte[] value); @Override Headers remove(String key); @Override Header lastHeader(String key); @Override Iterable<Header> headers(final String key); @Override Iterator<Header> iterator(); void setReadOnly(); Header[] toArray(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
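headers(key) returns a lazy filtered view backed by FilterByKeyIterator: iterating it walks the backing list and skips non-matching entries without copying anything. A short usage sketch:

import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.header.internals.RecordHeaders;

public class HeadersFilterSketch {
    public static void main(String[] args) {
        RecordHeaders headers = new RecordHeaders();
        headers.add(new RecordHeader("key", "value".getBytes()));
        headers.add(new RecordHeader("other", "x".getBytes()));
        headers.add(new RecordHeader("key", "value2".getBytes()));

        // Only the two "key" entries are visited, in insertion order.
        for (Header h : headers.headers("key"))
            System.out.println(h.key() + " = " + new String(h.value()));
    }
}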
@Test public void decimalToJson() throws IOException { JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2))); validateEnvelope(converted); assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"), converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); assertArrayEquals(new byte[]{0, -100}, converted.get(JsonSchema.ENVELOPE_PAYLOAD_FIELD_NAME).binaryValue()); }
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
JsonConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { JsonNode jsonValue = enableSchemas ? convertToJsonWithEnvelope(schema, value) : convertToJsonWithoutEnvelope(schema, value); try { return serializer.serialize(topic, jsonValue); } catch (SerializationException e) { throw new DataException("Converting Kafka Connect data to byte[] failed due to serialization error: ", e); } } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
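A round-trip usage sketch for JsonConverter. It assumes the configuration key "schemas.enable", which is the standard JsonConverter setting controlling the {schema, payload} envelope seen in the test above, but is not shown in this row:

import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.apache.kafka.connect.json.JsonConverter;

import java.util.Collections;

public class JsonConverterSketch {
    public static void main(String[] args) {
        JsonConverter converter = new JsonConverter();
        // With schemas enabled, every payload is wrapped in a
        // {"schema": ..., "payload": ...} envelope.
        converter.configure(Collections.singletonMap("schemas.enable", "true"), false);

        byte[] bytes = converter.fromConnectData("topic", Schema.STRING_SCHEMA, "hello");
        System.out.println(new String(bytes));

        // toConnectData() parses the envelope back into schema + value.
        SchemaAndValue roundTrip = converter.toConnectData("topic", bytes);
        System.out.println(roundTrip.value()); // hello
    }
}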
@Test public void shouldRecognizeInvalidCharactersInTopicNames() { char[] invalidChars = {'/', '\\', ',', '\u0000', ':', '"', '\'', ';', '*', '?', ' ', '\t', '\r', '\n', '='}; for (char c : invalidChars) { String topicName = "Is " + c + "illegal"; assertFalse(Topic.containsValidPattern(topicName)); } }
static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); }
Topic { static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); } }
Topic { static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); } }
Topic { static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); } static void validate(String topic); static boolean isInternal(String topic); static boolean hasCollisionChars(String topic); static boolean hasCollision(String topicA, String topicB); }
Topic { static boolean containsValidPattern(String topic) { return LEGAL_CHARS_PATTERN.matcher(topic).matches(); } static void validate(String topic); static boolean isInternal(String topic); static boolean hasCollisionChars(String topic); static boolean hasCollision(String topicA, String topicB); static final String GROUP_METADATA_TOPIC_NAME; static final String TRANSACTION_STATE_TOPIC_NAME; static final String LEGAL_CHARS; }
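containsValidPattern() anchors the whole topic name against LEGAL_CHARS_PATTERN, so a single illegal character fails the match. A standalone sketch, assuming the pattern is Kafka's usual legal set [a-zA-Z0-9._-]; the constant's value is not shown in this row:

import java.util.regex.Pattern;

public class TopicCharsSketch {
    // Assumed reconstruction of Topic.LEGAL_CHARS: ASCII alphanumerics
    // plus '.', '_' and '-'.
    private static final Pattern LEGAL = Pattern.compile("[a-zA-Z0-9._-]+");

    static boolean containsValidPattern(String topic) {
        // matches() requires the entire string to match, not just a substring.
        return LEGAL.matcher(topic).matches();
    }

    public static void main(String[] args) {
        System.out.println(containsValidPattern("my-topic.v1"));  // true
        System.out.println(containsValidPattern("is it legal?")); // false (space, '?')
    }
}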
@Test public void testIsIBMJdk() { String vendor = System.getProperty("java.vendor"); try { System.setProperty("java.vendor", "Oracle Corporation"); assertFalse(Java.isIBMJdk()); System.setProperty("java.vendor", "IBM Corporation"); assertTrue(Java.isIBMJdk()); } finally { System.setProperty("java.vendor", vendor); } }
public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); static boolean isIBMJdk(); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); static boolean isIBMJdk(); static final String JVM_SPEC_VERSION; static final boolean IS_JAVA9_COMPATIBLE; }
@Test public void testLoadKerberosLoginModule() throws ClassNotFoundException { String clazz = Java.isIBMJdk() ? "com.ibm.security.auth.module.Krb5LoginModule" : "com.sun.security.auth.module.Krb5LoginModule"; Class.forName(clazz); }
public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); static boolean isIBMJdk(); }
Java { public static boolean isIBMJdk() { return System.getProperty("java.vendor").contains("IBM"); } private Java(); static boolean isIBMJdk(); static final String JVM_SPEC_VERSION; static final boolean IS_JAVA9_COMPATIBLE; }
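Building on isIBMJdk(), a hedged sketch of how the vendor-selected Krb5LoginModule name from the test above could feed a programmatic JAAS entry. The REQUIRED flag and the empty options map are illustrative choices, not Kafka's actual login configuration:

import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;

import java.util.Collections;

public class Krb5ModuleSketch {
    public static void main(String[] args) {
        // Pick the vendor-specific Kerberos login module, as in the test.
        String module = System.getProperty("java.vendor").contains("IBM")
            ? "com.ibm.security.auth.module.Krb5LoginModule"
            : "com.sun.security.auth.module.Krb5LoginModule";

        // A JAAS entry naming that module; real options vary by vendor and setup.
        AppConfigurationEntry entry = new AppConfigurationEntry(
            module, LoginModuleControlFlag.REQUIRED,
            Collections.<String, Object>emptyMap());
        System.out.println(entry.getLoginModuleName());
    }
}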
@Test public void testUpdate() { final byte[] bytes = "Any String you want".getBytes(); final int len = bytes.length; Checksum crc1 = Crc32C.create(); Checksum crc2 = Crc32C.create(); Checksum crc3 = Crc32C.create(); crc1.update(bytes, 0, len); for (int i = 0; i < len; i++) crc2.update(bytes[i]); crc3.update(bytes, 0, len / 2); crc3.update(bytes, len / 2, len - len / 2); assertEquals("Crc values should be the same", crc1.getValue(), crc2.getValue()); assertEquals("Crc values should be the same", crc1.getValue(), crc3.getValue()); }
public static Checksum create() { return CHECKSUM_FACTORY.create(); }
Crc32C { public static Checksum create() { return CHECKSUM_FACTORY.create(); } }
Crc32C { public static Checksum create() { return CHECKSUM_FACTORY.create(); } }
Crc32C { public static Checksum create() { return CHECKSUM_FACTORY.create(); } static long compute(byte[] bytes, int offset, int size); static long compute(ByteBuffer buffer, int offset, int size); static Checksum create(); }
Crc32C { public static Checksum create() { return CHECKSUM_FACTORY.create(); } static long compute(byte[] bytes, int offset, int size); static long compute(ByteBuffer buffer, int offset, int size); static Checksum create(); }
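A usage sketch relating the one-shot Crc32C.compute() helper to the incremental Checksum returned by create(); the two must agree, which is exactly what the test above verifies for byte-at-a-time and split updates. (Behind CHECKSUM_FACTORY, Kafka typically selects the JDK's java.util.zip.CRC32C on Java 9+ and a pure-Java fallback otherwise.)

import org.apache.kafka.common.utils.Crc32C;

import java.util.zip.Checksum;

public class Crc32CSketch {
    public static void main(String[] args) {
        byte[] bytes = "Any String you want".getBytes();

        // One-shot helper over the whole array.
        long oneShot = Crc32C.compute(bytes, 0, bytes.length);

        // Incremental Checksum view fed the same data.
        Checksum crc = Crc32C.create();
        crc.update(bytes, 0, bytes.length);
        System.out.println(oneShot == crc.getValue()); // true
    }
}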