target
stringlengths 20
113k
| src_fm
stringlengths 11
86.3k
| src_fm_fc
stringlengths 21
86.4k
| src_fm_fc_co
stringlengths 30
86.4k
| src_fm_fc_ms
stringlengths 42
86.8k
| src_fm_fc_ms_ff
stringlengths 43
86.8k
|
---|---|---|---|---|---|
@Test(expected = TopologyBuilderException.class) public void shouldNotAllowToAddGlobalStoreWithSourceNameEqualsProcessorName() { final String sameNameForSourceAndProcessor = "sameName"; final TopologyBuilder topologyBuilder = new TopologyBuilder() .addGlobalStore(new MockStateStoreSupplier("anyName", false, false), sameNameForSourceAndProcessor, null, null, "anyTopicName", sameNameForSourceAndProcessor, new MockProcessorSupplier()); }
|
public synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier, final String sourceName, final Deserializer keyDeserializer, final Deserializer valueDeserializer, final String topic, final String processorName, final ProcessorSupplier stateUpdateSupplier) { return addGlobalStore(storeSupplier, sourceName, null, keyDeserializer, valueDeserializer, topic, processorName, stateUpdateSupplier); }
|
TopologyBuilder { public synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier, final String sourceName, final Deserializer keyDeserializer, final Deserializer valueDeserializer, final String topic, final String processorName, final ProcessorSupplier stateUpdateSupplier) { return addGlobalStore(storeSupplier, sourceName, null, keyDeserializer, valueDeserializer, topic, processorName, stateUpdateSupplier); } }
|
TopologyBuilder { public synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier, final String sourceName, final Deserializer keyDeserializer, final Deserializer valueDeserializer, final String topic, final String processorName, final ProcessorSupplier stateUpdateSupplier) { return addGlobalStore(storeSupplier, sourceName, null, keyDeserializer, valueDeserializer, topic, processorName, stateUpdateSupplier); } TopologyBuilder(); }
|
TopologyBuilder { public synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier, final String sourceName, final Deserializer keyDeserializer, final Deserializer valueDeserializer, final String topic, final String processorName, final ProcessorSupplier stateUpdateSupplier) { return addGlobalStore(storeSupplier, sourceName, null, keyDeserializer, valueDeserializer, topic, processorName, stateUpdateSupplier); } TopologyBuilder(); synchronized final TopologyBuilder setApplicationId(final String applicationId); synchronized final TopologyBuilder addSource(final String name, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final String name, final String... topics); synchronized final TopologyBuilder addSource(final TimestampExtractor timestampExtractor, final String name, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final TimestampExtractor timestampExtractor, final String name, final String... topics); synchronized final TopologyBuilder addSource(final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final TimestampExtractor timestampExtractor, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final TimestampExtractor timestampExtractor, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final String name, final Deserializer keyDeserializer, final Deserializer valDeserializer, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final String... topics); synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier,
final String sourceName,
final Deserializer keyDeserializer,
final Deserializer valueDeserializer,
final String topic,
final String processorName,
final ProcessorSupplier stateUpdateSupplier); synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier,
final String sourceName,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valueDeserializer,
final String topic,
final String processorName,
final ProcessorSupplier stateUpdateSupplier); synchronized final TopologyBuilder addSource(final String name, final Deserializer keyDeserializer, final Deserializer valDeserializer, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final Pattern topicPattern); synchronized final TopologyBuilder addSink(final String name, final String topic, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final StreamPartitioner partitioner, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final Serializer keySerializer, final Serializer valSerializer, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final Serializer<K> keySerializer, final Serializer<V> valSerializer, final StreamPartitioner<? super K, ? super V> partitioner, final String... parentNames); synchronized final TopologyBuilder addProcessor(final String name, final ProcessorSupplier supplier, final String... parentNames); synchronized final TopologyBuilder addStateStore(final StateStoreSupplier supplier, final String... processorNames); synchronized final TopologyBuilder connectProcessorAndStateStores(final String processorName, final String... stateStoreNames); synchronized final TopologyBuilder connectProcessors(final String... 
processorNames); synchronized final TopologyBuilder addInternalTopic(final String topicName); synchronized final TopologyBuilder copartitionSources(final Collection<String> sourceNodes); synchronized Map<Integer, Set<String>> nodeGroups(); synchronized ProcessorTopology build(final Integer topicGroupId); synchronized ProcessorTopology buildGlobalStateTopology(); Map<String, StateStore> globalStateStores(); synchronized Map<Integer, TopicsInfo> topicGroups(); synchronized Pattern earliestResetTopicsPattern(); synchronized Pattern latestResetTopicsPattern(); Map<String, List<String>> stateStoreNameToSourceTopics(); synchronized Collection<Set<String>> copartitionGroups(); SubscriptionUpdates subscriptionUpdates(); synchronized Pattern sourceTopicPattern(); synchronized void updateSubscriptions(final SubscriptionUpdates subscriptionUpdates, final String threadId); }
|
TopologyBuilder { public synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier, final String sourceName, final Deserializer keyDeserializer, final Deserializer valueDeserializer, final String topic, final String processorName, final ProcessorSupplier stateUpdateSupplier) { return addGlobalStore(storeSupplier, sourceName, null, keyDeserializer, valueDeserializer, topic, processorName, stateUpdateSupplier); } TopologyBuilder(); synchronized final TopologyBuilder setApplicationId(final String applicationId); synchronized final TopologyBuilder addSource(final String name, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final String name, final String... topics); synchronized final TopologyBuilder addSource(final TimestampExtractor timestampExtractor, final String name, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final TimestampExtractor timestampExtractor, final String name, final String... topics); synchronized final TopologyBuilder addSource(final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final TimestampExtractor timestampExtractor, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset, final TimestampExtractor timestampExtractor, final String name, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final String name, final Deserializer keyDeserializer, final Deserializer valDeserializer, final String... topics); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final String... topics); synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier,
final String sourceName,
final Deserializer keyDeserializer,
final Deserializer valueDeserializer,
final String topic,
final String processorName,
final ProcessorSupplier stateUpdateSupplier); synchronized TopologyBuilder addGlobalStore(final StateStoreSupplier<KeyValueStore> storeSupplier,
final String sourceName,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valueDeserializer,
final String topic,
final String processorName,
final ProcessorSupplier stateUpdateSupplier); synchronized final TopologyBuilder addSource(final String name, final Deserializer keyDeserializer, final Deserializer valDeserializer, final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final TimestampExtractor timestampExtractor,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final Pattern topicPattern); synchronized final TopologyBuilder addSource(final AutoOffsetReset offsetReset,
final String name,
final Deserializer keyDeserializer,
final Deserializer valDeserializer,
final Pattern topicPattern); synchronized final TopologyBuilder addSink(final String name, final String topic, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final StreamPartitioner partitioner, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final Serializer keySerializer, final Serializer valSerializer, final String... parentNames); synchronized final TopologyBuilder addSink(final String name, final String topic, final Serializer<K> keySerializer, final Serializer<V> valSerializer, final StreamPartitioner<? super K, ? super V> partitioner, final String... parentNames); synchronized final TopologyBuilder addProcessor(final String name, final ProcessorSupplier supplier, final String... parentNames); synchronized final TopologyBuilder addStateStore(final StateStoreSupplier supplier, final String... processorNames); synchronized final TopologyBuilder connectProcessorAndStateStores(final String processorName, final String... stateStoreNames); synchronized final TopologyBuilder connectProcessors(final String... 
processorNames); synchronized final TopologyBuilder addInternalTopic(final String topicName); synchronized final TopologyBuilder copartitionSources(final Collection<String> sourceNodes); synchronized Map<Integer, Set<String>> nodeGroups(); synchronized ProcessorTopology build(final Integer topicGroupId); synchronized ProcessorTopology buildGlobalStateTopology(); Map<String, StateStore> globalStateStores(); synchronized Map<Integer, TopicsInfo> topicGroups(); synchronized Pattern earliestResetTopicsPattern(); synchronized Pattern latestResetTopicsPattern(); Map<String, List<String>> stateStoreNameToSourceTopics(); synchronized Collection<Set<String>> copartitionGroups(); SubscriptionUpdates subscriptionUpdates(); synchronized Pattern sourceTopicPattern(); synchronized void updateSubscriptions(final SubscriptionUpdates subscriptionUpdates, final String threadId); }
|
@Test public void testPutConnectorTaskConfigs() throws Throwable { final Capture<Callback<Void>> cb = Capture.newInstance(); herder.putTaskConfigs(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(TASK_CONFIGS), EasyMock.capture(cb)); expectAndCallbackResult(cb, null); PowerMock.replayAll(); connectorsResource.putTaskConfigs(CONNECTOR_NAME, FORWARD, TASK_CONFIGS); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test(expected = NotFoundException.class) public void testPutConnectorTaskConfigsConnectorNotFound() throws Throwable { final Capture<Callback<Void>> cb = Capture.newInstance(); herder.putTaskConfigs(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(TASK_CONFIGS), EasyMock.capture(cb)); expectAndCallbackException(cb, new NotFoundException("not found")); PowerMock.replayAll(); connectorsResource.putTaskConfigs(CONNECTOR_NAME, FORWARD, TASK_CONFIGS); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks") public void putTaskConfigs(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward, final List<Map<String, String>> taskConfigs) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.putTaskConfigs(connector, taskConfigs, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks", "POST", taskConfigs, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test(expected = NotFoundException.class) public void testRestartConnectorNotFound() throws Throwable { final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartConnector(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackException(cb, new NotFoundException("not found")); PowerMock.replayAll(); connectorsResource.restartConnector(CONNECTOR_NAME, FORWARD); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void testRestartConnectorLeaderRedirect() throws Throwable { final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartConnector(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); expectAndCallbackNotLeaderException(cb); EasyMock.expect(RestServer.httpRequest(EasyMock.eq("http: EasyMock.eq("POST"), EasyMock.isNull(), EasyMock.<TypeReference>anyObject())) .andReturn(new RestServer.HttpResponse<>(202, new HashMap<String, List<String>>(), null)); PowerMock.replayAll(); connectorsResource.restartConnector(CONNECTOR_NAME, null); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void testRestartConnectorOwnerRedirect() throws Throwable { final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartConnector(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(cb)); String ownerUrl = "http: expectAndCallbackException(cb, new NotAssignedException("not owner test", ownerUrl)); EasyMock.expect(RestServer.httpRequest(EasyMock.eq("http: EasyMock.eq("POST"), EasyMock.isNull(), EasyMock.<TypeReference>anyObject())) .andReturn(new RestServer.HttpResponse<>(202, new HashMap<String, List<String>>(), null)); PowerMock.replayAll(); connectorsResource.restartConnector(CONNECTOR_NAME, true); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/restart") public void restartConnector(final @PathParam("connector") String connector, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(connector, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void arrayToConnect() { byte[] arrayJson = "{ \"schema\": { \"type\": \"array\", \"items\": { \"type\" : \"int32\" } }, \"payload\": [1, 2, 3] }".getBytes(); assertEquals(new SchemaAndValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3)), converter.toConnectData(TOPIC, arrayJson)); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test(expected = NotFoundException.class) public void testRestartTaskNotFound() throws Throwable { ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0); final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartTask(EasyMock.eq(taskId), EasyMock.capture(cb)); expectAndCallbackException(cb, new NotFoundException("not found")); PowerMock.replayAll(); connectorsResource.restartTask(CONNECTOR_NAME, 0, FORWARD); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void testRestartTaskLeaderRedirect() throws Throwable { ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0); final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartTask(EasyMock.eq(taskId), EasyMock.capture(cb)); expectAndCallbackNotLeaderException(cb); EasyMock.expect(RestServer.httpRequest(EasyMock.eq("http: EasyMock.eq("POST"), EasyMock.isNull(), EasyMock.<TypeReference>anyObject())) .andReturn(new RestServer.HttpResponse<>(202, new HashMap<String, List<String>>(), null)); PowerMock.replayAll(); connectorsResource.restartTask(CONNECTOR_NAME, 0, null); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void testRestartTaskOwnerRedirect() throws Throwable { ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0); final Capture<Callback<Void>> cb = Capture.newInstance(); herder.restartTask(EasyMock.eq(taskId), EasyMock.capture(cb)); String ownerUrl = "http: expectAndCallbackException(cb, new NotAssignedException("not owner test", ownerUrl)); EasyMock.expect(RestServer.httpRequest(EasyMock.eq("http: EasyMock.eq("POST"), EasyMock.isNull(), EasyMock.<TypeReference>anyObject())) .andReturn(new RestServer.HttpResponse<>(202, new HashMap<String, List<String>>(), null)); PowerMock.replayAll(); connectorsResource.restartTask(CONNECTOR_NAME, 0, true); PowerMock.verifyAll(); }
|
@POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
ConnectorsResource { @POST @Path("/{connector}/tasks/{task}/restart") public void restartTask(final @PathParam("connector") String connector, final @PathParam("task") Integer task, final @QueryParam("forward") Boolean forward) throws Throwable { FutureCallback<Void> cb = new FutureCallback<>(); ConnectorTaskId taskId = new ConnectorTaskId(connector, task); herder.restartTask(taskId, cb); completeOrForwardRequest(cb, "/connectors/" + connector + "/tasks/" + task + "/restart", "POST", null, forward); } ConnectorsResource(Herder herder); @GET @Path("/") Collection<String> listConnectors(final @QueryParam("forward") Boolean forward); @POST @Path("/") Response createConnector(final @QueryParam("forward") Boolean forward,
final CreateConnectorRequest createRequest); @GET @Path("/{connector}") ConnectorInfo getConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/config") Map<String, String> getConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @GET @Path("/{connector}/status") ConnectorStateInfo getConnectorStatus(final @PathParam("connector") String connector); @PUT @Path("/{connector}/config") Response putConnectorConfig(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final Map<String, String> connectorConfig); @POST @Path("/{connector}/restart") void restartConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @PUT @Path("/{connector}/pause") Response pauseConnector(@PathParam("connector") String connector); @PUT @Path("/{connector}/resume") Response resumeConnector(@PathParam("connector") String connector); @GET @Path("/{connector}/tasks") List<TaskInfo> getTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); @POST @Path("/{connector}/tasks") void putTaskConfigs(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward,
final List<Map<String, String>> taskConfigs); @GET @Path("/{connector}/tasks/{task}/status") ConnectorStateInfo.TaskState getTaskStatus(final @PathParam("connector") String connector,
final @PathParam("task") Integer task); @POST @Path("/{connector}/tasks/{task}/restart") void restartTask(final @PathParam("connector") String connector,
final @PathParam("task") Integer task,
final @QueryParam("forward") Boolean forward); @DELETE @Path("/{connector}") void destroyConnector(final @PathParam("connector") String connector,
final @QueryParam("forward") Boolean forward); }
|
@Test public void testValidateConfigWithSingleErrorDueToMissingConnectorClassname() throws Throwable { herder.validateConnectorConfig(EasyMock.eq(partialProps)); PowerMock.expectLastCall().andAnswer(new IAnswer<ConfigInfos>() { @Override public ConfigInfos answer() { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(partialProps); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(partialProps); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); return AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); } }); PowerMock.replayAll(); ConfigInfos configInfos = connectorPluginsResource.validateConfigs( ConnectorPluginsResourceTestConnector.class.getSimpleName(), partialProps ); assertEquals(PARTIAL_CONFIG_INFOS.name(), configInfos.name()); assertEquals(PARTIAL_CONFIG_INFOS.errorCount(), configInfos.errorCount()); assertEquals(PARTIAL_CONFIG_INFOS.groups(), configInfos.groups()); assertEquals( new HashSet<>(PARTIAL_CONFIG_INFOS.values()), new HashSet<>(configInfos.values()) ); PowerMock.verifyAll(); }
|
@PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
@Test public void testValidateConfigWithSimpleName() throws Throwable { herder.validateConnectorConfig(EasyMock.eq(props)); PowerMock.expectLastCall().andAnswer(new IAnswer<ConfigInfos>() { @Override public ConfigInfos answer() { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(props); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); return AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); } }); PowerMock.replayAll(); ConfigInfos configInfos = connectorPluginsResource.validateConfigs( ConnectorPluginsResourceTestConnector.class.getSimpleName(), props ); assertEquals(CONFIG_INFOS.name(), configInfos.name()); assertEquals(0, configInfos.errorCount()); assertEquals(CONFIG_INFOS.groups(), configInfos.groups()); assertEquals(new HashSet<>(CONFIG_INFOS.values()), new HashSet<>(configInfos.values())); PowerMock.verifyAll(); }
|
@PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
@Test public void testValidateConfigWithAlias() throws Throwable { herder.validateConnectorConfig(EasyMock.eq(props)); PowerMock.expectLastCall().andAnswer(new IAnswer<ConfigInfos>() { @Override public ConfigInfos answer() { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(props); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); return AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); } }); PowerMock.replayAll(); ConfigInfos configInfos = connectorPluginsResource.validateConfigs( "ConnectorPluginsResourceTest", props ); assertEquals(CONFIG_INFOS.name(), configInfos.name()); assertEquals(0, configInfos.errorCount()); assertEquals(CONFIG_INFOS.groups(), configInfos.groups()); assertEquals(new HashSet<>(CONFIG_INFOS.values()), new HashSet<>(configInfos.values())); PowerMock.verifyAll(); }
|
@PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
@Test(expected = BadRequestException.class) public void testValidateConfigWithNonExistentName() throws Throwable { herder.validateConnectorConfig(EasyMock.eq(props)); PowerMock.expectLastCall().andAnswer(new IAnswer<ConfigInfos>() { @Override public ConfigInfos answer() { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(props); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); return AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); } }); PowerMock.replayAll(); String customClassname = "com.custom.package." + ConnectorPluginsResourceTestConnector.class.getSimpleName(); connectorPluginsResource.validateConfigs(customClassname, props); PowerMock.verifyAll(); }
|
@PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
@Test(expected = BadRequestException.class) public void testValidateConfigWithNonExistentAlias() throws Throwable { herder.validateConnectorConfig(EasyMock.eq(props)); PowerMock.expectLastCall().andAnswer(new IAnswer<ConfigInfos>() { @Override public ConfigInfos answer() { ConfigDef connectorConfigDef = ConnectorConfig.configDef(); List<ConfigValue> connectorConfigValues = connectorConfigDef.validate(props); Connector connector = new ConnectorPluginsResourceTestConnector(); Config config = connector.validate(props); ConfigDef configDef = connector.config(); Map<String, ConfigDef.ConfigKey> configKeys = configDef.configKeys(); List<ConfigValue> configValues = config.configValues(); Map<String, ConfigDef.ConfigKey> resultConfigKeys = new HashMap<>(configKeys); resultConfigKeys.putAll(connectorConfigDef.configKeys()); configValues.addAll(connectorConfigValues); return AbstractHerder.generateResult( ConnectorPluginsResourceTestConnector.class.getName(), resultConfigKeys, configValues, Collections.singletonList("Test") ); } }); PowerMock.replayAll(); connectorPluginsResource.validateConfigs("ConnectorPluginsTest", props); PowerMock.verifyAll(); }
|
@PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
ConnectorPluginsResource { @PUT @Path("/{connectorType}/config/validate") public ConfigInfos validateConfigs( final @PathParam("connectorType") String connType, final Map<String, String> connectorConfig ) throws Throwable { String includedConnType = connectorConfig.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (includedConnType != null && !normalizedPluginName(includedConnType).endsWith(normalizedPluginName(connType))) { throw new BadRequestException( "Included connector type " + includedConnType + " does not match request type " + connType ); } return herder.validateConnectorConfig(connectorConfig); } ConnectorPluginsResource(Herder herder); @PUT @Path("/{connectorType}/config/validate") ConfigInfos validateConfigs(
final @PathParam("connectorType") String connType,
final Map<String, String> connectorConfig
); @GET @Path("/") List<ConnectorPluginInfo> listConnectorPlugins(); }
|
@Test public void connectorStatus() { ConnectorTaskId taskId = new ConnectorTaskId(connector, 0); ConfigBackingStore configStore = strictMock(ConfigBackingStore.class); StatusBackingStore statusStore = strictMock(StatusBackingStore.class); AbstractHerder herder = partialMockBuilder(AbstractHerder.class) .withConstructor(Worker.class, String.class, StatusBackingStore.class, ConfigBackingStore.class) .withArgs(worker, workerId, statusStore, configStore) .addMockedMethod("generation") .createMock(); EasyMock.expect(herder.generation()).andStubReturn(generation); EasyMock.expect(statusStore.get(connector)) .andReturn(new ConnectorStatus(connector, AbstractStatus.State.RUNNING, workerId, generation)); EasyMock.expect(statusStore.getAll(connector)) .andReturn(Collections.singletonList( new TaskStatus(taskId, AbstractStatus.State.UNASSIGNED, workerId, generation))); replayAll(); ConnectorStateInfo state = herder.connectorStatus(connector); assertEquals(connector, state.name()); assertEquals("RUNNING", state.connector().state()); assertEquals(1, state.tasks().size()); assertEquals(workerId, state.connector().workerId()); ConnectorStateInfo.TaskState taskState = state.tasks().get(0); assertEquals(0, taskState.id()); assertEquals("UNASSIGNED", taskState.state()); assertEquals(workerId, taskState.workerId()); verifyAll(); }
|
@Override public ConnectorStateInfo connectorStatus(String connName) { ConnectorStatus connector = statusBackingStore.get(connName); if (connector == null) throw new NotFoundException("No status found for connector " + connName); Collection<TaskStatus> tasks = statusBackingStore.getAll(connName); ConnectorStateInfo.ConnectorState connectorState = new ConnectorStateInfo.ConnectorState( connector.state().toString(), connector.workerId(), connector.trace()); List<ConnectorStateInfo.TaskState> taskStates = new ArrayList<>(); for (TaskStatus status : tasks) { taskStates.add(new ConnectorStateInfo.TaskState(status.id().task(), status.state().toString(), status.workerId(), status.trace())); } Collections.sort(taskStates); return new ConnectorStateInfo(connName, connectorState, taskStates); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo connectorStatus(String connName) { ConnectorStatus connector = statusBackingStore.get(connName); if (connector == null) throw new NotFoundException("No status found for connector " + connName); Collection<TaskStatus> tasks = statusBackingStore.getAll(connName); ConnectorStateInfo.ConnectorState connectorState = new ConnectorStateInfo.ConnectorState( connector.state().toString(), connector.workerId(), connector.trace()); List<ConnectorStateInfo.TaskState> taskStates = new ArrayList<>(); for (TaskStatus status : tasks) { taskStates.add(new ConnectorStateInfo.TaskState(status.id().task(), status.state().toString(), status.workerId(), status.trace())); } Collections.sort(taskStates); return new ConnectorStateInfo(connName, connectorState, taskStates); } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo connectorStatus(String connName) { ConnectorStatus connector = statusBackingStore.get(connName); if (connector == null) throw new NotFoundException("No status found for connector " + connName); Collection<TaskStatus> tasks = statusBackingStore.getAll(connName); ConnectorStateInfo.ConnectorState connectorState = new ConnectorStateInfo.ConnectorState( connector.state().toString(), connector.workerId(), connector.trace()); List<ConnectorStateInfo.TaskState> taskStates = new ArrayList<>(); for (TaskStatus status : tasks) { taskStates.add(new ConnectorStateInfo.TaskState(status.id().task(), status.state().toString(), status.workerId(), status.trace())); } Collections.sort(taskStates); return new ConnectorStateInfo(connName, connectorState, taskStates); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo connectorStatus(String connName) { ConnectorStatus connector = statusBackingStore.get(connName); if (connector == null) throw new NotFoundException("No status found for connector " + connName); Collection<TaskStatus> tasks = statusBackingStore.getAll(connName); ConnectorStateInfo.ConnectorState connectorState = new ConnectorStateInfo.ConnectorState( connector.state().toString(), connector.workerId(), connector.trace()); List<ConnectorStateInfo.TaskState> taskStates = new ArrayList<>(); for (TaskStatus status : tasks) { taskStates.add(new ConnectorStateInfo.TaskState(status.id().task(), status.state().toString(), status.workerId(), status.trace())); } Collections.sort(taskStates); return new ConnectorStateInfo(connName, connectorState, taskStates); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo connectorStatus(String connName) { ConnectorStatus connector = statusBackingStore.get(connName); if (connector == null) throw new NotFoundException("No status found for connector " + connName); Collection<TaskStatus> tasks = statusBackingStore.getAll(connName); ConnectorStateInfo.ConnectorState connectorState = new ConnectorStateInfo.ConnectorState( connector.state().toString(), connector.workerId(), connector.trace()); List<ConnectorStateInfo.TaskState> taskStates = new ArrayList<>(); for (TaskStatus status : tasks) { taskStates.add(new ConnectorStateInfo.TaskState(status.id().task(), status.state().toString(), status.workerId(), status.trace())); } Collections.sort(taskStates); return new ConnectorStateInfo(connName, connectorState, taskStates); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
@Test public void mapToConnectStringKeys() { byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"string\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": { \"key1\": 12, \"key2\": 15} }".getBytes(); Map<String, Integer> expected = new HashMap<>(); expected.put("key1", 12); expected.put("key2", 15); assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build(), expected), converter.toConnectData(TOPIC, mapJson)); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void taskStatus() { ConnectorTaskId taskId = new ConnectorTaskId("connector", 0); String workerId = "workerId"; ConfigBackingStore configStore = strictMock(ConfigBackingStore.class); StatusBackingStore statusStore = strictMock(StatusBackingStore.class); AbstractHerder herder = partialMockBuilder(AbstractHerder.class) .withConstructor(Worker.class, String.class, StatusBackingStore.class, ConfigBackingStore.class) .withArgs(worker, workerId, statusStore, configStore) .addMockedMethod("generation") .createMock(); EasyMock.expect(herder.generation()).andStubReturn(5); final Capture<TaskStatus> statusCapture = EasyMock.newCapture(); statusStore.putSafe(EasyMock.capture(statusCapture)); EasyMock.expectLastCall(); EasyMock.expect(statusStore.get(taskId)).andAnswer(new IAnswer<TaskStatus>() { @Override public TaskStatus answer() throws Throwable { return statusCapture.getValue(); } }); replayAll(); herder.onFailure(taskId, new RuntimeException()); ConnectorStateInfo.TaskState taskState = herder.taskStatus(taskId); assertEquals(workerId, taskState.workerId()); assertEquals("FAILED", taskState.state()); assertEquals(0, taskState.id()); assertNotNull(taskState.trace()); verifyAll(); }
|
@Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id) { TaskStatus status = statusBackingStore.get(id); if (status == null) throw new NotFoundException("No status found for task " + id); return new ConnectorStateInfo.TaskState(id.task(), status.state().toString(), status.workerId(), status.trace()); } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
@Test(expected = BadRequestException.class) public void testConfigValidationEmptyConfig() { AbstractHerder herder = createConfigValidationHerder(TestSourceConnector.class); replayAll(); herder.validateConnectorConfig(new HashMap<String, String>()); verifyAll(); }
|
@Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
@Test() public void testConfigValidationMissingName() { AbstractHerder herder = createConfigValidationHerder(TestSourceConnector.class); replayAll(); Map<String, String> config = Collections.singletonMap(ConnectorConfig.CONNECTOR_CLASS_CONFIG, TestSourceConnector.class.getName()); ConfigInfos result = herder.validateConnectorConfig(config); assertEquals(TestSourceConnector.class.getName(), result.name()); assertEquals(Arrays.asList(ConnectorConfig.COMMON_GROUP, ConnectorConfig.TRANSFORMS_GROUP), result.groups()); assertEquals(2, result.errorCount()); assertEquals(8, result.values().size()); assertEquals(ConnectorConfig.NAME_CONFIG, result.values().get(0).configValue().name()); assertEquals(1, result.values().get(0).configValue().errors().size()); assertEquals("required", result.values().get(6).configValue().name()); assertEquals(1, result.values().get(6).configValue().errors().size()); verifyAll(); }
|
@Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
@Test() public void testConfigValidationTransformsExtendResults() { AbstractHerder herder = createConfigValidationHerder(TestSourceConnector.class); Set<PluginDesc<Transformation>> transformations = new HashSet<>(); transformations.add(new PluginDesc<Transformation>(SampleTransformation.class, "1.0", classLoader)); EasyMock.expect(plugins.transformations()).andReturn(transformations).times(2); replayAll(); Map<String, String> config = new HashMap<>(); config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, TestSourceConnector.class.getName()); config.put(ConnectorConfig.NAME_CONFIG, "connector-name"); config.put(ConnectorConfig.TRANSFORMS_CONFIG, "xformA,xformB"); config.put(ConnectorConfig.TRANSFORMS_CONFIG + ".xformA.type", SampleTransformation.class.getName()); config.put("required", "value"); ConfigInfos result = herder.validateConnectorConfig(config); assertEquals(TestSourceConnector.class.getName(), result.name()); List<String> expectedGroups = Arrays.asList( ConnectorConfig.COMMON_GROUP, ConnectorConfig.TRANSFORMS_GROUP, "Transforms: xformA", "Transforms: xformB" ); assertEquals(expectedGroups, result.groups()); assertEquals(2, result.errorCount()); assertEquals(11, result.values().size()); assertEquals("transforms.xformA.type", result.values().get(6).configValue().name()); assertTrue(result.values().get(6).configValue().errors().isEmpty()); assertEquals("transforms.xformA.subconfig", result.values().get(7).configValue().name()); assertEquals("transforms.xformB.type", result.values().get(8).configValue().name()); assertFalse(result.values().get(8).configValue().errors().isEmpty()); verifyAll(); }
|
@Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
AbstractHerder implements Herder, TaskStatus.Listener, ConnectorStatus.Listener { @Override public ConfigInfos validateConnectorConfig(Map<String, String> connectorProps) { String connType = connectorProps.get(ConnectorConfig.CONNECTOR_CLASS_CONFIG); if (connType == null) throw new BadRequestException("Connector config " + connectorProps + " contains no connector type"); List<ConfigValue> configValues = new ArrayList<>(); Map<String, ConfigKey> configKeys = new LinkedHashMap<>(); Set<String> allGroups = new LinkedHashSet<>(); Connector connector = getConnector(connType); ClassLoader savedLoader = plugins().compareAndSwapLoaders(connector); try { ConfigDef baseConfigDef = (connector instanceof SourceConnector) ? SourceConnectorConfig.configDef() : SinkConnectorConfig.configDef(); ConfigDef enrichedConfigDef = ConnectorConfig.enrich(plugins(), baseConfigDef, connectorProps, false); Map<String, ConfigValue> validatedConnectorConfig = validateBasicConnectorConfig( connector, enrichedConfigDef, connectorProps ); configValues.addAll(validatedConnectorConfig.values()); configKeys.putAll(enrichedConfigDef.configKeys()); allGroups.addAll(enrichedConfigDef.groups()); Config config = connector.validate(connectorProps); ConfigDef configDef = connector.config(); configKeys.putAll(configDef.configKeys()); allGroups.addAll(configDef.groups()); configValues.addAll(config.configValues()); return generateResult(connType, configKeys, configValues, new ArrayList<>(allGroups)); } finally { Plugins.compareAndSwapLoaders(savedLoader); } } AbstractHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore); @Override void onStartup(String connector); @Override void onPause(String connector); @Override void onResume(String connector); @Override void onShutdown(String connector); @Override void onFailure(String connector, Throwable cause); @Override void onStartup(ConnectorTaskId id); @Override void onFailure(ConnectorTaskId id, Throwable cause); @Override void onShutdown(ConnectorTaskId id); @Override void onResume(ConnectorTaskId id); @Override void onPause(ConnectorTaskId id); @Override void onDeletion(String connector); @Override void pauseConnector(String connector); @Override void resumeConnector(String connector); @Override Plugins plugins(); @Override ConnectorStateInfo connectorStatus(String connName); @Override ConnectorStateInfo.TaskState taskStatus(ConnectorTaskId id); @Override ConfigInfos validateConnectorConfig(Map<String, String> connectorProps); static ConfigInfos generateResult(String connType, Map<String, ConfigKey> configKeys, List<ConfigValue> configValues, List<String> groups); }
|
@Test public void testRestartConnector() throws Exception { expectAdd(SourceSink.SOURCE); Map<String, String> config = connectorConfig(SourceSink.SOURCE); expectConfigValidation(config); worker.stopConnector(CONNECTOR_NAME); EasyMock.expectLastCall().andReturn(true); worker.startConnector(EasyMock.eq(CONNECTOR_NAME), EasyMock.eq(config), EasyMock.anyObject(HerderConnectorContext.class), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); EasyMock.expectLastCall().andReturn(true); PowerMock.replayAll(); herder.putConnectorConfig(CONNECTOR_NAME, config, false, createCallback); FutureCallback<Void> cb = new FutureCallback<>(); herder.restartConnector(CONNECTOR_NAME, cb); cb.get(1000L, TimeUnit.MILLISECONDS); PowerMock.verifyAll(); }
|
@Override public synchronized void restartConnector(String connName, Callback<Void> cb) { if (!configState.contains(connName)) cb.onCompletion(new NotFoundException("Connector " + connName + " not found", null), null); Map<String, String> config = configState.connectorConfig(connName); worker.stopConnector(connName); if (startConnector(config)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start connector: " + connName), null); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartConnector(String connName, Callback<Void> cb) { if (!configState.contains(connName)) cb.onCompletion(new NotFoundException("Connector " + connName + " not found", null), null); Map<String, String> config = configState.connectorConfig(connName); worker.stopConnector(connName); if (startConnector(config)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartConnector(String connName, Callback<Void> cb) { if (!configState.contains(connName)) cb.onCompletion(new NotFoundException("Connector " + connName + " not found", null), null); Map<String, String> config = configState.connectorConfig(connName); worker.stopConnector(connName); if (startConnector(config)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartConnector(String connName, Callback<Void> cb) { if (!configState.contains(connName)) cb.onCompletion(new NotFoundException("Connector " + connName + " not found", null), null); Map<String, String> config = configState.connectorConfig(connName); worker.stopConnector(connName); if (startConnector(config)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartConnector(String connName, Callback<Void> cb) { if (!configState.contains(connName)) cb.onCompletion(new NotFoundException("Connector " + connName + " not found", null), null); Map<String, String> config = configState.connectorConfig(connName); worker.stopConnector(connName); if (startConnector(config)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
@Test public void testRestartTask() throws Exception { ConnectorTaskId taskId = new ConnectorTaskId(CONNECTOR_NAME, 0); expectAdd(SourceSink.SOURCE); Map<String, String> connectorConfig = connectorConfig(SourceSink.SOURCE); expectConfigValidation(connectorConfig); worker.stopAndAwaitTask(taskId); EasyMock.expectLastCall(); worker.startTask(taskId, connectorConfig, taskConfig(SourceSink.SOURCE), herder, TargetState.STARTED); EasyMock.expectLastCall().andReturn(true); PowerMock.replayAll(); herder.putConnectorConfig(CONNECTOR_NAME, connectorConfig, false, createCallback); FutureCallback<Void> cb = new FutureCallback<>(); herder.restartTask(taskId, cb); cb.get(1000L, TimeUnit.MILLISECONDS); PowerMock.verifyAll(); }
|
@Override public synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb) { if (!configState.contains(taskId.connector())) cb.onCompletion(new NotFoundException("Connector " + taskId.connector() + " not found", null), null); Map<String, String> taskConfigProps = configState.taskConfig(taskId); if (taskConfigProps == null) cb.onCompletion(new NotFoundException("Task " + taskId + " not found", null), null); Map<String, String> connConfigProps = configState.connectorConfig(taskId.connector()); TargetState targetState = configState.targetState(taskId.connector()); worker.stopAndAwaitTask(taskId); if (worker.startTask(taskId, connConfigProps, taskConfigProps, this, targetState)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start task: " + taskId), null); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb) { if (!configState.contains(taskId.connector())) cb.onCompletion(new NotFoundException("Connector " + taskId.connector() + " not found", null), null); Map<String, String> taskConfigProps = configState.taskConfig(taskId); if (taskConfigProps == null) cb.onCompletion(new NotFoundException("Task " + taskId + " not found", null), null); Map<String, String> connConfigProps = configState.connectorConfig(taskId.connector()); TargetState targetState = configState.targetState(taskId.connector()); worker.stopAndAwaitTask(taskId); if (worker.startTask(taskId, connConfigProps, taskConfigProps, this, targetState)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start task: " + taskId), null); } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb) { if (!configState.contains(taskId.connector())) cb.onCompletion(new NotFoundException("Connector " + taskId.connector() + " not found", null), null); Map<String, String> taskConfigProps = configState.taskConfig(taskId); if (taskConfigProps == null) cb.onCompletion(new NotFoundException("Task " + taskId + " not found", null), null); Map<String, String> connConfigProps = configState.connectorConfig(taskId.connector()); TargetState targetState = configState.targetState(taskId.connector()); worker.stopAndAwaitTask(taskId); if (worker.startTask(taskId, connConfigProps, taskConfigProps, this, targetState)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start task: " + taskId), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb) { if (!configState.contains(taskId.connector())) cb.onCompletion(new NotFoundException("Connector " + taskId.connector() + " not found", null), null); Map<String, String> taskConfigProps = configState.taskConfig(taskId); if (taskConfigProps == null) cb.onCompletion(new NotFoundException("Task " + taskId + " not found", null), null); Map<String, String> connConfigProps = configState.connectorConfig(taskId.connector()); TargetState targetState = configState.targetState(taskId.connector()); worker.stopAndAwaitTask(taskId); if (worker.startTask(taskId, connConfigProps, taskConfigProps, this, targetState)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start task: " + taskId), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb) { if (!configState.contains(taskId.connector())) cb.onCompletion(new NotFoundException("Connector " + taskId.connector() + " not found", null), null); Map<String, String> taskConfigProps = configState.taskConfig(taskId); if (taskConfigProps == null) cb.onCompletion(new NotFoundException("Task " + taskId + " not found", null), null); Map<String, String> connConfigProps = configState.connectorConfig(taskId.connector()); TargetState targetState = configState.targetState(taskId.connector()); worker.stopAndAwaitTask(taskId); if (worker.startTask(taskId, connConfigProps, taskConfigProps, this, targetState)) cb.onCompletion(null, null); else cb.onCompletion(new ConnectException("Failed to start task: " + taskId), null); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
@Test public void testPutConnectorConfig() throws Exception { Map<String, String> connConfig = connectorConfig(SourceSink.SOURCE); Map<String, String> newConnConfig = new HashMap<>(connConfig); newConnConfig.put("foo", "bar"); Callback<Map<String, String>> connectorConfigCb = PowerMock.createMock(Callback.class); Callback<Herder.Created<ConnectorInfo>> putConnectorConfigCb = PowerMock.createMock(Callback.class); connector = PowerMock.createMock(BogusSourceConnector.class); expectAdd(SourceSink.SOURCE); Connector connectorMock = PowerMock.createMock(Connector.class); expectConfigValidation(connectorMock, true, connConfig); connectorConfigCb.onCompletion(null, connConfig); EasyMock.expectLastCall(); worker.stopConnector(CONNECTOR_NAME); EasyMock.expectLastCall().andReturn(true); Capture<Map<String, String>> capturedConfig = EasyMock.newCapture(); worker.startConnector(EasyMock.eq(CONNECTOR_NAME), EasyMock.capture(capturedConfig), EasyMock.<ConnectorContext>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); EasyMock.expectLastCall().andReturn(true); EasyMock.expect(worker.isRunning(CONNECTOR_NAME)).andReturn(true); EasyMock.expect(worker.connectorTaskConfigs(CONNECTOR_NAME, DEFAULT_MAX_TASKS, null)) .andReturn(singletonList(taskConfig(SourceSink.SOURCE))); worker.isSinkConnector(CONNECTOR_NAME); EasyMock.expectLastCall().andReturn(false); ConnectorInfo newConnInfo = new ConnectorInfo(CONNECTOR_NAME, newConnConfig, Arrays.asList(new ConnectorTaskId(CONNECTOR_NAME, 0))); putConnectorConfigCb.onCompletion(null, new Herder.Created<>(false, newConnInfo)); EasyMock.expectLastCall(); expectConfigValidation(connectorMock, false, newConnConfig); connectorConfigCb.onCompletion(null, newConnConfig); EasyMock.expectLastCall(); PowerMock.replayAll(); herder.putConnectorConfig(CONNECTOR_NAME, connConfig, false, createCallback); herder.connectorConfig(CONNECTOR_NAME, connectorConfigCb); herder.putConnectorConfig(CONNECTOR_NAME, newConnConfig, true, 
putConnectorConfigCb); assertEquals("bar", capturedConfig.getValue().get("foo")); herder.connectorConfig(CONNECTOR_NAME, connectorConfigCb); PowerMock.verifyAll(); }
|
@Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
@Test(expected = UnsupportedOperationException.class) public void testPutTaskConfigs() { Callback<Void> cb = PowerMock.createMock(Callback.class); PowerMock.replayAll(); herder.putTaskConfigs(CONNECTOR_NAME, Arrays.asList(singletonMap("config", "value")), cb); PowerMock.verifyAll(); }
|
@Override public void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback) { throw new UnsupportedOperationException("Kafka Connect in standalone mode does not support externally setting task configurations."); }
|
StandaloneHerder extends AbstractHerder { @Override public void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback) { throw new UnsupportedOperationException("Kafka Connect in standalone mode does not support externally setting task configurations."); } }
|
StandaloneHerder extends AbstractHerder { @Override public void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback) { throw new UnsupportedOperationException("Kafka Connect in standalone mode does not support externally setting task configurations."); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); }
|
StandaloneHerder extends AbstractHerder { @Override public void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback) { throw new UnsupportedOperationException("Kafka Connect in standalone mode does not support externally setting task configurations."); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
StandaloneHerder extends AbstractHerder { @Override public void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback) { throw new UnsupportedOperationException("Kafka Connect in standalone mode does not support externally setting task configurations."); } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
@Test public void testCorruptConfig() { Map<String, String> config = new HashMap<>(); config.put(ConnectorConfig.NAME_CONFIG, CONNECTOR_NAME); config.put(ConnectorConfig.CONNECTOR_CLASS_CONFIG, BogusSinkConnector.class.getName()); Connector connectorMock = PowerMock.createMock(Connector.class); String error = "This is an error in your config!"; List<String> errors = new ArrayList<>(singletonList(error)); String key = "foo.invalid.key"; EasyMock.expect(connectorMock.validate(config)).andReturn( new Config( Arrays.asList(new ConfigValue(key, null, Collections.emptyList(), errors)) ) ); ConfigDef configDef = new ConfigDef(); configDef.define(key, ConfigDef.Type.STRING, ConfigDef.Importance.HIGH, ""); EasyMock.expect(worker.getPlugins()).andReturn(plugins).times(3); EasyMock.expect(plugins.compareAndSwapLoaders(connectorMock)).andReturn(delegatingLoader); EasyMock.expect(worker.getPlugins()).andStubReturn(plugins); EasyMock.expect(plugins.newConnector(EasyMock.anyString())).andReturn(connectorMock); EasyMock.expect(connectorMock.config()).andStubReturn(configDef); EasyMock.expect(Plugins.compareAndSwapLoaders(delegatingLoader)).andReturn(pluginLoader); Callback<Herder.Created<ConnectorInfo>> callback = PowerMock.createMock(Callback.class); Capture<BadRequestException> capture = Capture.newInstance(); callback.onCompletion( EasyMock.capture(capture), EasyMock.isNull(Herder.Created.class) ); PowerMock.replayAll(); herder.putConnectorConfig(CONNECTOR_NAME, config, true, callback); assertEquals( capture.getValue().getMessage(), "Connector configuration is invalid and contains the following 1 error(s):\n" + error + "\n" + "You can also find the above list of errors at the endpoint `/{connectorType}/config/validate`" ); PowerMock.verifyAll(); }
|
@Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
StandaloneHerder extends AbstractHerder { @Override public synchronized void putConnectorConfig(String connName, final Map<String, String> config, boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { try { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return; } boolean created = false; if (configState.contains(connName)) { if (!allowReplace) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return; } worker.stopConnector(connName); } else { created = true; } if (!startConnector(config)) { callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); return; } updateConnectorTasks(connName); callback.onCompletion(null, new Created<>(created, createConnectorInfo(connName))); } catch (ConnectException e) { callback.onCompletion(e, null); } } StandaloneHerder(Worker worker); StandaloneHerder(Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
MemoryConfigBackingStore configBackingStore); synchronized void start(); synchronized void stop(); @Override int generation(); @Override synchronized void connectors(Callback<Collection<String>> callback); @Override synchronized void connectorInfo(String connName, Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override synchronized void deleteConnectorConfig(String connName, Callback<Created<ConnectorInfo>> callback); @Override synchronized void putConnectorConfig(String connName,
final Map<String, String> config,
boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override synchronized void requestTaskReconfiguration(String connName); @Override synchronized void taskConfigs(String connName, Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(String connName, List<Map<String, String>> configs, Callback<Void> callback); @Override synchronized void restartTask(ConnectorTaskId taskId, Callback<Void> cb); @Override synchronized void restartConnector(String connName, Callback<Void> cb); }
|
@Test public void testJavaLibraryClasses() throws Exception { assertFalse(PluginUtils.shouldLoadInIsolation("java.")); assertFalse(PluginUtils.shouldLoadInIsolation("java.lang.Object")); assertFalse(PluginUtils.shouldLoadInIsolation("java.lang.String")); assertFalse(PluginUtils.shouldLoadInIsolation("java.util.HashMap$Entry")); assertFalse(PluginUtils.shouldLoadInIsolation("java.io.Serializable")); assertFalse(PluginUtils.shouldLoadInIsolation("javax.rmi.")); assertFalse(PluginUtils.shouldLoadInIsolation( "javax.management.loading.ClassLoaderRepository") ); assertFalse(PluginUtils.shouldLoadInIsolation("org.omg.CORBA.")); assertFalse(PluginUtils.shouldLoadInIsolation("org.omg.CORBA.Object")); assertFalse(PluginUtils.shouldLoadInIsolation("org.w3c.dom.")); assertFalse(PluginUtils.shouldLoadInIsolation("org.w3c.dom.traversal.TreeWalker")); assertFalse(PluginUtils.shouldLoadInIsolation("org.xml.sax.")); assertFalse(PluginUtils.shouldLoadInIsolation("org.xml.sax.EntityResolver")); }
|
public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
// Maps with non-string (here int32) keys cannot use a plain JSON object, so the envelope
// encodes them as an array of [key, value] pairs; this asserts that format round-trips.
@Test public void mapToConnectNonStringKeys() { byte[] mapJson = "{ \"schema\": { \"type\": \"map\", \"keys\": { \"type\" : \"int32\" }, \"values\": { \"type\" : \"int32\" } }, \"payload\": [ [1, 12], [2, 15] ] }".getBytes(); Map<Integer, Integer> expected = new HashMap<>(); expected.put(1, 12); expected.put(2, 15); assertEquals(new SchemaAndValue(SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build(), expected), converter.toConnectData(TOPIC, mapJson)); }
|
/**
 * Converts serialized JSON bytes from the given topic into Connect data.
 * With schemas enabled, the payload must be an envelope object with exactly the
 * "schema" and "payload" fields; with schemas disabled, the raw JSON is wrapped
 * in a synthetic envelope carrying a null schema.
 *
 * @throws DataException on deserialization failure or an invalid envelope
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    final JsonNode parsed;
    try {
        parsed = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
    }

    if (enableSchemas) {
        // Envelope must be an object with exactly two fields: "schema" and "payload".
        final boolean validEnvelope = parsed != null
                && parsed.isObject()
                && parsed.size() == 2
                && parsed.has("schema")
                && parsed.has("payload");
        if (!validEnvelope)
            throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");
        return jsonToConnect(parsed);
    }

    // Schemas disabled: treat the whole document as the payload with no schema.
    ObjectNode envelope = JsonNodeFactory.instance.objectNode();
    envelope.set("schema", null);
    envelope.set("payload", parsed);
    return jsonToConnect(envelope);
}
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
// slf4j must be shared with the framework (loaded by the parent) so plugin logging
// binds to the same logging backend as the runtime.
@Test public void testThirdPartyClasses() throws Exception { assertFalse(PluginUtils.shouldLoadInIsolation("org.slf4j.")); assertFalse(PluginUtils.shouldLoadInIsolation("org.slf4j.LoggerFactory")); }
|
// A name is loaded in plugin isolation unless it is blacklisted (framework/JDK packages)
// and not explicitly whitelisted (bundled plugin packages).
public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
// Connect framework and kafka-clients API types must come from the parent loader so
// plugin instances share the framework's Connector/Task/Converter class identities.
@Test public void testConnectFrameworkClasses() throws Exception { assertFalse(PluginUtils.shouldLoadInIsolation("org.apache.kafka.common.")); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.common.config.AbstractConfig") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.common.config.ConfigDef$Type") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.common.serialization.Deserializer") ); assertFalse(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.")); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.connector.Connector") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.source.SourceConnector") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.sink.SinkConnector") ); assertFalse(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.connector.Task")); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.source.SourceTask") ); assertFalse(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.sink.SinkTask")); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.transforms.Transformation") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.storage.Converter") ); assertFalse(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.storage.OffsetBackingStore") ); }
|
// A name is loaded in plugin isolation unless it is blacklisted (framework/JDK packages)
// and not explicitly whitelisted (bundled plugin packages).
public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
// Bundled plugin families (transforms, json converter, file connectors, converters,
// StringConverter) are explicitly whitelisted: they DO load in isolation even though
// they live under the otherwise-blacklisted org.apache.kafka.connect prefix.
@Test public void testAllowedConnectFrameworkClasses() throws Exception { assertTrue(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.transforms.")); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.transforms.ExtractField") ); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.transforms.ExtractField$Key") ); assertTrue(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.json.")); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.json.JsonConverter") ); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.json.JsonConverter$21") ); assertTrue(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.file.")); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.file.FileStreamSourceTask") ); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.file.FileStreamSinkConnector") ); assertTrue(PluginUtils.shouldLoadInIsolation("org.apache.kafka.connect.converters.")); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.converters.ByteArrayConverter") ); assertTrue(PluginUtils.shouldLoadInIsolation( "org.apache.kafka.connect.storage.StringConverter") ); }
|
// A name is loaded in plugin isolation unless it is blacklisted (framework/JDK packages)
// and not explicitly whitelisted (bundled plugin packages).
public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
PluginUtils { public static boolean shouldLoadInIsolation(String name) { return !(name.matches(BLACKLIST) && !name.matches(WHITELIST)); } static boolean shouldLoadInIsolation(String name); static boolean isConcrete(Class<?> klass); static boolean isArchive(Path path); static boolean isClassFile(Path path); static List<Path> pluginLocations(Path topPath); static List<Path> pluginUrls(Path topPath); static String simpleName(PluginDesc<?> plugin); static String prunedName(PluginDesc<?> plugin); static boolean isAliasUnique(
PluginDesc<U> alias,
Collection<PluginDesc<U>> plugins
); }
|
// A PluginDesc built from a plugin-path loader reports the plugin class, the given
// version string (release, SNAPSHOT, or undefined), and that loader's location.
@Test public void testRegularPluginDesc() throws Exception { PluginDesc<Connector> connectorDesc = new PluginDesc<>( Connector.class, regularVersion, pluginLoader ); assertPluginDesc(connectorDesc, Connector.class, regularVersion, pluginLoader.location()); PluginDesc<Converter> converterDesc = new PluginDesc<>( Converter.class, snaphotVersion, pluginLoader ); assertPluginDesc(converterDesc, Converter.class, snaphotVersion, pluginLoader.location()); PluginDesc<Transformation> transformDesc = new PluginDesc<>( Transformation.class, noVersion, pluginLoader ); assertPluginDesc(transformDesc, Transformation.class, noVersion, pluginLoader.location()); }
|
@JsonProperty("location") public String location() { return location; }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
// A null version is normalized to the literal string "null", and descriptors built from
// the system classloader report the synthetic location "classpath".
@Test public void testPluginDescWithNullVersion() throws Exception { String nullVersion = "null"; PluginDesc<SourceConnector> connectorDesc = new PluginDesc<>( SourceConnector.class, null, pluginLoader ); assertPluginDesc( connectorDesc, SourceConnector.class, nullVersion, pluginLoader.location() ); String location = "classpath"; PluginDesc<Converter> converterDesc = new PluginDesc<>( Converter.class, null, systemLoader ); assertPluginDesc(converterDesc, Converter.class, nullVersion, location); }
|
@JsonProperty("location") public String location() { return location; }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @JsonProperty("location") public String location() { return location; } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
// Equality (and hashCode) ignore the loader location: the same (class, version) pair is
// equal whether discovered on the plugin path or the classpath; differing versions are not.
@Test public void testPluginDescEquality() throws Exception { PluginDesc<Connector> connectorDescPluginPath = new PluginDesc<>( Connector.class, snaphotVersion, pluginLoader ); PluginDesc<Connector> connectorDescClasspath = new PluginDesc<>( Connector.class, snaphotVersion, systemLoader ); assertEquals(connectorDescPluginPath, connectorDescClasspath); assertEquals(connectorDescPluginPath.hashCode(), connectorDescClasspath.hashCode()); PluginDesc<Converter> converterDescPluginPath = new PluginDesc<>( Converter.class, noVersion, pluginLoader ); PluginDesc<Converter> converterDescClasspath = new PluginDesc<>( Converter.class, noVersion, systemLoader ); assertEquals(converterDescPluginPath, converterDescClasspath); assertEquals(converterDescPluginPath.hashCode(), converterDescClasspath.hashCode()); PluginDesc<Transformation> transformDescPluginPath = new PluginDesc<>( Transformation.class, null, pluginLoader ); PluginDesc<Transformation> transformDescClasspath = new PluginDesc<>( Transformation.class, noVersion, pluginLoader ); assertNotEquals(transformDescPluginPath, transformDescClasspath); }
|
// Identity is (class, version, type) only; the loader location is deliberately excluded
// so the same plugin found via the plugin path and the classpath hashes equal.
// Must stay consistent with equals().
@Override public int hashCode() { return Objects.hash(klass, version, type); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @Override public int hashCode() { return Objects.hash(klass, version, type); } }
|
PluginDesc implements Comparable<PluginDesc<T>> { @Override public int hashCode() { return Objects.hash(klass, version, type); } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @Override public int hashCode() { return Objects.hash(klass, version, type); } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
PluginDesc implements Comparable<PluginDesc<T>> { @Override public int hashCode() { return Objects.hash(klass, version, type); } PluginDesc(Class<? extends T> klass, String version, ClassLoader loader); @Override String toString(); Class<? extends T> pluginClass(); @JsonProperty("class") String className(); @JsonProperty("version") String version(); PluginType type(); @JsonProperty("type") String typeName(); @JsonProperty("location") String location(); @Override boolean equals(Object o); @Override int hashCode(); @Override int compareTo(PluginDesc other); }
|
// Restarting an assigned, running connector on the owning member stops it and starts it
// again with the same configuration; the callback completes without error.
@Test public void testRestartConnector() throws Exception { EasyMock.expect(worker.connectorTaskConfigs(CONN1, MAX_TASKS, null)).andStubReturn(TASK_CONFIGS); EasyMock.expect(member.memberId()).andStubReturn("leader"); EasyMock.expect(worker.getPlugins()).andReturn(plugins); expectRebalance(1, singletonList(CONN1), Collections.<ConnectorTaskId>emptyList()); expectPostRebalanceCatchup(SNAPSHOT); member.poll(EasyMock.anyInt()); PowerMock.expectLastCall(); worker.startConnector(EasyMock.eq(CONN1), EasyMock.<Map<String, String>>anyObject(), EasyMock.<ConnectorContext>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); PowerMock.expectLastCall().andReturn(true); EasyMock.expect(worker.isRunning(CONN1)).andReturn(true); member.wakeup(); PowerMock.expectLastCall(); member.ensureActive(); PowerMock.expectLastCall(); member.poll(EasyMock.anyInt()); PowerMock.expectLastCall(); worker.stopConnector(CONN1); PowerMock.expectLastCall().andReturn(true); EasyMock.expect(worker.getPlugins()).andReturn(plugins); worker.startConnector(EasyMock.eq(CONN1), EasyMock.<Map<String, String>>anyObject(), EasyMock.<ConnectorContext>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); PowerMock.expectLastCall().andReturn(true); EasyMock.expect(worker.isRunning(CONN1)).andReturn(true); PowerMock.replayAll(); herder.tick(); FutureCallback<Void> callback = new FutureCallback<>(); herder.restartConnector(CONN1, callback); herder.tick(); callback.get(1000L, TimeUnit.MILLISECONDS); PowerMock.verifyAll(); }
|
// Herder-side connector restart. Runs asynchronously on the herder thread via
// addRequest(); every outcome (success or failure) is reported through the callback.
@Override public void restartConnector(final String connName, final Callback<Void> callback) {
    addRequest(new Callable<Void>() {
        @Override public Void call() throws Exception {
            // A pending rebalance invalidates the current assignment; bail out early.
            if (checkRebalanceNeeded(callback))
                return null;
            // Validate the connector exists in the current config snapshot.
            if (!configState.connectors().contains(connName)) {
                callback.onCompletion(new NotFoundException("Unknown connector: " + connName), null);
                return null;
            }
            if (assignment.connectors().contains(connName)) {
                // Connector is owned by this worker: stop then start it again.
                try {
                    worker.stopConnector(connName);
                    if (startConnector(connName))
                        callback.onCompletion(null, null);
                    else
                        callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null);
                } catch (Throwable t) {
                    callback.onCompletion(t, null);
                }
            } else if (isLeader()) {
                // Leader knows the owner: tell the caller where to forward the request.
                callback.onCompletion(new NotAssignedException("Cannot restart connector since it is not assigned to this member", member.ownerUrl(connName)), null);
            } else {
                // Neither owner nor leader: redirect the caller to the leader.
                callback.onCompletion(new NotLeaderException("Cannot restart connector since it is not assigned to this member", leaderUrl()), null);
            }
            return null;
        }
    }, forwardErrorCallback(callback));
}
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartConnector(final String connName, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(connName)) { callback.onCompletion(new NotFoundException("Unknown connector: " + connName), null); return null; } if (assignment.connectors().contains(connName)) { try { worker.stopConnector(connName); if (startConnector(connName)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart connector since it is not assigned to this member", member.ownerUrl(connName)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart connector since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartConnector(final String connName, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(connName)) { callback.onCompletion(new NotFoundException("Unknown connector: " + connName), null); return null; } if (assignment.connectors().contains(connName)) { try { worker.stopConnector(connName); if (startConnector(connName)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart connector since it is not assigned to this member", member.ownerUrl(connName)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart connector since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartConnector(final String connName, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(connName)) { callback.onCompletion(new NotFoundException("Unknown connector: " + connName), null); return null; } if (assignment.connectors().contains(connName)) { try { worker.stopConnector(connName); if (startConnector(connName)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart connector since it is not assigned to this member", member.ownerUrl(connName)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart connector since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartConnector(final String connName, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(connName)) { callback.onCompletion(new NotFoundException("Unknown connector: " + connName), null); return null; } if (assignment.connectors().contains(connName)) { try { worker.stopConnector(connName); if (startConnector(connName)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start connector: " + connName), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart connector since it is not assigned to this member", member.ownerUrl(connName)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart connector since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
// Restarting an assigned task on the owning member stops and awaits the task, then
// starts it again with the same configuration; the callback completes without error.
@Test public void testRestartTask() throws Exception { EasyMock.expect(worker.connectorTaskConfigs(CONN1, MAX_TASKS, null)).andStubReturn(TASK_CONFIGS); EasyMock.expect(member.memberId()).andStubReturn("leader"); expectRebalance(1, Collections.<String>emptyList(), singletonList(TASK0)); expectPostRebalanceCatchup(SNAPSHOT); member.poll(EasyMock.anyInt()); PowerMock.expectLastCall(); worker.startTask(EasyMock.eq(TASK0), EasyMock.<Map<String, String>>anyObject(), EasyMock.<Map<String, String>>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); PowerMock.expectLastCall().andReturn(true); member.wakeup(); PowerMock.expectLastCall(); member.ensureActive(); PowerMock.expectLastCall(); member.poll(EasyMock.anyInt()); PowerMock.expectLastCall(); worker.stopAndAwaitTask(TASK0); PowerMock.expectLastCall(); worker.startTask(EasyMock.eq(TASK0), EasyMock.<Map<String, String>>anyObject(), EasyMock.<Map<String, String>>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED)); PowerMock.expectLastCall().andReturn(true); PowerMock.replayAll(); herder.tick(); FutureCallback<Void> callback = new FutureCallback<>(); herder.restartTask(TASK0, callback); herder.tick(); callback.get(1000L, TimeUnit.MILLISECONDS); PowerMock.verifyAll(); }
|
/**
 * Restarts the given task asynchronously on the herder's request queue.
 * The outcome (success, not-found, or a redirect to the owning worker/leader)
 * is reported exclusively through {@code callback}.
 */
@Override public void restartTask(final ConnectorTaskId id, final Callback<Void> callback) {
    addRequest(new Callable<Void>() {
        @Override public Void call() throws Exception {
            // A pending rebalance already fails the callback; nothing more to do here.
            if (checkRebalanceNeeded(callback)) return null;
            // Validate that both the connector and this specific task exist in the config snapshot.
            if (!configState.connectors().contains(id.connector())) {
                callback.onCompletion(new NotFoundException("Unknown connector: " + id.connector()), null);
                return null;
            }
            if (configState.taskConfig(id) == null) {
                callback.onCompletion(new NotFoundException("Unknown task: " + id), null);
                return null;
            }
            if (assignment.tasks().contains(id)) {
                // Task is assigned to this worker: stop it and start it again in place.
                try {
                    worker.stopAndAwaitTask(id);
                    if (startTask(id)) callback.onCompletion(null, null);
                    else callback.onCompletion(new ConnectException("Failed to start task: " + id), null);
                } catch (Throwable t) {
                    // Any failure is routed to the callback rather than propagated.
                    callback.onCompletion(t, null);
                }
            } else if (isLeader()) {
                // The leader can resolve the owning member's URL and redirect the caller there.
                callback.onCompletion(new NotAssignedException("Cannot restart task since it is not assigned to this member", member.ownerUrl(id)), null);
            } else {
                // Not assigned here and not the leader: redirect to the leader, which knows the assignment.
                callback.onCompletion(new NotLeaderException("Cannot restart task since it is not assigned to this member", leaderUrl()), null);
            }
            return null;
        }
    }, forwardErrorCallback(callback));
}
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartTask(final ConnectorTaskId id, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(id.connector())) { callback.onCompletion(new NotFoundException("Unknown connector: " + id.connector()), null); return null; } if (configState.taskConfig(id) == null) { callback.onCompletion(new NotFoundException("Unknown task: " + id), null); return null; } if (assignment.tasks().contains(id)) { try { worker.stopAndAwaitTask(id); if (startTask(id)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start task: " + id), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart task since it is not assigned to this member", member.ownerUrl(id)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart task since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartTask(final ConnectorTaskId id, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(id.connector())) { callback.onCompletion(new NotFoundException("Unknown connector: " + id.connector()), null); return null; } if (configState.taskConfig(id) == null) { callback.onCompletion(new NotFoundException("Unknown task: " + id), null); return null; } if (assignment.tasks().contains(id)) { try { worker.stopAndAwaitTask(id); if (startTask(id)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start task: " + id), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart task since it is not assigned to this member", member.ownerUrl(id)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart task since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartTask(final ConnectorTaskId id, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(id.connector())) { callback.onCompletion(new NotFoundException("Unknown connector: " + id.connector()), null); return null; } if (configState.taskConfig(id) == null) { callback.onCompletion(new NotFoundException("Unknown task: " + id), null); return null; } if (assignment.tasks().contains(id)) { try { worker.stopAndAwaitTask(id); if (startTask(id)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start task: " + id), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart task since it is not assigned to this member", member.ownerUrl(id)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart task since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void restartTask(final ConnectorTaskId id, final Callback<Void> callback) { addRequest(new Callable<Void>() { @Override public Void call() throws Exception { if (checkRebalanceNeeded(callback)) return null; if (!configState.connectors().contains(id.connector())) { callback.onCompletion(new NotFoundException("Unknown connector: " + id.connector()), null); return null; } if (configState.taskConfig(id) == null) { callback.onCompletion(new NotFoundException("Unknown task: " + id), null); return null; } if (assignment.tasks().contains(id)) { try { worker.stopAndAwaitTask(id); if (startTask(id)) callback.onCompletion(null, null); else callback.onCompletion(new ConnectException("Failed to start task: " + id), null); } catch (Throwable t) { callback.onCompletion(t, null); } } else if (isLeader()) { callback.onCompletion(new NotAssignedException("Cannot restart task since it is not assigned to this member", member.ownerUrl(id)), null); } else { callback.onCompletion(new NotLeaderException("Cannot restart task since it is not assigned to this member", leaderUrl()), null); } return null; } }, forwardErrorCallback(callback)); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
// Requests must be processed ordered by delay, with ties broken by insertion order.
@Test
public void testRequestProcessingOrder() throws Exception {
    // Queue four requests whose delays deliberately do not match their insertion order.
    final DistributedHerder.HerderRequest midDelay = herder.addRequest(100, null, null);
    final DistributedHerder.HerderRequest shortDelay = herder.addRequest(10, null, null);
    final DistributedHerder.HerderRequest longDelayFirst = herder.addRequest(200, null, null);
    final DistributedHerder.HerderRequest longDelaySecond = herder.addRequest(200, null, null);

    // Smallest delay first; the two equal delays keep their relative insertion order.
    assertEquals(shortDelay, herder.requests.pollFirst());
    assertEquals(midDelay, herder.requests.pollFirst());
    assertEquals(longDelayFirst, herder.requests.pollFirst());
    assertEquals(longDelaySecond, herder.requests.pollFirst());
}
|
// Convenience overload: enqueue a herder request with zero delay, delegating to the
// delayed variant so all requests flow through a single scheduling path.
HerderRequest addRequest(Callable<Void> action, Callback<Void> callback) { return addRequest(0, action, callback); }
|
DistributedHerder extends AbstractHerder implements Runnable { HerderRequest addRequest(Callable<Void> action, Callback<Void> callback) { return addRequest(0, action, callback); } }
|
DistributedHerder extends AbstractHerder implements Runnable { HerderRequest addRequest(Callable<Void> action, Callback<Void> callback) { return addRequest(0, action, callback); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); }
|
DistributedHerder extends AbstractHerder implements Runnable { HerderRequest addRequest(Callable<Void> action, Callback<Void> callback) { return addRequest(0, action, callback); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
DistributedHerder extends AbstractHerder implements Runnable { HerderRequest addRequest(Callable<Void> action, Callback<Void> callback) { return addRequest(0, action, callback); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
// End-to-end check of a connector config update: read original config, submit an
// updated config (which restarts the connector), then read back the new config.
@Test public void testPutConnectorConfig() throws Exception {
    EasyMock.expect(member.memberId()).andStubReturn("leader");
    expectRebalance(1, Arrays.asList(CONN1), Collections.<ConnectorTaskId>emptyList());
    expectPostRebalanceCatchup(SNAPSHOT);
    // Initial tick: the connector is started with its original config.
    worker.startConnector(EasyMock.eq(CONN1), EasyMock.<Map<String, String>>anyObject(), EasyMock.<ConnectorContext>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED));
    PowerMock.expectLastCall().andReturn(true);
    EasyMock.expect(worker.isRunning(CONN1)).andReturn(true);
    EasyMock.expect(worker.connectorTaskConfigs(CONN1, MAX_TASKS, null)).andReturn(TASK_CONFIGS);
    member.wakeup();
    PowerMock.expectLastCall().anyTimes();
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    member.ensureActive();
    PowerMock.expectLastCall();
    // Validation of the updated config instantiates the connector via the plugin machinery.
    Connector connectorMock = PowerMock.createMock(Connector.class);
    EasyMock.expect(worker.getPlugins()).andReturn(plugins).times(5);
    EasyMock.expect(plugins.compareAndSwapLoaders(connectorMock)).andReturn(delegatingLoader);
    EasyMock.expect(plugins.newConnector(EasyMock.anyString())).andReturn(connectorMock);
    EasyMock.expect(connectorMock.config()).andReturn(new ConfigDef());
    EasyMock.expect(connectorMock.validate(CONN1_CONFIG_UPDATED)).andReturn(new Config(Collections.<ConfigValue>emptyList()));
    EasyMock.expect(Plugins.compareAndSwapLoaders(delegatingLoader)).andReturn(pluginLoader);
    // Writing the config fires the update listener, simulating the config-topic callback.
    configBackingStore.putConnectorConfig(CONN1, CONN1_CONFIG_UPDATED);
    PowerMock.expectLastCall().andAnswer(new IAnswer<Object>() {
        @Override
        public Object answer() throws Throwable {
            configUpdateListener.onConnectorConfigUpdate(CONN1);
            return null;
        }
    });
    EasyMock.expect(configBackingStore.snapshot()).andReturn(SNAPSHOT_UPDATED_CONN1_CONFIG);
    // The config update causes a stop followed by a restart with the new config.
    worker.stopConnector(CONN1);
    PowerMock.expectLastCall().andReturn(true);
    worker.startConnector(EasyMock.eq(CONN1), EasyMock.<Map<String, String>>anyObject(), EasyMock.<ConnectorContext>anyObject(), EasyMock.eq(herder), EasyMock.eq(TargetState.STARTED));
    PowerMock.expectLastCall().andReturn(true);
    EasyMock.expect(worker.isRunning(CONN1)).andReturn(true);
    EasyMock.expect(worker.connectorTaskConfigs(CONN1, MAX_TASKS, null)).andReturn(TASK_CONFIGS);
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    member.ensureActive();
    PowerMock.expectLastCall();
    member.poll(EasyMock.anyInt());
    PowerMock.expectLastCall();
    PowerMock.replayAll();

    // Read back the original config.
    FutureCallback<Map<String, String>> connectorConfigCb = new FutureCallback<>();
    herder.connectorConfig(CONN1, connectorConfigCb);
    herder.tick();
    assertTrue(connectorConfigCb.isDone());
    assertEquals(CONN1_CONFIG, connectorConfigCb.get());

    // Submit the updated config; since the connector already exists this is an update (created == false).
    FutureCallback<Herder.Created<ConnectorInfo>> putConfigCb = new FutureCallback<>();
    herder.putConnectorConfig(CONN1, CONN1_CONFIG_UPDATED, true, putConfigCb);
    herder.tick();
    assertTrue(putConfigCb.isDone());
    ConnectorInfo updatedInfo = new ConnectorInfo(CONN1, CONN1_CONFIG_UPDATED, Arrays.asList(TASK0, TASK1, TASK2));
    assertEquals(new Herder.Created<>(false, updatedInfo), putConfigCb.get());

    // Read back the config again and verify the update took effect.
    connectorConfigCb = new FutureCallback<>();
    herder.connectorConfig(CONN1, connectorConfigCb);
    herder.tick();
    assertTrue(connectorConfigCb.isDone());
    assertEquals(CONN1_CONFIG_UPDATED, connectorConfigCb.get());

    PowerMock.verifyAll();
}
|
/**
 * Writes (creates or replaces) a connector's configuration. The work is queued on the
 * herder's request thread; only the leader may perform the write, and the result —
 * including whether the connector was newly created — is delivered via {@code callback}.
 */
@Override
public void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
                               final Callback<Created<ConnectorInfo>> callback) {
    log.trace("Submitting connector config write request {}", connName);

    addRequest(
        new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                // Reject the request up front if the supplied config fails validation.
                if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) {
                    return null;
                }

                log.trace("Handling connector config request {}", connName);
                // Only the leader may write to the config backing store; others redirect the caller.
                if (!isLeader()) {
                    callback.onCompletion(new NotLeaderException("Only the leader can set connector configs.", leaderUrl()), null);
                    return null;
                }
                boolean exists = configState.contains(connName);
                if (!allowReplace && exists) {
                    // Create-only semantics: fail rather than overwrite an existing connector.
                    callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null);
                    return null;
                }

                log.trace("Submitting connector config {} {} {}", connName, allowReplace, configState.connectors());
                configBackingStore.putConnectorConfig(connName, config);

                // Report whether this was a create (!exists) or an update, with the current task ids.
                ConnectorInfo info = new ConnectorInfo(connName, config, configState.tasks(connName));
                callback.onCompletion(null, new Created<>(!exists, info));
                return null;
            }
        },
        forwardErrorCallback(callback)
    );
}
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { log.trace("Submitting connector config write request {}", connName); addRequest( new Callable<Void>() { @Override public Void call() throws Exception { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return null; } log.trace("Handling connector config request {}", connName); if (!isLeader()) { callback.onCompletion(new NotLeaderException("Only the leader can set connector configs.", leaderUrl()), null); return null; } boolean exists = configState.contains(connName); if (!allowReplace && exists) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return null; } log.trace("Submitting connector config {} {} {}", connName, allowReplace, configState.connectors()); configBackingStore.putConnectorConfig(connName, config); ConnectorInfo info = new ConnectorInfo(connName, config, configState.tasks(connName)); callback.onCompletion(null, new Created<>(!exists, info)); return null; } }, forwardErrorCallback(callback) ); } }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { log.trace("Submitting connector config write request {}", connName); addRequest( new Callable<Void>() { @Override public Void call() throws Exception { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return null; } log.trace("Handling connector config request {}", connName); if (!isLeader()) { callback.onCompletion(new NotLeaderException("Only the leader can set connector configs.", leaderUrl()), null); return null; } boolean exists = configState.contains(connName); if (!allowReplace && exists) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return null; } log.trace("Submitting connector config {} {} {}", connName, allowReplace, configState.connectors()); configBackingStore.putConnectorConfig(connName, config); ConnectorInfo info = new ConnectorInfo(connName, config, configState.tasks(connName)); callback.onCompletion(null, new Created<>(!exists, info)); return null; } }, forwardErrorCallback(callback) ); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { log.trace("Submitting connector config write request {}", connName); addRequest( new Callable<Void>() { @Override public Void call() throws Exception { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return null; } log.trace("Handling connector config request {}", connName); if (!isLeader()) { callback.onCompletion(new NotLeaderException("Only the leader can set connector configs.", leaderUrl()), null); return null; } boolean exists = configState.contains(connName); if (!allowReplace && exists) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return null; } log.trace("Submitting connector config {} {} {}", connName, allowReplace, configState.connectors()); configBackingStore.putConnectorConfig(connName, config); ConnectorInfo info = new ConnectorInfo(connName, config, configState.tasks(connName)); callback.onCompletion(null, new Created<>(!exists, info)); return null; } }, forwardErrorCallback(callback) ); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
DistributedHerder extends AbstractHerder implements Runnable { @Override public void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace, final Callback<Created<ConnectorInfo>> callback) { log.trace("Submitting connector config write request {}", connName); addRequest( new Callable<Void>() { @Override public Void call() throws Exception { if (maybeAddConfigErrors(validateConnectorConfig(config), callback)) { return null; } log.trace("Handling connector config request {}", connName); if (!isLeader()) { callback.onCompletion(new NotLeaderException("Only the leader can set connector configs.", leaderUrl()), null); return null; } boolean exists = configState.contains(connName); if (!allowReplace && exists) { callback.onCompletion(new AlreadyExistsException("Connector " + connName + " already exists"), null); return null; } log.trace("Submitting connector config {} {} {}", connName, allowReplace, configState.connectors()); configBackingStore.putConnectorConfig(connName, config); ConnectorInfo info = new ConnectorInfo(connName, config, configState.tasks(connName)); callback.onCompletion(null, new Created<>(!exists, info)); return null; } }, forwardErrorCallback(callback) ); } DistributedHerder(DistributedConfig config,
Time time,
Worker worker,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
String restUrl); DistributedHerder(DistributedConfig config,
Worker worker,
String workerId,
StatusBackingStore statusBackingStore,
ConfigBackingStore configBackingStore,
WorkerGroupMember member,
String restUrl,
Time time); @Override void start(); @Override void run(); void tick(); void halt(); @Override void stop(); @Override void connectors(final Callback<Collection<String>> callback); @Override void connectorInfo(final String connName, final Callback<ConnectorInfo> callback); @Override void connectorConfig(String connName, final Callback<Map<String, String>> callback); @Override void deleteConnectorConfig(final String connName, final Callback<Created<ConnectorInfo>> callback); @Override void putConnectorConfig(final String connName, final Map<String, String> config, final boolean allowReplace,
final Callback<Created<ConnectorInfo>> callback); @Override void requestTaskReconfiguration(final String connName); @Override void taskConfigs(final String connName, final Callback<List<TaskInfo>> callback); @Override void putTaskConfigs(final String connName, final List<Map<String, String>> configs, final Callback<Void> callback); @Override void restartConnector(final String connName, final Callback<Void> callback); @Override void restartTask(final ConnectorTaskId id, final Callback<Void> callback); @Override int generation(); }
|
// A JSON envelope carrying a struct schema and payload must convert to a Connect Struct.
@Test
public void structToConnect() {
    // Envelope JSON declaring a two-field struct schema plus a matching payload.
    byte[] serializedInput = "{ \"schema\": { \"type\": \"struct\", \"fields\": [{ \"field\": \"field1\", \"type\": \"boolean\" }, { \"field\": \"field2\", \"type\": \"string\" }] }, \"payload\": { \"field1\": true, \"field2\": \"string\" } }".getBytes();

    SchemaAndValue actual = converter.toConnectData(TOPIC, serializedInput);

    // Expected Connect representation: a struct schema and the populated Struct value.
    Schema structSchema = SchemaBuilder.struct().field("field1", Schema.BOOLEAN_SCHEMA).field("field2", Schema.STRING_SCHEMA).build();
    Struct structValue = new Struct(structSchema).put("field1", true).put("field2", "string");
    assertEquals(new SchemaAndValue(structSchema, structValue), actual);
}
|
/**
 * Deserializes raw bytes into Connect data. With schemas enabled the JSON must be a
 * two-field {schema, payload} envelope; with schemas disabled the raw JSON is wrapped
 * into a schema-less envelope before conversion.
 */
@Override
public SchemaAndValue toConnectData(String topic, byte[] value) {
    JsonNode parsed;
    try {
        parsed = deserializer.deserialize(topic, value);
    } catch (SerializationException e) {
        throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e);
    }

    if (enableSchemas) {
        // The envelope must be an object with exactly the "schema" and "payload" fields.
        boolean wellFormedEnvelope = parsed != null && parsed.isObject() && parsed.size() == 2
                && parsed.has("schema") && parsed.has("payload");
        if (!wellFormedEnvelope)
            throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." +
                    " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration.");
    } else {
        // Schema-less mode: synthesize an envelope with a null schema around the raw JSON.
        ObjectNode envelope = JsonNodeFactory.instance.objectNode();
        envelope.set("schema", null);
        envelope.set("payload", parsed);
        parsed = envelope;
    }

    return jsonToConnect(parsed);
}
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testMetadata() { EasyMock.expect(configStorage.snapshot()).andReturn(configState1); PowerMock.replayAll(); List<ProtocolMetadata> serialized = coordinator.metadata(); assertEquals(1, serialized.size()); ProtocolMetadata defaultMetadata = serialized.get(0); assertEquals(WorkerCoordinator.DEFAULT_SUBPROTOCOL, defaultMetadata.name()); ConnectProtocol.WorkerState state = ConnectProtocol.deserializeMetadata(defaultMetadata.metadata()); assertEquals(1, state.offset()); PowerMock.verifyAll(); }
|
@Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testJoinLeaderCannotAssign() { EasyMock.expect(configStorage.snapshot()).andReturn(configState1); EasyMock.expect(configStorage.snapshot()).andReturn(configState2); PowerMock.replayAll(); final String memberId = "member"; client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE)); coordinator.ensureCoordinatorReady(); client.prepareResponse(joinGroupFollowerResponse(1, memberId, "leader", Errors.NONE)); MockClient.RequestMatcher matcher = new MockClient.RequestMatcher() { @Override public boolean matches(AbstractRequest body) { SyncGroupRequest sync = (SyncGroupRequest) body; return sync.memberId().equals(memberId) && sync.generationId() == 1 && sync.groupAssignment().isEmpty(); } }; client.prepareResponse(matcher, syncGroupResponse(ConnectProtocol.Assignment.CONFIG_MISMATCH, "leader", 10L, Collections.<String>emptyList(), Collections.<ConnectorTaskId>emptyList(), Errors.NONE)); client.prepareResponse(joinGroupFollowerResponse(1, memberId, "leader", Errors.NONE)); client.prepareResponse(matcher, syncGroupResponse(ConnectProtocol.Assignment.NO_ERROR, "leader", 1L, Collections.<String>emptyList(), Collections.singletonList(taskId1x0), Errors.NONE)); coordinator.ensureActiveGroup(); PowerMock.verifyAll(); }
|
public String memberId() { Generation generation = generation(); if (generation != null) return generation.memberId; return JoinGroupRequest.UNKNOWN_MEMBER_ID; }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public String memberId() { Generation generation = generation(); if (generation != null) return generation.memberId; return JoinGroupRequest.UNKNOWN_MEMBER_ID; } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public String memberId() { Generation generation = generation(); if (generation != null) return generation.memberId; return JoinGroupRequest.UNKNOWN_MEMBER_ID; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public String memberId() { Generation generation = generation(); if (generation != null) return generation.memberId; return JoinGroupRequest.UNKNOWN_MEMBER_ID; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public String memberId() { Generation generation = generation(); if (generation != null) return generation.memberId; return JoinGroupRequest.UNKNOWN_MEMBER_ID; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testRejoinGroup() { EasyMock.expect(configStorage.snapshot()).andReturn(configState1); EasyMock.expect(configStorage.snapshot()).andReturn(configState1); PowerMock.replayAll(); client.prepareResponse(groupCoordinatorResponse(node, Errors.NONE)); coordinator.ensureCoordinatorReady(); client.prepareResponse(joinGroupFollowerResponse(1, "consumer", "leader", Errors.NONE)); client.prepareResponse(syncGroupResponse(ConnectProtocol.Assignment.NO_ERROR, "leader", 1L, Collections.<String>emptyList(), Collections.singletonList(taskId1x0), Errors.NONE)); coordinator.ensureActiveGroup(); assertEquals(0, rebalanceListener.revokedCount); assertEquals(1, rebalanceListener.assignedCount); assertFalse(rebalanceListener.assignment.failed()); assertEquals(1L, rebalanceListener.assignment.offset()); assertEquals(Collections.emptyList(), rebalanceListener.assignment.connectors()); assertEquals(Collections.singletonList(taskId1x0), rebalanceListener.assignment.tasks()); coordinator.requestRejoin(); client.prepareResponse(joinGroupFollowerResponse(1, "consumer", "leader", Errors.NONE)); client.prepareResponse(syncGroupResponse(ConnectProtocol.Assignment.NO_ERROR, "leader", 1L, Collections.singletonList(connectorId1), Collections.<ConnectorTaskId>emptyList(), Errors.NONE)); coordinator.ensureActiveGroup(); assertEquals(1, rebalanceListener.revokedCount); assertEquals(Collections.emptyList(), rebalanceListener.revokedConnectors); assertEquals(Collections.singletonList(taskId1x0), rebalanceListener.revokedTasks); assertEquals(2, rebalanceListener.assignedCount); assertFalse(rebalanceListener.assignment.failed()); assertEquals(1L, rebalanceListener.assignment.offset()); assertEquals(Collections.singletonList(connectorId1), rebalanceListener.assignment.connectors()); assertEquals(Collections.emptyList(), rebalanceListener.assignment.tasks()); PowerMock.verifyAll(); }
|
public void requestRejoin() { rejoinRequested = true; }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public void requestRejoin() { rejoinRequested = true; } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public void requestRejoin() { rejoinRequested = true; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public void requestRejoin() { rejoinRequested = true; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { public void requestRejoin() { rejoinRequested = true; } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testLeaderPerformAssignment1() throws Exception { EasyMock.expect(configStorage.snapshot()).andReturn(configState1); PowerMock.replayAll(); coordinator.metadata(); Map<String, ByteBuffer> configs = new HashMap<>(); configs.put("leader", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(LEADER_URL, 1L))); configs.put("member", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(MEMBER_URL, 1L))); Map<String, ByteBuffer> result = Whitebox.invokeMethod(coordinator, "performAssignment", "leader", WorkerCoordinator.DEFAULT_SUBPROTOCOL, configs); ConnectProtocol.Assignment leaderAssignment = ConnectProtocol.deserializeAssignment(result.get("leader")); assertEquals(false, leaderAssignment.failed()); assertEquals("leader", leaderAssignment.leader()); assertEquals(1, leaderAssignment.offset()); assertEquals(Collections.singletonList(connectorId1), leaderAssignment.connectors()); assertEquals(Collections.emptyList(), leaderAssignment.tasks()); ConnectProtocol.Assignment memberAssignment = ConnectProtocol.deserializeAssignment(result.get("member")); assertEquals(false, memberAssignment.failed()); assertEquals("leader", memberAssignment.leader()); assertEquals(1, memberAssignment.offset()); assertEquals(Collections.emptyList(), memberAssignment.connectors()); assertEquals(Collections.singletonList(taskId1x0), memberAssignment.tasks()); PowerMock.verifyAll(); }
|
@Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testLeaderPerformAssignment2() throws Exception { EasyMock.expect(configStorage.snapshot()).andReturn(configState2); PowerMock.replayAll(); coordinator.metadata(); Map<String, ByteBuffer> configs = new HashMap<>(); configs.put("leader", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(LEADER_URL, 1L))); configs.put("member", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(MEMBER_URL, 1L))); Map<String, ByteBuffer> result = Whitebox.invokeMethod(coordinator, "performAssignment", "leader", WorkerCoordinator.DEFAULT_SUBPROTOCOL, configs); ConnectProtocol.Assignment leaderAssignment = ConnectProtocol.deserializeAssignment(result.get("leader")); assertEquals(false, leaderAssignment.failed()); assertEquals("leader", leaderAssignment.leader()); assertEquals(1, leaderAssignment.offset()); assertEquals(Collections.singletonList(connectorId1), leaderAssignment.connectors()); assertEquals(Arrays.asList(taskId1x0, taskId2x0), leaderAssignment.tasks()); ConnectProtocol.Assignment memberAssignment = ConnectProtocol.deserializeAssignment(result.get("member")); assertEquals(false, memberAssignment.failed()); assertEquals("leader", memberAssignment.leader()); assertEquals(1, memberAssignment.offset()); assertEquals(Collections.singletonList(connectorId2), memberAssignment.connectors()); assertEquals(Collections.singletonList(taskId1x1), memberAssignment.tasks()); PowerMock.verifyAll(); }
|
@Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testLeaderPerformAssignmentSingleTaskConnectors() throws Exception { EasyMock.expect(configStorage.snapshot()).andReturn(configStateSingleTaskConnectors); PowerMock.replayAll(); coordinator.metadata(); Map<String, ByteBuffer> configs = new HashMap<>(); configs.put("leader", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(LEADER_URL, 1L))); configs.put("member", ConnectProtocol.serializeMetadata(new ConnectProtocol.WorkerState(MEMBER_URL, 1L))); Map<String, ByteBuffer> result = Whitebox.invokeMethod(coordinator, "performAssignment", "leader", WorkerCoordinator.DEFAULT_SUBPROTOCOL, configs); ConnectProtocol.Assignment leaderAssignment = ConnectProtocol.deserializeAssignment(result.get("leader")); assertEquals(false, leaderAssignment.failed()); assertEquals("leader", leaderAssignment.leader()); assertEquals(1, leaderAssignment.offset()); assertEquals(Arrays.asList(connectorId1, connectorId3), leaderAssignment.connectors()); assertEquals(Arrays.asList(taskId2x0), leaderAssignment.tasks()); ConnectProtocol.Assignment memberAssignment = ConnectProtocol.deserializeAssignment(result.get("member")); assertEquals(false, memberAssignment.failed()); assertEquals("leader", memberAssignment.leader()); assertEquals(1, memberAssignment.offset()); assertEquals(Collections.singletonList(connectorId2), memberAssignment.connectors()); assertEquals(Arrays.asList(taskId1x0, taskId3x0), memberAssignment.tasks()); PowerMock.verifyAll(); }
|
@Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); }
|
WorkerCoordinator extends AbstractCoordinator implements Closeable { @Override public List<ProtocolMetadata> metadata() { configSnapshot = configStorage.snapshot(); ConnectProtocol.WorkerState workerState = new ConnectProtocol.WorkerState(restUrl, configSnapshot.offset()); ByteBuffer metadata = ConnectProtocol.serializeMetadata(workerState); return Collections.singletonList(new ProtocolMetadata(DEFAULT_SUBPROTOCOL, metadata)); } WorkerCoordinator(ConsumerNetworkClient client,
String groupId,
int rebalanceTimeoutMs,
int sessionTimeoutMs,
int heartbeatIntervalMs,
Metrics metrics,
String metricGrpPrefix,
Time time,
long retryBackoffMs,
String restUrl,
ConfigBackingStore configStorage,
WorkerRebalanceListener listener); void requestRejoin(); @Override String protocolType(); void poll(long timeout); @Override List<ProtocolMetadata> metadata(); String memberId(); String ownerUrl(String connector); String ownerUrl(ConnectorTaskId task); static final String DEFAULT_SUBPROTOCOL; }
|
@Test public void testSchedule() throws Exception { Capture<Runnable> taskWrapper = EasyMock.newCapture(); ScheduledFuture commitFuture = PowerMock.createMock(ScheduledFuture.class); EasyMock.expect(executor.scheduleWithFixedDelay( EasyMock.capture(taskWrapper), eq(DEFAULT_OFFSET_COMMIT_INTERVAL_MS), eq(DEFAULT_OFFSET_COMMIT_INTERVAL_MS), eq(TimeUnit.MILLISECONDS)) ).andReturn(commitFuture); ConnectorTaskId taskId = PowerMock.createMock(ConnectorTaskId.class); WorkerSourceTask task = PowerMock.createMock(WorkerSourceTask.class); EasyMock.expect(committers.put(taskId, commitFuture)).andReturn(null); PowerMock.replayAll(); committer.schedule(taskId, task); assertTrue(taskWrapper.hasCaptured()); assertNotNull(taskWrapper.getValue()); PowerMock.verifyAll(); }
|
public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) { long commitIntervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG); ScheduledFuture<?> commitFuture = commitExecutorService.scheduleWithFixedDelay(new Runnable() { @Override public void run() { commit(workerTask); } }, commitIntervalMs, commitIntervalMs, TimeUnit.MILLISECONDS); committers.put(id, commitFuture); }
|
SourceTaskOffsetCommitter { public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) { long commitIntervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG); ScheduledFuture<?> commitFuture = commitExecutorService.scheduleWithFixedDelay(new Runnable() { @Override public void run() { commit(workerTask); } }, commitIntervalMs, commitIntervalMs, TimeUnit.MILLISECONDS); committers.put(id, commitFuture); } }
|
SourceTaskOffsetCommitter { public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) { long commitIntervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG); ScheduledFuture<?> commitFuture = commitExecutorService.scheduleWithFixedDelay(new Runnable() { @Override public void run() { commit(workerTask); } }, commitIntervalMs, commitIntervalMs, TimeUnit.MILLISECONDS); committers.put(id, commitFuture); } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); }
|
SourceTaskOffsetCommitter { public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) { long commitIntervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG); ScheduledFuture<?> commitFuture = commitExecutorService.scheduleWithFixedDelay(new Runnable() { @Override public void run() { commit(workerTask); } }, commitIntervalMs, commitIntervalMs, TimeUnit.MILLISECONDS); committers.put(id, commitFuture); } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); void close(long timeoutMs); void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask); void remove(ConnectorTaskId id); }
|
SourceTaskOffsetCommitter { public void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask) { long commitIntervalMs = config.getLong(WorkerConfig.OFFSET_COMMIT_INTERVAL_MS_CONFIG); ScheduledFuture<?> commitFuture = commitExecutorService.scheduleWithFixedDelay(new Runnable() { @Override public void run() { commit(workerTask); } }, commitIntervalMs, commitIntervalMs, TimeUnit.MILLISECONDS); committers.put(id, commitFuture); } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); void close(long timeoutMs); void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask); void remove(ConnectorTaskId id); }
|
@Test public void testRemove() throws Exception { ConnectorTaskId taskId = PowerMock.createMock(ConnectorTaskId.class); ScheduledFuture task = PowerMock.createMock(ScheduledFuture.class); EasyMock.expect(committers.remove(taskId)).andReturn(null); PowerMock.replayAll(); committer.remove(taskId); PowerMock.verifyAll(); PowerMock.resetAll(); EasyMock.expect(committers.remove(taskId)).andReturn(task); EasyMock.expect(task.cancel(eq(false))).andReturn(false); EasyMock.expect(task.isDone()).andReturn(false); EasyMock.expect(task.get()).andReturn(null); PowerMock.replayAll(); committer.remove(taskId); PowerMock.verifyAll(); PowerMock.resetAll(); EasyMock.expect(committers.remove(taskId)).andReturn(task); EasyMock.expect(task.cancel(eq(false))).andReturn(false); EasyMock.expect(task.isDone()).andReturn(false); EasyMock.expect(task.get()).andThrow(new CancellationException()); mockLog.trace(EasyMock.anyString(), EasyMock.<Object>anyObject()); PowerMock.expectLastCall(); PowerMock.replayAll(); committer.remove(taskId); PowerMock.verifyAll(); PowerMock.resetAll(); EasyMock.expect(committers.remove(taskId)).andReturn(task); EasyMock.expect(task.cancel(eq(false))).andReturn(false); EasyMock.expect(task.isDone()).andReturn(false); EasyMock.expect(task.get()).andThrow(new InterruptedException()); PowerMock.replayAll(); try { committer.remove(taskId); fail("Expected ConnectException to be raised"); } catch (ConnectException e) { } PowerMock.verifyAll(); }
|
public void remove(ConnectorTaskId id) { final ScheduledFuture<?> task = committers.remove(id); if (task == null) return; try { task.cancel(false); if (!task.isDone()) task.get(); } catch (CancellationException e) { log.trace("Offset commit thread was cancelled by another thread while removing connector task with id: {}", id); } catch (ExecutionException | InterruptedException e) { throw new ConnectException("Unexpected interruption in SourceTaskOffsetCommitter while removing task with id: " + id, e); } }
|
SourceTaskOffsetCommitter { public void remove(ConnectorTaskId id) { final ScheduledFuture<?> task = committers.remove(id); if (task == null) return; try { task.cancel(false); if (!task.isDone()) task.get(); } catch (CancellationException e) { log.trace("Offset commit thread was cancelled by another thread while removing connector task with id: {}", id); } catch (ExecutionException | InterruptedException e) { throw new ConnectException("Unexpected interruption in SourceTaskOffsetCommitter while removing task with id: " + id, e); } } }
|
SourceTaskOffsetCommitter { public void remove(ConnectorTaskId id) { final ScheduledFuture<?> task = committers.remove(id); if (task == null) return; try { task.cancel(false); if (!task.isDone()) task.get(); } catch (CancellationException e) { log.trace("Offset commit thread was cancelled by another thread while removing connector task with id: {}", id); } catch (ExecutionException | InterruptedException e) { throw new ConnectException("Unexpected interruption in SourceTaskOffsetCommitter while removing task with id: " + id, e); } } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); }
|
SourceTaskOffsetCommitter { public void remove(ConnectorTaskId id) { final ScheduledFuture<?> task = committers.remove(id); if (task == null) return; try { task.cancel(false); if (!task.isDone()) task.get(); } catch (CancellationException e) { log.trace("Offset commit thread was cancelled by another thread while removing connector task with id: {}", id); } catch (ExecutionException | InterruptedException e) { throw new ConnectException("Unexpected interruption in SourceTaskOffsetCommitter while removing task with id: " + id, e); } } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); void close(long timeoutMs); void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask); void remove(ConnectorTaskId id); }
|
SourceTaskOffsetCommitter { public void remove(ConnectorTaskId id) { final ScheduledFuture<?> task = committers.remove(id); if (task == null) return; try { task.cancel(false); if (!task.isDone()) task.get(); } catch (CancellationException e) { log.trace("Offset commit thread was cancelled by another thread while removing connector task with id: {}", id); } catch (ExecutionException | InterruptedException e) { throw new ConnectException("Unexpected interruption in SourceTaskOffsetCommitter while removing task with id: " + id, e); } } SourceTaskOffsetCommitter(WorkerConfig config,
ScheduledExecutorService commitExecutorService,
ConcurrentMap<ConnectorTaskId, ScheduledFuture<?>> committers); SourceTaskOffsetCommitter(WorkerConfig config); void close(long timeoutMs); void schedule(final ConnectorTaskId id, final WorkerSourceTask workerTask); void remove(ConnectorTaskId id); }
|
@Test public void testPutConnectorConfig() throws Exception { expectConfigure(); expectStart(Collections.EMPTY_LIST, Collections.EMPTY_MAP); expectConvertWriteAndRead( CONNECTOR_CONFIG_KEYS.get(0), KafkaConfigBackingStore.CONNECTOR_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(0), "properties", SAMPLE_CONFIGS.get(0)); configUpdateListener.onConnectorConfigUpdate(CONNECTOR_IDS.get(0)); EasyMock.expectLastCall(); expectConvertWriteAndRead( CONNECTOR_CONFIG_KEYS.get(1), KafkaConfigBackingStore.CONNECTOR_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(1), "properties", SAMPLE_CONFIGS.get(1)); configUpdateListener.onConnectorConfigUpdate(CONNECTOR_IDS.get(1)); EasyMock.expectLastCall(); expectConnectorRemoval(CONNECTOR_CONFIG_KEYS.get(1), TARGET_STATE_KEYS.get(1)); configUpdateListener.onConnectorConfigRemove(CONNECTOR_IDS.get(1)); EasyMock.expectLastCall(); expectStop(); PowerMock.replayAll(); configStorage.setupAndCreateKafkaBasedLog(TOPIC, DEFAULT_DISTRIBUTED_CONFIG); configStorage.start(); ClusterConfigState configState = configStorage.snapshot(); assertEquals(-1, configState.offset()); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); configStorage.putConnectorConfig(CONNECTOR_IDS.get(0), SAMPLE_CONFIGS.get(0)); configState = configStorage.snapshot(); assertEquals(1, configState.offset()); assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); configStorage.putConnectorConfig(CONNECTOR_IDS.get(1), SAMPLE_CONFIGS.get(1)); configState = configStorage.snapshot(); assertEquals(2, configState.offset()); assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertEquals(SAMPLE_CONFIGS.get(1), configState.connectorConfig(CONNECTOR_IDS.get(1))); configStorage.removeConnectorConfig(CONNECTOR_IDS.get(1)); configState = configStorage.snapshot(); assertEquals(4, configState.offset()); 
assertEquals(SAMPLE_CONFIGS.get(0), configState.connectorConfig(CONNECTOR_IDS.get(0))); assertNull(configState.connectorConfig(CONNECTOR_IDS.get(1))); assertNull(configState.targetState(CONNECTOR_IDS.get(1))); configStorage.stop(); PowerMock.verifyAll(); }
|
@Override public void putConnectorConfig(String connector, Map<String, String> properties) { log.debug("Writing connector configuration {} for connector {} configuration", properties, connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); updateConnectorConfig(connector, serializedConfig); }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putConnectorConfig(String connector, Map<String, String> properties) { log.debug("Writing connector configuration {} for connector {} configuration", properties, connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); updateConnectorConfig(connector, serializedConfig); } }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putConnectorConfig(String connector, Map<String, String> properties) { log.debug("Writing connector configuration {} for connector {} configuration", properties, connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); updateConnectorConfig(connector, serializedConfig); } KafkaConfigBackingStore(Converter converter, WorkerConfig config); }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putConnectorConfig(String connector, Map<String, String> properties) { log.debug("Writing connector configuration {} for connector {} configuration", properties, connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); updateConnectorConfig(connector, serializedConfig); } KafkaConfigBackingStore(Converter converter, WorkerConfig config); static String TARGET_STATE_KEY(String connectorName); static String CONNECTOR_KEY(String connectorName); static String TASK_KEY(ConnectorTaskId taskId); static String COMMIT_TASKS_KEY(String connectorName); @Override void setUpdateListener(UpdateListener listener); @Override void start(); @Override void stop(); @Override ClusterConfigState snapshot(); @Override boolean contains(String connector); @Override void putConnectorConfig(String connector, Map<String, String> properties); @Override void removeConnectorConfig(String connector); @Override void removeTaskConfigs(String connector); @Override void putTaskConfigs(String connector, List<Map<String, String>> configs); @Override void refresh(long timeout, TimeUnit unit); @Override void putTargetState(String connector, TargetState state); }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putConnectorConfig(String connector, Map<String, String> properties) { log.debug("Writing connector configuration {} for connector {} configuration", properties, connector); Struct connectConfig = new Struct(CONNECTOR_CONFIGURATION_V0); connectConfig.put("properties", properties); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_CONFIGURATION_V0, connectConfig); updateConnectorConfig(connector, serializedConfig); } KafkaConfigBackingStore(Converter converter, WorkerConfig config); static String TARGET_STATE_KEY(String connectorName); static String CONNECTOR_KEY(String connectorName); static String TASK_KEY(ConnectorTaskId taskId); static String COMMIT_TASKS_KEY(String connectorName); @Override void setUpdateListener(UpdateListener listener); @Override void start(); @Override void stop(); @Override ClusterConfigState snapshot(); @Override boolean contains(String connector); @Override void putConnectorConfig(String connector, Map<String, String> properties); @Override void removeConnectorConfig(String connector); @Override void removeTaskConfigs(String connector); @Override void putTaskConfigs(String connector, List<Map<String, String>> configs); @Override void refresh(long timeout, TimeUnit unit); @Override void putTargetState(String connector, TargetState state); static final String TARGET_STATE_PREFIX; static final String CONNECTOR_PREFIX; static final String TASK_PREFIX; static final String COMMIT_TASKS_PREFIX; static final Schema CONNECTOR_CONFIGURATION_V0; static final Schema TASK_CONFIGURATION_V0; static final Schema CONNECTOR_TASKS_COMMIT_V0; static final Schema TARGET_STATE_V0; }
|
@Test(expected = DataException.class) public void nullToConnect() { assertEquals(SchemaAndValue.NULL, converter.toConnectData(TOPIC, null)); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testPutTaskConfigs() throws Exception { expectConfigure(); expectStart(Collections.EMPTY_LIST, Collections.EMPTY_MAP); expectReadToEnd(new LinkedHashMap<String, byte[]>()); expectConvertWriteRead( TASK_CONFIG_KEYS.get(0), KafkaConfigBackingStore.TASK_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(0), "properties", SAMPLE_CONFIGS.get(0)); expectConvertWriteRead( TASK_CONFIG_KEYS.get(1), KafkaConfigBackingStore.TASK_CONFIGURATION_V0, CONFIGS_SERIALIZED.get(1), "properties", SAMPLE_CONFIGS.get(1)); expectReadToEnd(new LinkedHashMap<String, byte[]>()); expectConvertWriteRead( COMMIT_TASKS_CONFIG_KEYS.get(0), KafkaConfigBackingStore.CONNECTOR_TASKS_COMMIT_V0, CONFIGS_SERIALIZED.get(2), "tasks", 2); configUpdateListener.onTaskConfigUpdate(Arrays.asList(TASK_IDS.get(0), TASK_IDS.get(1))); EasyMock.expectLastCall(); LinkedHashMap<String, byte[]> serializedConfigs = new LinkedHashMap<>(); serializedConfigs.put(TASK_CONFIG_KEYS.get(0), CONFIGS_SERIALIZED.get(0)); serializedConfigs.put(TASK_CONFIG_KEYS.get(1), CONFIGS_SERIALIZED.get(1)); serializedConfigs.put(COMMIT_TASKS_CONFIG_KEYS.get(0), CONFIGS_SERIALIZED.get(2)); expectReadToEnd(serializedConfigs); expectStop(); PowerMock.replayAll(); configStorage.setupAndCreateKafkaBasedLog(TOPIC, DEFAULT_DISTRIBUTED_CONFIG); configStorage.start(); whiteboxAddConnector(CONNECTOR_IDS.get(0), SAMPLE_CONFIGS.get(0), Collections.EMPTY_LIST); ClusterConfigState configState = configStorage.snapshot(); assertEquals(-1, configState.offset()); assertNull(configState.taskConfig(TASK_IDS.get(0))); assertNull(configState.taskConfig(TASK_IDS.get(1))); List<Map<String, String>> taskConfigs = Arrays.asList(SAMPLE_CONFIGS.get(0), SAMPLE_CONFIGS.get(1)); configStorage.putTaskConfigs("connector1", taskConfigs); configState = configStorage.snapshot(); assertEquals(3, configState.offset()); String connectorName = CONNECTOR_IDS.get(0); assertEquals(Arrays.asList(connectorName), new ArrayList<>(configState.connectors())); 
assertEquals(Arrays.asList(TASK_IDS.get(0), TASK_IDS.get(1)), configState.tasks(connectorName)); assertEquals(SAMPLE_CONFIGS.get(0), configState.taskConfig(TASK_IDS.get(0))); assertEquals(SAMPLE_CONFIGS.get(1), configState.taskConfig(TASK_IDS.get(1))); assertEquals(Collections.EMPTY_SET, configState.inconsistentConnectors()); configStorage.stop(); PowerMock.verifyAll(); }
|
@Override public void putTaskConfigs(String connector, List<Map<String, String>> configs) { try { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } int taskCount = configs.size(); int index = 0; for (Map<String, String> taskConfig: configs) { Struct connectConfig = new Struct(TASK_CONFIGURATION_V0); connectConfig.put("properties", taskConfig); byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig); log.debug("Writing configuration for task " + index + " configuration: " + taskConfig); ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index); configLog.send(TASK_KEY(connectorTaskId), serializedConfig); index++; } try { if (taskCount > 0) { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0); connectConfig.put("tasks", taskCount); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig); log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks."); configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig); configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putTaskConfigs(String connector, List<Map<String, String>> configs) { try { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } int taskCount = configs.size(); int index = 0; for (Map<String, String> taskConfig: configs) { Struct connectConfig = new Struct(TASK_CONFIGURATION_V0); connectConfig.put("properties", taskConfig); byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig); log.debug("Writing configuration for task " + index + " configuration: " + taskConfig); ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index); configLog.send(TASK_KEY(connectorTaskId), serializedConfig); index++; } try { if (taskCount > 0) { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0); connectConfig.put("tasks", taskCount); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig); log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks."); configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig); configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } } }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putTaskConfigs(String connector, List<Map<String, String>> configs) { try { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } int taskCount = configs.size(); int index = 0; for (Map<String, String> taskConfig: configs) { Struct connectConfig = new Struct(TASK_CONFIGURATION_V0); connectConfig.put("properties", taskConfig); byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig); log.debug("Writing configuration for task " + index + " configuration: " + taskConfig); ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index); configLog.send(TASK_KEY(connectorTaskId), serializedConfig); index++; } try { if (taskCount > 0) { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0); connectConfig.put("tasks", taskCount); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig); log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks."); configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig); configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } } KafkaConfigBackingStore(Converter converter, WorkerConfig config); }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putTaskConfigs(String connector, List<Map<String, String>> configs) { try { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } int taskCount = configs.size(); int index = 0; for (Map<String, String> taskConfig: configs) { Struct connectConfig = new Struct(TASK_CONFIGURATION_V0); connectConfig.put("properties", taskConfig); byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig); log.debug("Writing configuration for task " + index + " configuration: " + taskConfig); ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index); configLog.send(TASK_KEY(connectorTaskId), serializedConfig); index++; } try { if (taskCount > 0) { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0); connectConfig.put("tasks", taskCount); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig); log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks."); configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig); configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } } KafkaConfigBackingStore(Converter converter, WorkerConfig config); static String TARGET_STATE_KEY(String connectorName); static String CONNECTOR_KEY(String connectorName); static String TASK_KEY(ConnectorTaskId taskId); static String COMMIT_TASKS_KEY(String connectorName); 
@Override void setUpdateListener(UpdateListener listener); @Override void start(); @Override void stop(); @Override ClusterConfigState snapshot(); @Override boolean contains(String connector); @Override void putConnectorConfig(String connector, Map<String, String> properties); @Override void removeConnectorConfig(String connector); @Override void removeTaskConfigs(String connector); @Override void putTaskConfigs(String connector, List<Map<String, String>> configs); @Override void refresh(long timeout, TimeUnit unit); @Override void putTargetState(String connector, TargetState state); }
|
KafkaConfigBackingStore implements ConfigBackingStore { @Override public void putTaskConfigs(String connector, List<Map<String, String>> configs) { try { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } int taskCount = configs.size(); int index = 0; for (Map<String, String> taskConfig: configs) { Struct connectConfig = new Struct(TASK_CONFIGURATION_V0); connectConfig.put("properties", taskConfig); byte[] serializedConfig = converter.fromConnectData(topic, TASK_CONFIGURATION_V0, connectConfig); log.debug("Writing configuration for task " + index + " configuration: " + taskConfig); ConnectorTaskId connectorTaskId = new ConnectorTaskId(connector, index); configLog.send(TASK_KEY(connectorTaskId), serializedConfig); index++; } try { if (taskCount > 0) { configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } Struct connectConfig = new Struct(CONNECTOR_TASKS_COMMIT_V0); connectConfig.put("tasks", taskCount); byte[] serializedConfig = converter.fromConnectData(topic, CONNECTOR_TASKS_COMMIT_V0, connectConfig); log.debug("Writing commit for connector " + connector + " with " + taskCount + " tasks."); configLog.send(COMMIT_TASKS_KEY(connector), serializedConfig); configLog.readToEnd().get(READ_TO_END_TIMEOUT_MS, TimeUnit.MILLISECONDS); } catch (InterruptedException | ExecutionException | TimeoutException e) { log.error("Failed to write root configuration to Kafka: ", e); throw new ConnectException("Error writing root configuration to Kafka", e); } } KafkaConfigBackingStore(Converter converter, WorkerConfig config); static String TARGET_STATE_KEY(String connectorName); static String CONNECTOR_KEY(String connectorName); static String TASK_KEY(ConnectorTaskId taskId); static String COMMIT_TASKS_KEY(String connectorName); 
@Override void setUpdateListener(UpdateListener listener); @Override void start(); @Override void stop(); @Override ClusterConfigState snapshot(); @Override boolean contains(String connector); @Override void putConnectorConfig(String connector, Map<String, String> properties); @Override void removeConnectorConfig(String connector); @Override void removeTaskConfigs(String connector); @Override void putTaskConfigs(String connector, List<Map<String, String>> configs); @Override void refresh(long timeout, TimeUnit unit); @Override void putTargetState(String connector, TargetState state); static final String TARGET_STATE_PREFIX; static final String CONNECTOR_PREFIX; static final String TASK_PREFIX; static final String COMMIT_TASKS_PREFIX; static final Schema CONNECTOR_CONFIGURATION_V0; static final Schema TASK_CONFIGURATION_V0; static final Schema CONNECTOR_TASKS_COMMIT_V0; static final Schema TARGET_STATE_V0; }
|
@Test public void testNoOffsetsToFlush() { PowerMock.replayAll(); assertFalse(writer.beginFlush()); PowerMock.verifyAll(); }
|
public synchronized boolean beginFlush() { if (flushing()) { log.error("Invalid call to OffsetStorageWriter flush() while already flushing, the " + "framework should not allow this"); throw new ConnectException("OffsetStorageWriter is already flushing"); } if (data.isEmpty()) return false; assert !flushing(); toFlush = data; data = new HashMap<>(); return true; }
|
OffsetStorageWriter { public synchronized boolean beginFlush() { if (flushing()) { log.error("Invalid call to OffsetStorageWriter flush() while already flushing, the " + "framework should not allow this"); throw new ConnectException("OffsetStorageWriter is already flushing"); } if (data.isEmpty()) return false; assert !flushing(); toFlush = data; data = new HashMap<>(); return true; } }
|
OffsetStorageWriter { public synchronized boolean beginFlush() { if (flushing()) { log.error("Invalid call to OffsetStorageWriter flush() while already flushing, the " + "framework should not allow this"); throw new ConnectException("OffsetStorageWriter is already flushing"); } if (data.isEmpty()) return false; assert !flushing(); toFlush = data; data = new HashMap<>(); return true; } OffsetStorageWriter(OffsetBackingStore backingStore,
String namespace, Converter keyConverter, Converter valueConverter); }
|
OffsetStorageWriter { public synchronized boolean beginFlush() { if (flushing()) { log.error("Invalid call to OffsetStorageWriter flush() while already flushing, the " + "framework should not allow this"); throw new ConnectException("OffsetStorageWriter is already flushing"); } if (data.isEmpty()) return false; assert !flushing(); toFlush = data; data = new HashMap<>(); return true; } OffsetStorageWriter(OffsetBackingStore backingStore,
String namespace, Converter keyConverter, Converter valueConverter); synchronized void offset(Map<String, ?> partition, Map<String, ?> offset); synchronized boolean beginFlush(); Future<Void> doFlush(final Callback<Void> callback); synchronized void cancelFlush(); }
|
OffsetStorageWriter { public synchronized boolean beginFlush() { if (flushing()) { log.error("Invalid call to OffsetStorageWriter flush() while already flushing, the " + "framework should not allow this"); throw new ConnectException("OffsetStorageWriter is already flushing"); } if (data.isEmpty()) return false; assert !flushing(); toFlush = data; data = new HashMap<>(); return true; } OffsetStorageWriter(OffsetBackingStore backingStore,
String namespace, Converter keyConverter, Converter valueConverter); synchronized void offset(Map<String, ?> partition, Map<String, ?> offset); synchronized boolean beginFlush(); Future<Void> doFlush(final Callback<Void> callback); synchronized void cancelFlush(); }
|
@Test public void testFromLogical() { assertEquals(0, Time.fromLogical(Time.SCHEMA, EPOCH.getTime())); assertEquals(10000, Time.fromLogical(Time.SCHEMA, EPOCH_PLUS_TEN_THOUSAND_MILLIS.getTime())); }
|
public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; }
|
// NOTE(review): auto-extracted duplicates of Time.fromLogical wrapped in progressively
// fuller (signature-only) class skeletons; not compilable as-is. fromLogical rejects
// schemas whose name is not LOGICAL_NAME, then rejects epoch millis outside
// [0, MILLIS_PER_DAY] before narrowing to int.
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test(expected = DataException.class) public void testFromLogicalInvalidHasDateComponents() { Time.fromLogical(Time.SCHEMA, EPOCH_PLUS_DATE_COMPONENT.getTime()); }
|
// NOTE(review): auto-extracted duplicates of the fromLogical shown above (bare method
// followed by signature-only class skeletons); not compilable as-is.
public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Time { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Time object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); long unixMillis = calendar.getTimeInMillis(); if (unixMillis < 0 || unixMillis > MILLIS_PER_DAY) { throw new DataException("Kafka Connect Time type should not have any date fields set to non-zero values."); } return (int) unixMillis; } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void testToLogical() { assertEquals(EPOCH.getTime(), Time.toLogical(Time.SCHEMA, 0)); assertEquals(EPOCH_PLUS_TEN_THOUSAND_MILLIS.getTime(), Time.toLogical(Time.SCHEMA, 10000)); }
|
public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); if (value < 0 || value > MILLIS_PER_DAY) throw new DataException("Time values must use number of milliseconds greater than 0 and less than 86400000"); return new java.util.Date(value); }
|
// NOTE(review): auto-extracted duplicates of Time.toLogical in signature-only class
// skeletons; not compilable as-is. toLogical rejects non-Time schemas and values
// outside [0, MILLIS_PER_DAY]. The "Date object" wording in the schema-mismatch
// message looks like a copy-paste from the Date logical type -- see fix above.
Time { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); if (value < 0 || value > MILLIS_PER_DAY) throw new DataException("Time values must use number of milliseconds greater than 0 and less than 86400000"); return new java.util.Date(value); } }
|
Time { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); if (value < 0 || value > MILLIS_PER_DAY) throw new DataException("Time values must use number of milliseconds greater than 0 and less than 86400000"); return new java.util.Date(value); } }
|
Time { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); if (value < 0 || value > MILLIS_PER_DAY) throw new DataException("Time values must use number of milliseconds greater than 0 and less than 86400000"); return new java.util.Date(value); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Time { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); if (value < 0 || value > MILLIS_PER_DAY) throw new DataException("Time values must use number of milliseconds greater than 0 and less than 86400000"); return new java.util.Date(value); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void nullSchemaPrimitiveToConnect() { SchemaAndValue converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": null }".getBytes()); assertEquals(SchemaAndValue.NULL, converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": true }".getBytes()); assertEquals(new SchemaAndValue(null, true), converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12 }".getBytes()); assertEquals(new SchemaAndValue(null, 12L), converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": 12.24 }".getBytes()); assertEquals(new SchemaAndValue(null, 12.24), converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": \"a string\" }".getBytes()); assertEquals(new SchemaAndValue(null, "a string"), converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": [1, \"2\", 3] }".getBytes()); assertEquals(new SchemaAndValue(null, Arrays.asList(1L, "2", 3L)), converted); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": { \"field1\": 1, \"field2\": 2} }".getBytes()); Map<String, Long> obj = new HashMap<>(); obj.put("field1", 1L); obj.put("field2", 2L); assertEquals(new SchemaAndValue(null, obj), converted); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
// NOTE(review): auto-extracted duplicates of JsonConverter.toConnectData in
// signature-only class skeletons; not compilable as-is.
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testEquals() { Struct struct1 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); Struct struct2 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "foobar") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); Struct struct3 = new Struct(FLAT_STRUCT_SCHEMA) .put("int8", (byte) 12) .put("int16", (short) 12) .put("int32", 12) .put("int64", (long) 12) .put("float32", 12.f) .put("float64", 12.) .put("boolean", true) .put("string", "mismatching string") .put("bytes", ByteBuffer.wrap("foobar".getBytes())); assertEquals(struct1, struct2); assertNotEquals(struct1, struct3); List<Byte> array = Arrays.asList((byte) 1, (byte) 2); Map<Integer, String> map = Collections.singletonMap(1, "string"); struct1 = new Struct(NESTED_SCHEMA) .put("array", array) .put("map", map) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12)); List<Byte> array2 = Arrays.asList((byte) 1, (byte) 2); Map<Integer, String> map2 = Collections.singletonMap(1, "string"); struct2 = new Struct(NESTED_SCHEMA) .put("array", array2) .put("map", map2) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12)); List<Byte> array3 = Arrays.asList((byte) 1, (byte) 2, (byte) 3); Map<Integer, String> map3 = Collections.singletonMap(2, "string"); struct3 = new Struct(NESTED_SCHEMA) .put("array", array3) .put("map", map3) .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 13)); assertEquals(struct1, struct2); assertNotEquals(struct1, struct3); }
|
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Struct struct = (Struct) o; return Objects.equals(schema, struct.schema) && Arrays.equals(values, struct.values); }
|
// NOTE(review): auto-extracted duplicates of Struct.equals in signature-only class
// skeletons; not compilable as-is.
Struct { @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Struct struct = (Struct) o; return Objects.equals(schema, struct.schema) && Arrays.equals(values, struct.values); } }
|
Struct { @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Struct struct = (Struct) o; return Objects.equals(schema, struct.schema) && Arrays.equals(values, struct.values); } Struct(Schema schema); }
|
Struct { @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Struct struct = (Struct) o; return Objects.equals(schema, struct.schema) && Arrays.equals(values, struct.values); } Struct(Schema schema); Schema schema(); Object get(String fieldName); Object get(Field field); Object getWithoutDefault(String fieldName); Byte getInt8(String fieldName); Short getInt16(String fieldName); Integer getInt32(String fieldName); Long getInt64(String fieldName); Float getFloat32(String fieldName); Double getFloat64(String fieldName); Boolean getBoolean(String fieldName); String getString(String fieldName); byte[] getBytes(String fieldName); @SuppressWarnings("unchecked") List<T> getArray(String fieldName); @SuppressWarnings("unchecked") Map<K, V> getMap(String fieldName); Struct getStruct(String fieldName); Struct put(String fieldName, Object value); Struct put(Field field, Object value); void validate(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
|
Struct { @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Struct struct = (Struct) o; return Objects.equals(schema, struct.schema) && Arrays.equals(values, struct.values); } Struct(Schema schema); Schema schema(); Object get(String fieldName); Object get(Field field); Object getWithoutDefault(String fieldName); Byte getInt8(String fieldName); Short getInt16(String fieldName); Integer getInt32(String fieldName); Long getInt64(String fieldName); Float getFloat32(String fieldName); Double getFloat64(String fieldName); Boolean getBoolean(String fieldName); String getString(String fieldName); byte[] getBytes(String fieldName); @SuppressWarnings("unchecked") List<T> getArray(String fieldName); @SuppressWarnings("unchecked") Map<K, V> getMap(String fieldName); Struct getStruct(String fieldName); Struct put(String fieldName, Object value); Struct put(Field field, Object value); void validate(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); }
|
@Test(expected = DataException.class) public void testFieldsOnlyValidForStructs() { Schema.INT8_SCHEMA.fields(); }
|
@Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; }
|
// NOTE(review): auto-extracted duplicates of ConnectSchema.fields() in signature-only
// class skeletons; not compilable as-is.
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test public void testValidateValueMatchingType() { ConnectSchema.validateValue(Schema.INT8_SCHEMA, (byte) 1); ConnectSchema.validateValue(Schema.INT16_SCHEMA, (short) 1); ConnectSchema.validateValue(Schema.INT32_SCHEMA, 1); ConnectSchema.validateValue(Schema.INT64_SCHEMA, (long) 1); ConnectSchema.validateValue(Schema.FLOAT32_SCHEMA, 1.f); ConnectSchema.validateValue(Schema.FLOAT64_SCHEMA, 1.); ConnectSchema.validateValue(Schema.BOOLEAN_SCHEMA, true); ConnectSchema.validateValue(Schema.STRING_SCHEMA, "a string"); ConnectSchema.validateValue(Schema.BYTES_SCHEMA, "a byte array".getBytes()); ConnectSchema.validateValue(Schema.BYTES_SCHEMA, ByteBuffer.wrap("a byte array".getBytes())); ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3)); ConnectSchema.validateValue( SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.STRING_SCHEMA).build(), Collections.singletonMap(1, "value") ); Struct structValue = new Struct(STRUCT_SCHEMA) .put("first", 1) .put("second", "foo") .put("array", Arrays.asList(1, 2, 3)) .put("map", Collections.singletonMap(1, "value")) .put("nested", new Struct(FLAT_STRUCT_SCHEMA).put("field", 12)); ConnectSchema.validateValue(STRUCT_SCHEMA, structValue); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
// NOTE(review): auto-extracted duplicates of ConnectSchema.validateValue in
// signature-only class skeletons; not compilable as-is.
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchInt8() { ConnectSchema.validateValue(Schema.INT8_SCHEMA, 1); }
|
// NOTE(review): auto-extracted duplicates of validateValue (bare delegating method
// followed by signature-only class skeletons); not compilable as-is.
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchInt16() { ConnectSchema.validateValue(Schema.INT16_SCHEMA, 1); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchInt32() { ConnectSchema.validateValue(Schema.INT32_SCHEMA, (long) 1); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test public void decimalToConnect() { Schema schema = Decimal.schema(2); BigDecimal reference = new BigDecimal(new BigInteger("156"), 2); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }, \"payload\": \"AJw=\" }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); BigDecimal converted = (BigDecimal) schemaAndValue.value(); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, converted); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchInt64() { ConnectSchema.validateValue(Schema.INT64_SCHEMA, 1); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchFloat() { ConnectSchema.validateValue(Schema.FLOAT32_SCHEMA, 1.0); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchDouble() { ConnectSchema.validateValue(Schema.FLOAT64_SCHEMA, 1.f); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchBoolean() { ConnectSchema.validateValue(Schema.BOOLEAN_SCHEMA, 1.f); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchString() { CharBuffer cbuf = CharBuffer.wrap("abc"); ConnectSchema.validateValue(Schema.STRING_SCHEMA, cbuf); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchBytes() { ConnectSchema.validateValue(Schema.BYTES_SCHEMA, new Object[]{1, "foo"}); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchArray() { ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList("a", "b", "c")); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchArraySomeMatch() { ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, "c")); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchMapKey() { ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Collections.singletonMap("wrong key type", "value")); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchMapValue() { ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Collections.singletonMap(1, 2)); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test public void decimalToConnectOptional() { Schema schema = Decimal.builder(2).optional().schema(); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"optional\": true, \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertNull(schemaAndValue.value()); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchMapSomeKeys() { Map<Object, String> data = new HashMap<>(); data.put(1, "abc"); data.put("wrong", "it's as easy as one two three"); ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, data); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchMapSomeValues() { Map<Integer, Object> data = new HashMap<>(); data.put(1, "abc"); data.put(2, "wrong".getBytes()); ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, data); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchDate() { ConnectSchema.validateValue(Date.SCHEMA, 1000L); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchTime() { ConnectSchema.validateValue(Time.SCHEMA, 1000L); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test(expected = DataException.class) public void testValidateValueMismatchTimestamp() { ConnectSchema.validateValue(Timestamp.SCHEMA, 1000L); }
|
public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { public static void validateValue(Schema schema, Object value) { validateValue(null, schema, value); } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test public void testEmptyStruct() { final ConnectSchema emptyStruct = new ConnectSchema(Schema.Type.STRUCT, false, null, null, null, null); assertEquals(0, emptyStruct.fields().size()); new Struct(emptyStruct); }
|
@Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
ConnectSchema implements Schema { @Override public List<Field> fields() { if (type != Type.STRUCT) throw new DataException("Cannot list fields on non-struct type"); return fields; } ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc, Map<String, String> parameters, List<Field> fields, Schema keySchema, Schema valueSchema); ConnectSchema(Type type, boolean optional, Object defaultValue, String name, Integer version, String doc); ConnectSchema(Type type); @Override Type type(); @Override boolean isOptional(); @Override Object defaultValue(); @Override String name(); @Override Integer version(); @Override String doc(); @Override Map<String, String> parameters(); @Override List<Field> fields(); Field field(String fieldName); @Override Schema keySchema(); @Override Schema valueSchema(); static void validateValue(Schema schema, Object value); static void validateValue(String name, Schema schema, Object value); void validateValue(Object value); @Override ConnectSchema schema(); @Override boolean equals(Object o); @Override int hashCode(); @Override String toString(); static Type schemaType(Class<?> klass); }
|
@Test public void testParameters() { Map<String, String> expectedParameters = new HashMap<>(); expectedParameters.put("foo", "val"); expectedParameters.put("bar", "baz"); Schema schema = SchemaBuilder.string().parameter("foo", "val").parameter("bar", "baz").build(); assertTypeAndDefault(schema, Schema.Type.STRING, false, null); assertMetadata(schema, null, null, null, expectedParameters); schema = SchemaBuilder.string().parameters(expectedParameters).build(); assertTypeAndDefault(schema, Schema.Type.STRING, false, null); assertMetadata(schema, null, null, null, expectedParameters); }
|
@Override public Map<String, String> parameters() { return parameters == null ? null : Collections.unmodifiableMap(parameters); }
|
SchemaBuilder implements Schema { @Override public Map<String, String> parameters() { return parameters == null ? null : Collections.unmodifiableMap(parameters); } }
|
SchemaBuilder implements Schema { @Override public Map<String, String> parameters() { return parameters == null ? null : Collections.unmodifiableMap(parameters); } SchemaBuilder(Type type); }
|
SchemaBuilder implements Schema { @Override public Map<String, String> parameters() { return parameters == null ? null : Collections.unmodifiableMap(parameters); } SchemaBuilder(Type type); @Override boolean isOptional(); SchemaBuilder optional(); SchemaBuilder required(); @Override Object defaultValue(); SchemaBuilder defaultValue(Object value); @Override String name(); SchemaBuilder name(String name); @Override Integer version(); SchemaBuilder version(Integer version); @Override String doc(); SchemaBuilder doc(String doc); @Override Map<String, String> parameters(); SchemaBuilder parameter(String propertyName, String propertyValue); SchemaBuilder parameters(Map<String, String> props); @Override Type type(); static SchemaBuilder type(Type type); static SchemaBuilder int8(); static SchemaBuilder int16(); static SchemaBuilder int32(); static SchemaBuilder int64(); static SchemaBuilder float32(); static SchemaBuilder float64(); static SchemaBuilder bool(); static SchemaBuilder string(); static SchemaBuilder bytes(); static SchemaBuilder struct(); SchemaBuilder field(String fieldName, Schema fieldSchema); List<Field> fields(); Field field(String fieldName); static SchemaBuilder array(Schema valueSchema); static SchemaBuilder map(Schema keySchema, Schema valueSchema); @Override Schema keySchema(); @Override Schema valueSchema(); Schema build(); @Override Schema schema(); }
|
SchemaBuilder implements Schema { @Override public Map<String, String> parameters() { return parameters == null ? null : Collections.unmodifiableMap(parameters); } SchemaBuilder(Type type); @Override boolean isOptional(); SchemaBuilder optional(); SchemaBuilder required(); @Override Object defaultValue(); SchemaBuilder defaultValue(Object value); @Override String name(); SchemaBuilder name(String name); @Override Integer version(); SchemaBuilder version(Integer version); @Override String doc(); SchemaBuilder doc(String doc); @Override Map<String, String> parameters(); SchemaBuilder parameter(String propertyName, String propertyValue); SchemaBuilder parameters(Map<String, String> props); @Override Type type(); static SchemaBuilder type(Type type); static SchemaBuilder int8(); static SchemaBuilder int16(); static SchemaBuilder int32(); static SchemaBuilder int64(); static SchemaBuilder float32(); static SchemaBuilder float64(); static SchemaBuilder bool(); static SchemaBuilder string(); static SchemaBuilder bytes(); static SchemaBuilder struct(); SchemaBuilder field(String fieldName, Schema fieldSchema); List<Field> fields(); Field field(String fieldName); static SchemaBuilder array(Schema valueSchema); static SchemaBuilder map(Schema keySchema, Schema valueSchema); @Override Schema keySchema(); @Override Schema valueSchema(); Schema build(); @Override Schema schema(); }
|
@Test public void testFromLogical() { assertEquals(0, Date.fromLogical(Date.SCHEMA, EPOCH.getTime())); assertEquals(10000, Date.fromLogical(Date.SCHEMA, EPOCH_PLUS_TEN_THOUSAND_DAYS.getTime())); }
|
public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test(expected = DataException.class) public void testFromLogicalInvalidHasTimeComponents() { Date.fromLogical(Date.SCHEMA, EPOCH_PLUS_TIME_COMPONENT.getTime()); }
|
public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Date { public static int fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); Calendar calendar = Calendar.getInstance(UTC); calendar.setTime(value); if (calendar.get(Calendar.HOUR_OF_DAY) != 0 || calendar.get(Calendar.MINUTE) != 0 || calendar.get(Calendar.SECOND) != 0 || calendar.get(Calendar.MILLISECOND) != 0) { throw new DataException("Kafka Connect Date type should not have any time fields set to non-zero values."); } long unixMillis = calendar.getTimeInMillis(); return (int) (unixMillis / MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void decimalToConnectWithDefaultValue() { BigDecimal reference = new BigDecimal(new BigInteger("156"), 2); Schema schema = Decimal.builder(2).defaultValue(reference).build(); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"default\": \"AJw=\", \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, schemaAndValue.value()); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testToLogical() { assertEquals(EPOCH.getTime(), Date.toLogical(Date.SCHEMA, 0)); assertEquals(EPOCH_PLUS_TEN_THOUSAND_DAYS.getTime(), Date.toLogical(Date.SCHEMA, 10000)); }
|
public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); return new java.util.Date(value * MILLIS_PER_DAY); }
|
Date { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); return new java.util.Date(value * MILLIS_PER_DAY); } }
|
Date { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); return new java.util.Date(value * MILLIS_PER_DAY); } }
|
Date { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); return new java.util.Date(value * MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); }
|
Date { public static java.util.Date toLogical(Schema schema, int value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Date object but the schema does not match."); return new java.util.Date(value * MILLIS_PER_DAY); } static SchemaBuilder builder(); static int fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, int value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void testFromLogical() { assertEquals(0L, Timestamp.fromLogical(Timestamp.SCHEMA, EPOCH.getTime())); assertEquals(TOTAL_MILLIS, Timestamp.fromLogical(Timestamp.SCHEMA, EPOCH_PLUS_MILLIS.getTime())); }
|
public static long fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return value.getTime(); }
|
Timestamp { public static long fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return value.getTime(); } }
|
Timestamp { public static long fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return value.getTime(); } }
|
Timestamp { public static long fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return value.getTime(); } static SchemaBuilder builder(); static long fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, long value); }
|
Timestamp { public static long fromLogical(Schema schema, java.util.Date value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return value.getTime(); } static SchemaBuilder builder(); static long fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, long value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void testToLogical() { assertEquals(EPOCH.getTime(), Timestamp.toLogical(Timestamp.SCHEMA, 0L)); assertEquals(EPOCH_PLUS_MILLIS.getTime(), Timestamp.toLogical(Timestamp.SCHEMA, TOTAL_MILLIS)); }
|
public static java.util.Date toLogical(Schema schema, long value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return new java.util.Date(value); }
|
Timestamp { public static java.util.Date toLogical(Schema schema, long value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return new java.util.Date(value); } }
|
Timestamp { public static java.util.Date toLogical(Schema schema, long value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return new java.util.Date(value); } }
|
Timestamp { public static java.util.Date toLogical(Schema schema, long value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return new java.util.Date(value); } static SchemaBuilder builder(); static long fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, long value); }
|
Timestamp { public static java.util.Date toLogical(Schema schema, long value) { if (schema.name() == null || !(schema.name().equals(LOGICAL_NAME))) throw new DataException("Requested conversion of Timestamp object but the schema does not match."); return new java.util.Date(value); } static SchemaBuilder builder(); static long fromLogical(Schema schema, java.util.Date value); static java.util.Date toLogical(Schema schema, long value); static final String LOGICAL_NAME; static final Schema SCHEMA; }
|
@Test public void testPrimitiveTypeProjection() throws Exception { Object projected; projected = SchemaProjector.project(Schema.BOOLEAN_SCHEMA, false, Schema.BOOLEAN_SCHEMA); assertEquals(false, projected); byte[] bytes = {(byte) 1, (byte) 2}; projected = SchemaProjector.project(Schema.BYTES_SCHEMA, bytes, Schema.BYTES_SCHEMA); assertEquals(bytes, projected); projected = SchemaProjector.project(Schema.STRING_SCHEMA, "abc", Schema.STRING_SCHEMA); assertEquals("abc", projected); projected = SchemaProjector.project(Schema.BOOLEAN_SCHEMA, false, Schema.OPTIONAL_BOOLEAN_SCHEMA); assertEquals(false, projected); projected = SchemaProjector.project(Schema.BYTES_SCHEMA, bytes, Schema.OPTIONAL_BYTES_SCHEMA); assertEquals(bytes, projected); projected = SchemaProjector.project(Schema.STRING_SCHEMA, "abc", Schema.OPTIONAL_STRING_SCHEMA); assertEquals("abc", projected); try { SchemaProjector.project(Schema.OPTIONAL_BOOLEAN_SCHEMA, false, Schema.BOOLEAN_SCHEMA); fail("Cannot project optional schema to schema with no default value."); } catch (DataException e) { } try { SchemaProjector.project(Schema.OPTIONAL_BYTES_SCHEMA, bytes, Schema.BYTES_SCHEMA); fail("Cannot project optional schema to schema with no default value."); } catch (DataException e) { } try { SchemaProjector.project(Schema.OPTIONAL_STRING_SCHEMA, "abc", Schema.STRING_SCHEMA); fail("Cannot project optional schema to schema with no default value."); } catch (DataException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testNumericTypeProjection() throws Exception { Schema[] promotableSchemas = {Schema.INT8_SCHEMA, Schema.INT16_SCHEMA, Schema.INT32_SCHEMA, Schema.INT64_SCHEMA, Schema.FLOAT32_SCHEMA, Schema.FLOAT64_SCHEMA}; Schema[] promotableOptionalSchemas = {Schema.OPTIONAL_INT8_SCHEMA, Schema.OPTIONAL_INT16_SCHEMA, Schema.OPTIONAL_INT32_SCHEMA, Schema.OPTIONAL_INT64_SCHEMA, Schema.OPTIONAL_FLOAT32_SCHEMA, Schema.OPTIONAL_FLOAT64_SCHEMA}; Object[] values = {(byte) 127, (short) 255, 32767, 327890L, 1.2F, 1.2345}; Map<Object, List<?>> expectedProjected = new HashMap<>(); expectedProjected.put(values[0], Arrays.asList((byte) 127, (short) 127, 127, 127L, 127.F, 127.)); expectedProjected.put(values[1], Arrays.asList((short) 255, 255, 255L, 255.F, 255.)); expectedProjected.put(values[2], Arrays.asList(32767, 32767L, 32767.F, 32767.)); expectedProjected.put(values[3], Arrays.asList(327890L, 327890.F, 327890.)); expectedProjected.put(values[4], Arrays.asList(1.2F, 1.2)); expectedProjected.put(values[5], Arrays.asList(1.2345)); Object promoted; for (int i = 0; i < promotableSchemas.length; ++i) { Schema source = promotableSchemas[i]; List<?> expected = expectedProjected.get(values[i]); for (int j = i; j < promotableSchemas.length; ++j) { Schema target = promotableSchemas[j]; promoted = SchemaProjector.project(source, values[i], target); if (target.type() == Type.FLOAT64) { assertEquals((Double) (expected.get(j - i)), (double) promoted, 1e-6); } else { assertEquals(expected.get(j - i), promoted); } } for (int j = i; j < promotableOptionalSchemas.length; ++j) { Schema target = promotableOptionalSchemas[j]; promoted = SchemaProjector.project(source, values[i], target); if (target.type() == Type.FLOAT64) { assertEquals((Double) (expected.get(j - i)), (double) promoted, 1e-6); } else { assertEquals(expected.get(j - i), promoted); } } } for (int i = 0; i < promotableOptionalSchemas.length; ++i) { Schema source = promotableSchemas[i]; List<?> expected = 
expectedProjected.get(values[i]); for (int j = i; j < promotableOptionalSchemas.length; ++j) { Schema target = promotableOptionalSchemas[j]; promoted = SchemaProjector.project(source, values[i], target); if (target.type() == Type.FLOAT64) { assertEquals((Double) (expected.get(j - i)), (double) promoted, 1e-6); } else { assertEquals(expected.get(j - i), promoted); } } } Schema[] nonPromotableSchemas = {Schema.BOOLEAN_SCHEMA, Schema.BYTES_SCHEMA, Schema.STRING_SCHEMA}; for (Schema promotableSchema: promotableSchemas) { for (Schema nonPromotableSchema: nonPromotableSchemas) { Object dummy = new Object(); try { SchemaProjector.project(promotableSchema, dummy, nonPromotableSchema); fail("Cannot promote " + promotableSchema.type() + " to " + nonPromotableSchema.type()); } catch (DataException e) { } } } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testStructAddField() throws Exception { Schema source = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .build(); Struct sourceStruct = new Struct(source); sourceStruct.put("field", 1); Schema target = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .field("field2", SchemaBuilder.int32().defaultValue(123).build()) .build(); Struct targetStruct = (Struct) SchemaProjector.project(source, sourceStruct, target); assertEquals(1, (int) targetStruct.getInt32("field")); assertEquals(123, (int) targetStruct.getInt32("field2")); Schema incompatibleTargetSchema = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .field("field2", Schema.INT32_SCHEMA) .build(); try { SchemaProjector.project(source, sourceStruct, incompatibleTargetSchema); fail("Incompatible schema."); } catch (DataException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testStructRemoveField() throws Exception { Schema source = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .field("field2", Schema.INT32_SCHEMA) .build(); Struct sourceStruct = new Struct(source); sourceStruct.put("field", 1); sourceStruct.put("field2", 234); Schema target = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .build(); Struct targetStruct = (Struct) SchemaProjector.project(source, sourceStruct, target); assertEquals(1, targetStruct.get("field")); try { targetStruct.get("field2"); fail("field2 is not part of the projected struct"); } catch (DataException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testStructDefaultValue() throws Exception { Schema source = SchemaBuilder.struct().optional() .field("field", Schema.INT32_SCHEMA) .field("field2", Schema.INT32_SCHEMA) .build(); SchemaBuilder builder = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .field("field2", Schema.INT32_SCHEMA); Struct defaultStruct = new Struct(builder).put("field", 12).put("field2", 345); builder.defaultValue(defaultStruct); Schema target = builder.build(); Object projected = SchemaProjector.project(source, null, target); assertEquals(defaultStruct, projected); Struct sourceStruct = new Struct(source).put("field", 45).put("field2", 678); Struct targetStruct = (Struct) SchemaProjector.project(source, sourceStruct, target); assertEquals(sourceStruct.get("field"), targetStruct.get("field")); assertEquals(sourceStruct.get("field2"), targetStruct.get("field2")); }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testNestedSchemaProjection() throws Exception { Schema sourceFlatSchema = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .build(); Schema targetFlatSchema = SchemaBuilder.struct() .field("field", Schema.INT32_SCHEMA) .field("field2", SchemaBuilder.int32().defaultValue(123).build()) .build(); Schema sourceNestedSchema = SchemaBuilder.struct() .field("first", Schema.INT32_SCHEMA) .field("second", Schema.STRING_SCHEMA) .field("array", SchemaBuilder.array(Schema.INT32_SCHEMA).build()) .field("map", SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.STRING_SCHEMA).build()) .field("nested", sourceFlatSchema) .build(); Schema targetNestedSchema = SchemaBuilder.struct() .field("first", Schema.INT32_SCHEMA) .field("second", Schema.STRING_SCHEMA) .field("array", SchemaBuilder.array(Schema.INT32_SCHEMA).build()) .field("map", SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.STRING_SCHEMA).build()) .field("nested", targetFlatSchema) .build(); Struct sourceFlatStruct = new Struct(sourceFlatSchema); sourceFlatStruct.put("field", 113); Struct sourceNestedStruct = new Struct(sourceNestedSchema); sourceNestedStruct.put("first", 1); sourceNestedStruct.put("second", "abc"); sourceNestedStruct.put("array", Arrays.asList(1, 2)); sourceNestedStruct.put("map", Collections.singletonMap(5, "def")); sourceNestedStruct.put("nested", sourceFlatStruct); Struct targetNestedStruct = (Struct) SchemaProjector.project(sourceNestedSchema, sourceNestedStruct, targetNestedSchema); assertEquals(1, targetNestedStruct.get("first")); assertEquals("abc", targetNestedStruct.get("second")); assertEquals(Arrays.asList(1, 2), (List<Integer>) targetNestedStruct.get("array")); assertEquals(Collections.singletonMap(5, "def"), (Map<Integer, String>) targetNestedStruct.get("map")); Struct projectedStruct = (Struct) targetNestedStruct.get("nested"); assertEquals(113, projectedStruct.get("field")); assertEquals(123, projectedStruct.get("field2")); }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void decimalToConnectOptionalWithDefaultValue() { BigDecimal reference = new BigDecimal(new BigInteger("156"), 2); Schema schema = Decimal.builder(2).optional().defaultValue(reference).build(); String msg = "{ \"schema\": { \"type\": \"bytes\", \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"optional\": true, \"default\": \"AJw=\", \"parameters\": { \"scale\": \"2\" } }, \"payload\": null }"; SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes()); assertEquals(schema, schemaAndValue.schema()); assertEquals(reference, schemaAndValue.value()); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void booleanToConnect() { assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, true), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": true }".getBytes())); assertEquals(new SchemaAndValue(Schema.BOOLEAN_SCHEMA, false), converter.toConnectData(TOPIC, "{ \"schema\": { \"type\": \"boolean\" }, \"payload\": false }".getBytes())); }
|
@Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
JsonConverter implements Converter { @Override public SchemaAndValue toConnectData(String topic, byte[] value) { JsonNode jsonValue; try { jsonValue = deserializer.deserialize(topic, value); } catch (SerializationException e) { throw new DataException("Converting byte[] to Kafka Connect data failed due to serialization error: ", e); } if (enableSchemas && (jsonValue == null || !jsonValue.isObject() || jsonValue.size() != 2 || !jsonValue.has("schema") || !jsonValue.has("payload"))) throw new DataException("JsonConverter with schemas.enable requires \"schema\" and \"payload\" fields and may not contain additional fields." + " If you are trying to deserialize plain JSON data, set schemas.enable=false in your converter configuration."); if (!enableSchemas) { ObjectNode envelope = JsonNodeFactory.instance.objectNode(); envelope.set("schema", null); envelope.set("payload", jsonValue); jsonValue = envelope; } return jsonToConnect(jsonValue); } @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); ObjectNode asJsonSchema(Schema schema); Schema asConnectSchema(JsonNode jsonSchema); }
|
@Test public void testLogicalTypeProjection() throws Exception { Schema[] logicalTypeSchemas = {Decimal.schema(2), Date.SCHEMA, Time.SCHEMA, Timestamp.SCHEMA}; Object projected; BigDecimal testDecimal = new BigDecimal(new BigInteger("156"), 2); projected = SchemaProjector.project(Decimal.schema(2), testDecimal, Decimal.schema(2)); assertEquals(testDecimal, projected); projected = SchemaProjector.project(Date.SCHEMA, 1000, Date.SCHEMA); assertEquals(1000, projected); projected = SchemaProjector.project(Time.SCHEMA, 231, Time.SCHEMA); assertEquals(231, projected); projected = SchemaProjector.project(Timestamp.SCHEMA, 34567L, Timestamp.SCHEMA); assertEquals(34567L, projected); Schema namedSchema = SchemaBuilder.int32().name("invalidLogicalTypeName").build(); for (Schema logicalTypeSchema: logicalTypeSchemas) { try { SchemaProjector.project(logicalTypeSchema, null, Schema.BOOLEAN_SCHEMA); fail("Cannot project logical types to non-logical types."); } catch (SchemaProjectorException e) { } try { SchemaProjector.project(logicalTypeSchema, null, namedSchema); fail("Reader name is not a valid logical type name."); } catch (SchemaProjectorException e) { } try { SchemaProjector.project(Schema.BOOLEAN_SCHEMA, null, logicalTypeSchema); fail("Cannot project non-logical types to logical types."); } catch (SchemaProjectorException e) { } } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testArrayProjection() throws Exception { Schema source = SchemaBuilder.array(Schema.INT32_SCHEMA).build(); Object projected = SchemaProjector.project(source, Arrays.asList(1, 2, 3), source); assertEquals(Arrays.asList(1, 2, 3), (List<Integer>) projected); Schema optionalSource = SchemaBuilder.array(Schema.INT32_SCHEMA).optional().build(); Schema target = SchemaBuilder.array(Schema.INT32_SCHEMA).defaultValue(Arrays.asList(1, 2, 3)).build(); projected = SchemaProjector.project(optionalSource, Arrays.asList(4, 5), target); assertEquals(Arrays.asList(4, 5), (List<Integer>) projected); projected = SchemaProjector.project(optionalSource, null, target); assertEquals(Arrays.asList(1, 2, 3), (List<Integer>) projected); Schema promotedTarget = SchemaBuilder.array(Schema.INT64_SCHEMA).defaultValue(Arrays.asList(1L, 2L, 3L)).build(); projected = SchemaProjector.project(optionalSource, Arrays.asList(4, 5), promotedTarget); List<Long> expectedProjected = Arrays.asList(4L, 5L); assertEquals(expectedProjected, (List<Long>) projected); projected = SchemaProjector.project(optionalSource, null, promotedTarget); assertEquals(Arrays.asList(1L, 2L, 3L), (List<Long>) projected); Schema noDefaultValueTarget = SchemaBuilder.array(Schema.INT32_SCHEMA).build(); try { SchemaProjector.project(optionalSource, null, noDefaultValueTarget); fail("Target schema does not provide a default value."); } catch (SchemaProjectorException e) { } Schema nonPromotableTarget = SchemaBuilder.array(Schema.BOOLEAN_SCHEMA).build(); try { SchemaProjector.project(optionalSource, null, nonPromotableTarget); fail("Neither source type matches target type nor source type can be promoted to target type"); } catch (SchemaProjectorException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testMapProjection() throws Exception { Schema source = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).optional().build(); Schema target = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).defaultValue(Collections.singletonMap(1, 2)).build(); Object projected = SchemaProjector.project(source, Collections.singletonMap(3, 4), target); assertEquals(Collections.singletonMap(3, 4), (Map<Integer, Integer>) projected); projected = SchemaProjector.project(source, null, target); assertEquals(Collections.singletonMap(1, 2), (Map<Integer, Integer>) projected); Schema promotedTarget = SchemaBuilder.map(Schema.INT64_SCHEMA, Schema.FLOAT32_SCHEMA).defaultValue( Collections.singletonMap(3L, 4.5F)).build(); projected = SchemaProjector.project(source, Collections.singletonMap(3, 4), promotedTarget); assertEquals(Collections.singletonMap(3L, 4.F), (Map<Long, Float>) projected); projected = SchemaProjector.project(source, null, promotedTarget); assertEquals(Collections.singletonMap(3L, 4.5F), (Map<Long, Float>) projected); Schema noDefaultValueTarget = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build(); try { SchemaProjector.project(source, null, noDefaultValueTarget); fail("Reader does not provide a default value."); } catch (SchemaProjectorException e) { } Schema nonPromotableTarget = SchemaBuilder.map(Schema.BOOLEAN_SCHEMA, Schema.STRING_SCHEMA).build(); try { SchemaProjector.project(source, null, nonPromotableTarget); fail("Neither source type matches target type nor source type can be promoted to target type"); } catch (SchemaProjectorException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testMaybeCompatible() throws Exception { Schema source = SchemaBuilder.int32().name("source").build(); Schema target = SchemaBuilder.int32().name("target").build(); try { SchemaProjector.project(source, 12, target); fail("Source name and target name mismatch."); } catch (SchemaProjectorException e) { } Schema targetWithParameters = SchemaBuilder.int32().parameters(Collections.singletonMap("key", "value")); try { SchemaProjector.project(source, 34, targetWithParameters); fail("Source parameters and target parameters mismatch."); } catch (SchemaProjectorException e) { } }
|
public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
SchemaProjector { public static Object project(Schema source, Object record, Schema target) throws SchemaProjectorException { checkMaybeCompatible(source, target); if (source.isOptional() && !target.isOptional()) { if (target.defaultValue() != null) { if (record != null) { return projectRequiredSchema(source, record, target); } else { return target.defaultValue(); } } else { throw new SchemaProjectorException("Writer schema is optional, however, target schema does not provide a default value."); } } else { if (record != null) { return projectRequiredSchema(source, record, target); } else { return null; } } } static Object project(Schema source, Object record, Schema target); }
|
@Test public void testGroupPartitions() { List<List<Integer>> grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 1); assertEquals(Arrays.asList(FIVE_ELEMENTS), grouped); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 2); assertEquals(Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(4, 5)), grouped); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 3); assertEquals(Arrays.asList(Arrays.asList(1, 2), Arrays.asList(3, 4), Arrays.asList(5)), grouped); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 5); assertEquals(Arrays.asList(Arrays.asList(1), Arrays.asList(2), Arrays.asList(3), Arrays.asList(4), Arrays.asList(5)), grouped); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 7); assertEquals(Arrays.asList(Arrays.asList(1), Arrays.asList(2), Arrays.asList(3), Arrays.asList(4), Arrays.asList(5), Collections.EMPTY_LIST, Collections.EMPTY_LIST), grouped); }
|
public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) { if (numGroups <= 0) throw new IllegalArgumentException("Number of groups must be positive."); List<List<T>> result = new ArrayList<>(numGroups); int perGroup = elements.size() / numGroups; int leftover = elements.size() - (numGroups * perGroup); int assigned = 0; for (int group = 0; group < numGroups; group++) { int numThisGroup = group < leftover ? perGroup + 1 : perGroup; List<T> groupList = new ArrayList<>(numThisGroup); for (int i = 0; i < numThisGroup; i++) { groupList.add(elements.get(assigned)); assigned++; } result.add(groupList); } return result; }
|
ConnectorUtils { public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) { if (numGroups <= 0) throw new IllegalArgumentException("Number of groups must be positive."); List<List<T>> result = new ArrayList<>(numGroups); int perGroup = elements.size() / numGroups; int leftover = elements.size() - (numGroups * perGroup); int assigned = 0; for (int group = 0; group < numGroups; group++) { int numThisGroup = group < leftover ? perGroup + 1 : perGroup; List<T> groupList = new ArrayList<>(numThisGroup); for (int i = 0; i < numThisGroup; i++) { groupList.add(elements.get(assigned)); assigned++; } result.add(groupList); } return result; } }
|
ConnectorUtils { public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) { if (numGroups <= 0) throw new IllegalArgumentException("Number of groups must be positive."); List<List<T>> result = new ArrayList<>(numGroups); int perGroup = elements.size() / numGroups; int leftover = elements.size() - (numGroups * perGroup); int assigned = 0; for (int group = 0; group < numGroups; group++) { int numThisGroup = group < leftover ? perGroup + 1 : perGroup; List<T> groupList = new ArrayList<>(numThisGroup); for (int i = 0; i < numThisGroup; i++) { groupList.add(elements.get(assigned)); assigned++; } result.add(groupList); } return result; } }
|
ConnectorUtils { public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) { if (numGroups <= 0) throw new IllegalArgumentException("Number of groups must be positive."); List<List<T>> result = new ArrayList<>(numGroups); int perGroup = elements.size() / numGroups; int leftover = elements.size() - (numGroups * perGroup); int assigned = 0; for (int group = 0; group < numGroups; group++) { int numThisGroup = group < leftover ? perGroup + 1 : perGroup; List<T> groupList = new ArrayList<>(numThisGroup); for (int i = 0; i < numThisGroup; i++) { groupList.add(elements.get(assigned)); assigned++; } result.add(groupList); } return result; } static List<List<T>> groupPartitions(List<T> elements, int numGroups); }
|
ConnectorUtils { public static <T> List<List<T>> groupPartitions(List<T> elements, int numGroups) { if (numGroups <= 0) throw new IllegalArgumentException("Number of groups must be positive."); List<List<T>> result = new ArrayList<>(numGroups); int perGroup = elements.size() / numGroups; int leftover = elements.size() - (numGroups * perGroup); int assigned = 0; for (int group = 0; group < numGroups; group++) { int numThisGroup = group < leftover ? perGroup + 1 : perGroup; List<T> groupList = new ArrayList<>(numThisGroup); for (int i = 0; i < numThisGroup; i++) { groupList.add(elements.get(assigned)); assigned++; } result.add(groupList); } return result; } static List<List<T>> groupPartitions(List<T> elements, int numGroups); }
|
@Test public void testStringToBytes() throws UnsupportedEncodingException { assertArrayEquals(SAMPLE_STRING.getBytes("UTF8"), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING)); }
|
@Override public byte[] fromConnectData(String topic, Schema schema, Object value) { try { return serializer.serialize(topic, value == null ? null : value.toString()); } catch (SerializationException e) { throw new DataException("Failed to serialize to a string: ", e); } }
|
StringConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { try { return serializer.serialize(topic, value == null ? null : value.toString()); } catch (SerializationException e) { throw new DataException("Failed to serialize to a string: ", e); } } }
|
StringConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { try { return serializer.serialize(topic, value == null ? null : value.toString()); } catch (SerializationException e) { throw new DataException("Failed to serialize to a string: ", e); } } StringConverter(); }
|
StringConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { try { return serializer.serialize(topic, value == null ? null : value.toString()); } catch (SerializationException e) { throw new DataException("Failed to serialize to a string: ", e); } } StringConverter(); @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); }
|
StringConverter implements Converter { @Override public byte[] fromConnectData(String topic, Schema schema, Object value) { try { return serializer.serialize(topic, value == null ? null : value.toString()); } catch (SerializationException e) { throw new DataException("Failed to serialize to a string: ", e); } } StringConverter(); @Override void configure(Map<String, ?> configs, boolean isKey); @Override byte[] fromConnectData(String topic, Schema schema, Object value); @Override SchemaAndValue toConnectData(String topic, byte[] value); }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.