instance_id (string, 17-39 chars) | repo (string, 8 classes) | issue_id (string, 14-34 chars) | pr_id (string, 14-34 chars) | linking_methods (sequence, 1-3 items) | base_commit (string, 40 chars) | merge_commit (string, 0-40 chars) | hints_text (sequence, 0-106 items) | resolved_comments (sequence, 0-119 items) | created_at (unknown) | labeled_as (sequence, 0-7 items) | problem_title (string, 7-174 chars) | problem_statement (string, 0-55.4k chars) | gold_files (sequence, 0-10 items) | gold_files_postpatch (sequence, 1-10 items) | test_files (sequence, 0-60 items) | gold_patch (string, 220-5.83M chars) | test_patch (string, 386-194k chars) | split_random (string, 3 classes) | split_time (string, 3 classes) | issue_start_time (timestamp[ns]) | issue_created_at (unknown) | issue_by_user (string, 3-21 chars) | split_repo (string, 3 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
provectus/kafka-ui/3127_3165 | provectus/kafka-ui | provectus/kafka-ui/3127 | provectus/kafka-ui/3165 | [
"connected"
] | 9fad0d0ee3c1d3e77eb37b4e510a8c0a77db66b1 | 87ffb4716a2c634f3e68c8ce5a2a2a0e61203ee5 | [] | [
"let's set selectItem() method return TopicList object to avoid separate calling next method from topicList object",
"don't we need to wait until new screen ready first?",
"Added.",
"Fixed."
] | "2022-12-28T12:44:54Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]TopicTests.copyTopic : Copy topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-8
Description:
Checking the possibility to copy a selected Topic from the 'All Topics' list
Pre-conditions
- Login to Kafka-ui application
- Open the 'Local' section
- Select the 'Topics'
Steps:
1)Check any Topic from All Topics' list
2)Click on 'Copy selected topic' button
3)Change the topic name
4)Press "Create topic" button
Expected results:
1)Make sure 'Delete selected topics', 'Copy selected topic', 'Purge messages of selected topics' are displayed
2)Should redirect to Copy Topic page with filled already existing data and disabled "Create topic" button
3)Create topic button should become active
4)Should redirect to newly created topic with displaying success message | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
index 871ecbb752f..f24df1e926a 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
@@ -70,6 +70,12 @@ public List<SelenideElement> getActionButtons() {
.collect(Collectors.toList());
}
+ @Step
+ public TopicsList clickCopySelectedTopicBtn(){
+ copySelectedTopicBtn.shouldBe(Condition.enabled).click();
+ return this;
+ }
+
private List<SelenideElement> getVisibleColumnHeaders() {
return Stream.of("Replication Factor","Number of messages","Topic Name", "Partitions", "Out of sync replicas", "Size")
.map(name -> $x(String.format(columnHeaderLocator, name)))
@@ -134,8 +140,9 @@ public TopicGridItem(SelenideElement element) {
}
@Step
- public void selectItem(boolean select) {
- selectElement(element.$x("./td[1]/input"), select);
+ public TopicsList selectItem(boolean select) {
+ selectElement(element.$x("./td[1]/input"), select);
+ return new TopicsList();
}
@Step
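
The review comments above asked for `selectItem()` to return the list page object so the next step can be chained directly, which is what the patch does. Below is a minimal sketch of that fluent page-object style; the class and bodies are simplified placeholders, not the project's real `TopicsList` implementation:

```java
// Simplified illustration of the fluent page-object pattern used above:
// every action returns a page object, so test steps chain without
// re-fetching the page between calls. Method bodies are placeholders.
public class TopicsListPage {

  public TopicsListPage selectItem(boolean select) {
    // toggle the row checkbox here (e.g. via Selenide), then stay on the list page
    return this;
  }

  public TopicsListPage clickCopySelectedTopicBtn() {
    // click the "Copy selected topic" button here
    return this;
  }
}

// Usage in a test then reads as one chain:
// new TopicsListPage().selectItem(true).clickCopySelectedTopicBtn();
```
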
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
index 3a48f9f962d..9290c0af3b4 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
@@ -34,7 +34,7 @@
@Slf4j
@DisplayNameGeneration(DisplayNameGenerator.class)
-public class BaseTest extends Facade {
+public abstract class BaseTest extends Facade {
private static final String SELENIUM_IMAGE_NAME = "selenium/standalone-chrome:103.0";
private static final String SELENIARM_STANDALONE_CHROMIUM = "seleniarm/standalone-chromium:103.0";
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index 01eff46b980..89857df9675 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -471,6 +471,38 @@ void recreateTopicFromTopicProfile(){
.as("isAlertWithMessageVisible()").isTrue();
}
+ @DisplayName("TopicTests.copyTopic : Copy topic")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(8)
+ @Test
+ void checkCopyTopicPossibility(){
+ Topic topicToCopy = new Topic()
+ .setName("topic-to-copy-" + randomAlphabetic(5))
+ .setNumberOfPartitions(1);
+ navigateToTopics();
+ topicsList
+ .getTopicItem("_schemas")
+ .selectItem(true)
+ .clickCopySelectedTopicBtn();
+ topicCreateEditForm
+ .waitUntilScreenReady();
+ assertThat(topicCreateEditForm.isCreateTopicButtonEnabled()).as("isCreateTopicButtonEnabled()").isFalse();
+ topicCreateEditForm
+ .setTopicName(topicToCopy.getName())
+ .setNumberOfPartitions(topicToCopy.getNumberOfPartitions())
+ .clickCreateTopicBtn();
+ topicDetails
+ .waitUntilScreenReady();
+ TOPIC_LIST.add(topicToCopy);
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."))
+ .as("isAlertWithMessageVisible()").isTrue();
+ softly.assertThat(topicDetails.isTopicHeaderVisible(topicToCopy.getName()))
+ .as("isTopicHeaderVisible()").isTrue();
+ softly.assertAll();
+ }
+
@AfterAll
public void afterAll() {
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
| train | val | 2022-12-29T11:45:18 | "2022-12-23T14:06:46Z" | ArthurNiedial | train |
provectus/kafka-ui/3180_3185 | provectus/kafka-ui | provectus/kafka-ui/3180 | provectus/kafka-ui/3185 | [
"keyword_pr_to_issue"
] | eef63466fb3aee13a63d8071b1abf3de425f3f2b | 9b87d3829b024ba89c28dec97394013a3c83973b | [
"Hello there Nawarix! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"@Nawarix try this \r\n\r\nAUTH_TYPE: OAUTH2\r\nKAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: bootstrap:9092\r\nKAFKA_CLUSTERS_0_NAME: your-cluster\r\nKAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://apicurioregistry:8080/apis/ccompat/v6\r\nKAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD: XXXXXXXXXXXXXX\r\nKAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME: registry-api\r\nSPRING_SECURITY_OAUTH2_CLIENT_PROVIDER_AUTH0_ISSUER_URI: https://domain/auth/realms/myrealm\r\nSPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTID: kafka-ui\r\nSPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_CLIENTSECRET: XXXXXXXXXXXXXXXXXXXX\r\nSPRING_SECURITY_OAUTH2_CLIENT_REGISTRATION_AUTH0_SCOPE: openid\r\n\r\nWorks for me.",
"Hi, please take a look at the \"breaking changes\" block at the release page:\r\nhttps://github.com/provectus/kafka-ui/releases/tag/v0.5.0\r\n\r\nLet me know how it goes.",
"@sookeke those were my configs before v0.5.0 and they work just fine, but when I updated the software I got an exception\r\n\r\n@Haarolean That what I tried to do with my last configs\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_CLIENTID: kafkaui\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_CLIENTSECRET: XXXXXXXXXXXXXXXXXXXXXXXXXXx\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_PROVIDER: keycloak\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_CUSTOM_PARAMS_TYPE: keycloak\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_CUSTOM_PARAMS_LOGOUTURL: XXXXXXXXXXXXXXXXXXXX\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_ISSUER_URI: XXXXXXXXXXXXXXXXXXXXX\r\nAUTH_OAUTH2_CLIENT_KEYCLOAK_SCOPE: openid\r\n\r\nBut maybe those changes not reflected on Environment variables?",
"@Nawarix that's a nemesis of env vars as configs :/\r\nPlease replace `CUSTOM_PARAMS` with `CUSTOM-PARAMS`, a dash instead of an underscore. \r\nAlso, you don't have to type them in caps here.\r\n\r\nAlso, I've adjusted some code within #3185 to prevent these pesky errors in case you don't need/have/want to specify custom params at all (like, for keycloak).",
"@Haarolean It worked like charm, I thought I have to treated like issuer-uri\r\nThanks for you support"
] | [] | "2023-01-01T14:15:56Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | [BE] NullPointerException OAuthProperties$OAuth2Provider.getCustomParams() is null | **Describe the bug**
Trying to authenticate kafka-ui with keycloak. Before v0.5.0 everything was working fine; when I updated kafka-ui to version v0.5.0 and changed the variables' level as mentioned in the version description, I got this exception
```
org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'OAuthLogoutSuccessHandler' defined in URL [jar:file:/kafka-ui-api.jar!/BOOT-INF/classes!/com/provectus/kafka/ui/config/auth/logout/OAuthLogoutSuccessHandler.class]: Unsatisfied dependency expressed through constructor parameter 2; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'defaultOidcLogoutHandler' defined in class path resource [com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.class]: Unsatisfied dependency expressed through method 'defaultOidcLogoutHandler' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'clientRegistrationRepository' defined in class path resource [com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.security.oauth2.client.registration.InMemoryReactiveClientRegistrationRepository]: Factory method 'clientRegistrationRepository' threw exception; nested exception is java.lang.NullPointerException: Cannot invoke "java.util.Map.get(Object)" because the return value of "com.provectus.kafka.ui.config.auth.OAuthProperties$OAuth2Provider.getCustomParams()" is null
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:800)
at org.springframework.beans.factory.support.ConstructorResolver.autowireConstructor(ConstructorResolver.java:229)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.autowireConstructor(AbstractAutowireCapableBeanFactory.java:1372)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1222)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.preInstantiateSingletons(DefaultListableBeanFactory.java:955)
at org.springframework.context.support.AbstractApplicationContext.finishBeanFactoryInitialization(AbstractApplicationContext.java:918)
at org.springframework.context.support.AbstractApplicationContext.refresh(AbstractApplicationContext.java:583)
at org.springframework.boot.web.reactive.context.ReactiveWebServerApplicationContext.refresh(ReactiveWebServerApplicationContext.java:66)
at org.springframework.boot.SpringApplication.refresh(SpringApplication.java:734)
at org.springframework.boot.SpringApplication.refreshContext(SpringApplication.java:408)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:308)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1306)
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1295)
at com.provectus.kafka.ui.KafkaUiApplication.main(KafkaUiApplication.java:15)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:568)
at org.springframework.boot.loader.MainMethodRunner.run(MainMethodRunner.java:49)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:108)
at org.springframework.boot.loader.Launcher.launch(Launcher.java:58)
at org.springframework.boot.loader.JarLauncher.main(JarLauncher.java:65)
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'defaultOidcLogoutHandler' defined in class path resource [com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.class]: Unsatisfied dependency expressed through method 'defaultOidcLogoutHandler' parameter 0; nested exception is org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'clientRegistrationRepository' defined in class path resource [com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.security.oauth2.client.registration.InMemoryReactiveClientRegistrationRepository]: Factory method 'clientRegistrationRepository' threw exception; nested exception is java.lang.NullPointerException: Cannot invoke "java.util.Map.get(Object)" because the return value of "com.provectus.kafka.ui.config.auth.OAuthProperties$OAuth2Provider.getCustomParams()" is null
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:800)
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:541)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:887)
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:791)
... 27 common frames omitted
Caused by: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'clientRegistrationRepository' defined in class path resource [com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.class]: Bean instantiation via factory method failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.security.oauth2.client.registration.InMemoryReactiveClientRegistrationRepository]: Factory method 'clientRegistrationRepository' threw exception; nested exception is java.lang.NullPointerException: Cannot invoke "java.util.Map.get(Object)" because the return value of "com.provectus.kafka.ui.config.auth.OAuthProperties$OAuth2Provider.getCustomParams()" is null
at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:658)
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:486)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1352)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1195)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:582)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:542)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$0(AbstractBeanFactory.java:335)
at org.springframework.beans.factory.support.DefaultSingletonBeanRegistry.getSingleton(DefaultSingletonBeanRegistry.java:234)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:333)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:208)
at org.springframework.beans.factory.config.DependencyDescriptor.resolveCandidate(DependencyDescriptor.java:276)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.doResolveDependency(DefaultListableBeanFactory.java:1391)
at org.springframework.beans.factory.support.DefaultListableBeanFactory.resolveDependency(DefaultListableBeanFactory.java:1311)
at org.springframework.beans.factory.support.ConstructorResolver.resolveAutowiredArgument(ConstructorResolver.java:887)
at org.springframework.beans.factory.support.ConstructorResolver.createArgumentArray(ConstructorResolver.java:791)
... 41 common frames omitted
Caused by: org.springframework.beans.BeanInstantiationException: Failed to instantiate [org.springframework.security.oauth2.client.registration.InMemoryReactiveClientRegistrationRepository]: Factory method 'clientRegistrationRepository' threw exception; nested exception is java.lang.NullPointerException: Cannot invoke "java.util.Map.get(Object)" because the return value of "com.provectus.kafka.ui.config.auth.OAuthProperties$OAuth2Provider.getCustomParams()" is null
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:185)
at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:653)
... 55 common frames omitted
Caused by: java.lang.NullPointerException: Cannot invoke "java.util.Map.get(Object)" because the return value of "com.provectus.kafka.ui.config.auth.OAuthProperties$OAuth2Provider.getCustomParams()" is null
```
**Set up**
1. docker deployment
2. Using env variables to configure kafka-ui with keycloak
AUTH_OAUTH2_CLIENT_KEYCLOAK_CLIENTID: kafkaui
AUTH_OAUTH2_CLIENT_KEYCLOAK_CLIENTSECRET: XXXXXXXXXXXXXXXXXXXXXXXXXXx
AUTH_OAUTH2_CLIENT_KEYCLOAK_PROVIDER: keycloak
AUTH_OAUTH2_CLIENT_KEYCLOAK_CUSTOM_PARAMS_TYPE: keycloak
AUTH_OAUTH2_CLIENT_KEYCLOAK_CUSTOM_PARAMS_LOGOUTURL: XXXXXXXXXXXXXXXXXXXX
AUTH_OAUTH2_CLIENT_KEYCLOAK_ISSUER_URI: XXXXXXXXXXXXXXXXXXXXX
AUTH_OAUTH2_CLIENT_KEYCLOAK_SCOPE: openid
**Steps to Reproduce**
1. Configure the container
2. Run the container with the previous configs
**Expected behavior**
run normally with oauth
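
The fix merged for this issue (shown in the patch below) defaults the optional collections and flips the `equalsIgnoreCase` comparison, so a provider configured without custom params can no longer trigger the NPE. A minimal sketch of that null-safe pattern, with the class trimmed down to the relevant fields:

```java
import java.util.HashMap;
import java.util.Map;

public class OAuth2Provider {

  // defaulting to an empty map means providers configured without
  // custom params (e.g. a plain keycloak setup) never yield null here
  private Map<String, String> customParams = new HashMap<>();

  public Map<String, String> getCustomParams() {
    return customParams;
  }

  // constant-first equalsIgnoreCase is null-safe: if the "type" param
  // is absent, the check simply returns false instead of throwing
  public boolean isGoogle() {
    return "google".equalsIgnoreCase(customParams.get("type"));
  }
}
```
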
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
index f79d217fa79..db192ae826b 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.config.auth;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.annotation.PostConstruct;
@@ -31,13 +32,13 @@ public static class OAuth2Provider {
private String clientName;
private String redirectUri;
private String authorizationGrantType;
- private Set<String> scope;
+ private Set<String> scope = new HashSet<>();
private String issuerUri;
private String authorizationUri;
private String tokenUri;
private String userInfoUri;
private String jwkSetUri;
private String userNameAttribute;
- private Map<String, String> customParams;
+ private Map<String, String> customParams = new HashMap<>();
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
index c3d20664914..8e4a8575a8c 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthPropertiesConverter.java
@@ -71,7 +71,7 @@ private static void applyGoogleTransformations(OAuth2Provider provider) {
}
private static boolean isGoogle(OAuth2Provider provider) {
- return provider.getCustomParams().get(TYPE).equalsIgnoreCase(GOOGLE);
+ return GOOGLE.equalsIgnoreCase(provider.getCustomParams().get(TYPE));
}
}
| null | train | val | 2023-01-03T11:52:28 | "2022-12-29T11:45:36Z" | Nawarix | train |
provectus/kafka-ui/3176_3188 | provectus/kafka-ui | provectus/kafka-ui/3176 | provectus/kafka-ui/3188 | [
"connected"
] | 57585891d16714fe4ea662f7b4c7c868ba4b77f7 | 578468d09023c9898c14774f8a827058b4fb68d0 | [
"Thanks, we'll take a look at this",
"@EI-Joao hey, can you try pulling `public.ecr.aws/provectus/kafka-ui-custom-build:3188` image? Is it any way better?",
"Hi @Haarolean \r\nIt is much better now taking 3/4 seconds π\r\nIt would be really helpful if we could get the same for this one [https://github.com/provectus/kafka-ui/issues/3148](url)",
"@EI-Joao thanks for confirmation! How many consumer groups do you have btw?",
"The cluster that has more consumer groups is having +-220. It is not a lot but to describe them all every time a page is changed takes some time π"
] | [
"try to generalize"
] | "2023-01-03T12:25:25Z" | [
"type/bug",
"good first issue",
"scope/backend",
"status/accepted"
] | Consumers: performance loading consumers | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
When loading consumers it takes 30/35 seconds. This time is related to the number of consumers.
E.g. if a cluster has x consumers it will take y time, and if a cluster has x1 consumers it will take y1 time.
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
version v0.5.0
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Add a cluster
2. Go to Cluster -> Consumers
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
When loading the consumers for a page, only the consumers on that page should be loaded in the backend.
E.g. if a cluster has 1000 consumers we can not load 1000 consumers every time a page is changed.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->
**Additional context**
<!--
(Add any other context about the problem here)
-->
This is applicable to the other pages that have pagination e.g. topics
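
The patch below takes exactly this approach: the full group listing is sorted first, and only the requested page of groups is then described and enriched with offsets. A rough sketch of the sort/skip/limit step in isolation (generic types rather than the project's classes):

```java
import java.util.Comparator;
import java.util.List;

public final class Pagination {

  private Pagination() {
  }

  // Keep only the items of the requested 1-based page, so the expensive
  // per-item lookups (describe calls, offset fetches) run for one page at a time.
  public static <T> List<T> page(List<T> items, Comparator<T> order, int pageNum, int perPage) {
    return items.stream()
        .sorted(order)
        .skip((long) (pageNum - 1) * perPage)
        .limit(perPage)
        .toList();
  }
}
```
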
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java
index afea878fa8a..fd7505d99bd 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ConsumerGroupsController.java
@@ -189,8 +189,8 @@ public Mono<ResponseEntity<Void>> resetConsumerGroupOffsets(String clusterName,
private ConsumerGroupsPageResponseDTO convertPage(ConsumerGroupService.ConsumerGroupsPage
consumerGroupConsumerGroupsPage) {
return new ConsumerGroupsPageResponseDTO()
- .pageCount(consumerGroupConsumerGroupsPage.getTotalPages())
- .consumerGroups(consumerGroupConsumerGroupsPage.getConsumerGroups()
+ .pageCount(consumerGroupConsumerGroupsPage.totalPages())
+ .consumerGroups(consumerGroupConsumerGroupsPage.consumerGroups()
.stream()
.map(ConsumerGroupMapper::toDto)
.collect(Collectors.toList()));
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
index 75c0a99039b..9f7e32ed148 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
@@ -89,13 +89,17 @@ private static <T extends ConsumerGroupDTO> T convertToConsumerGroup(
.flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
).collect(Collectors.toSet()).size();
- long messagesBehind = c.getOffsets().entrySet().stream()
- .mapToLong(e ->
- Optional.ofNullable(c.getEndOffsets())
- .map(o -> o.get(e.getKey()))
- .map(o -> o - e.getValue())
- .orElse(0L)
- ).sum();
+ Long messagesBehind = null;
+ // messagesBehind should be undefined if no committed offsets found for topic
+ if (!c.getOffsets().isEmpty()) {
+ messagesBehind = c.getOffsets().entrySet().stream()
+ .mapToLong(e ->
+ Optional.ofNullable(c.getEndOffsets())
+ .map(o -> o.get(e.getKey()))
+ .map(o -> o - e.getValue())
+ .orElse(0L)
+ ).sum();
+ }
consumerGroup.setMessagesBehind(messagesBehind);
consumerGroup.setTopics(numTopics);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index e2a9592f7e9..1a74914ff43 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.service;
+import com.google.common.collect.Table;
import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
import com.provectus.kafka.ui.model.InternalConsumerGroup;
import com.provectus.kafka.ui.model.InternalTopicConsumerGroup;
@@ -7,6 +8,7 @@
import com.provectus.kafka.ui.model.SortOrderDTO;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -14,22 +16,21 @@
import java.util.Properties;
import java.util.function.ToIntFunction;
import java.util.stream.Collectors;
+import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
-import lombok.Value;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
+import org.apache.kafka.clients.admin.ConsumerGroupListing;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.ConsumerGroupState;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.stereotype.Service;
-import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
@Service
@RequiredArgsConstructor
@@ -41,21 +42,16 @@ public class ConsumerGroupService {
private Mono<List<InternalConsumerGroup>> getConsumerGroups(
ReactiveAdminClient ac,
List<ConsumerGroupDescription> descriptions) {
- return Flux.fromIterable(descriptions)
- // 1. getting committed offsets for all groups
- .flatMap(desc -> ac.listConsumerGroupOffsets(desc.groupId())
- .map(offsets -> Tuples.of(desc, offsets)))
- .collectMap(Tuple2::getT1, Tuple2::getT2)
- .flatMap((Map<ConsumerGroupDescription, Map<TopicPartition, Long>> groupOffsetsMap) -> {
- var tpsFromGroupOffsets = groupOffsetsMap.values().stream()
- .flatMap(v -> v.keySet().stream())
- .collect(Collectors.toSet());
+ var groupNames = descriptions.stream().map(ConsumerGroupDescription::groupId).toList();
+ // 1. getting committed offsets for all groups
+ return ac.listConsumerGroupOffsets(groupNames, null)
+ .flatMap((Table<String, TopicPartition, Long> committedOffsets) -> {
// 2. getting end offsets for partitions with committed offsets
- return ac.listOffsets(tpsFromGroupOffsets, OffsetSpec.latest(), false)
+ return ac.listOffsets(committedOffsets.columnKeySet(), OffsetSpec.latest(), false)
.map(endOffsets ->
descriptions.stream()
.map(desc -> {
- var groupOffsets = groupOffsetsMap.get(desc);
+ var groupOffsets = committedOffsets.row(desc.groupId());
var endOffsetsForGroup = new HashMap<>(endOffsets);
endOffsetsForGroup.keySet().retainAll(groupOffsets.keySet());
// 3. gathering description & offsets
@@ -73,105 +69,122 @@ public Mono<List<InternalTopicConsumerGroup>> getConsumerGroupsForTopic(KafkaClu
.flatMap(endOffsets -> {
var tps = new ArrayList<>(endOffsets.keySet());
// 2. getting all consumer groups
- return describeConsumerGroups(ac, null)
- .flatMap((List<ConsumerGroupDescription> groups) ->
- Flux.fromIterable(groups)
- // 3. for each group trying to find committed offsets for topic
- .flatMap(g ->
- ac.listConsumerGroupOffsets(g.groupId(), tps)
- // 4. keeping only groups that relates to topic
- .filter(offsets -> isConsumerGroupRelatesToTopic(topic, g, offsets))
- // 5. constructing results
- .map(offsets -> InternalTopicConsumerGroup.create(topic, g, offsets, endOffsets))
- ).collectList());
+ return describeConsumerGroups(ac)
+ .flatMap((List<ConsumerGroupDescription> groups) -> {
+ // 3. trying to find committed offsets for topic
+ var groupNames = groups.stream().map(ConsumerGroupDescription::groupId).toList();
+ return ac.listConsumerGroupOffsets(groupNames, tps).map(offsets ->
+ groups.stream()
+ // 4. keeping only groups that relates to topic
+ .filter(g -> isConsumerGroupRelatesToTopic(topic, g, offsets.containsRow(g.groupId())))
+ .map(g ->
+ // 5. constructing results
+ InternalTopicConsumerGroup.create(topic, g, offsets.row(g.groupId()), endOffsets))
+ .toList()
+ );
+ }
+ );
}));
}
private boolean isConsumerGroupRelatesToTopic(String topic,
ConsumerGroupDescription description,
- Map<TopicPartition, Long> committedGroupOffsetsForTopic) {
+ boolean hasCommittedOffsets) {
boolean hasActiveMembersForTopic = description.members()
.stream()
.anyMatch(m -> m.assignment().topicPartitions().stream().anyMatch(tp -> tp.topic().equals(topic)));
- boolean hasCommittedOffsets = !committedGroupOffsetsForTopic.isEmpty();
return hasActiveMembersForTopic || hasCommittedOffsets;
}
- @Value
- public static class ConsumerGroupsPage {
- List<InternalConsumerGroup> consumerGroups;
- int totalPages;
+ public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
}
public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
KafkaCluster cluster,
- int page,
+ int pageNum,
int perPage,
@Nullable String search,
ConsumerGroupOrderingDTO orderBy,
SortOrderDTO sortOrderDto) {
- var comparator = sortOrderDto.equals(SortOrderDTO.ASC)
- ? getPaginationComparator(orderBy)
- : getPaginationComparator(orderBy).reversed();
return adminClientService.get(cluster).flatMap(ac ->
- describeConsumerGroups(ac, search).flatMap(descriptions ->
- getConsumerGroups(
- ac,
- descriptions.stream()
- .sorted(comparator)
- .skip((long) (page - 1) * perPage)
- .limit(perPage)
- .collect(Collectors.toList())
+ ac.listConsumerGroups()
+ .map(listing -> search == null
+ ? listing
+ : listing.stream()
+ .filter(g -> StringUtils.containsIgnoreCase(g.groupId(), search))
+ .toList()
)
- .flatMapMany(Flux::fromIterable)
- .filterWhen(
- cg -> accessControlService.isConsumerGroupAccessible(cg.getGroupId(), cluster.getName()))
- .collect(Collectors.toList())
- .map(cgs -> new ConsumerGroupsPage(
- cgs,
- (descriptions.size() / perPage) + (descriptions.size() % perPage == 0 ? 0 : 1))))
- );
+ .flatMapIterable(lst -> lst)
+ .filterWhen(cg -> accessControlService.isConsumerGroupAccessible(cg.groupId(), cluster.getName()))
+ .collectList()
+ .flatMap(allGroups ->
+ loadSortedDescriptions(ac, allGroups, pageNum, perPage, orderBy, sortOrderDto)
+ .flatMap(descriptions -> getConsumerGroups(ac, descriptions)
+ .map(page -> new ConsumerGroupsPage(
+ page,
+ (allGroups.size() / perPage) + (allGroups.size() % perPage == 0 ? 0 : 1))))));
}
- private Comparator<ConsumerGroupDescription> getPaginationComparator(ConsumerGroupOrderingDTO
- orderBy) {
- switch (orderBy) {
- case NAME:
- return Comparator.comparing(ConsumerGroupDescription::groupId);
- case STATE:
- ToIntFunction<ConsumerGroupDescription> statesPriorities = cg -> {
- switch (cg.state()) {
- case STABLE:
- return 0;
- case COMPLETING_REBALANCE:
- return 1;
- case PREPARING_REBALANCE:
- return 2;
- case EMPTY:
- return 3;
- case DEAD:
- return 4;
- case UNKNOWN:
- return 5;
- default:
- return 100;
- }
- };
- return Comparator.comparingInt(statesPriorities);
- case MEMBERS:
- return Comparator.comparingInt(cg -> cg.members().size());
- default:
- throw new IllegalStateException("Unsupported order by: " + orderBy);
- }
+ private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdminClient ac,
+ List<ConsumerGroupListing> groups,
+ int pageNum,
+ int perPage,
+ ConsumerGroupOrderingDTO orderBy,
+ SortOrderDTO sortOrderDto) {
+ return switch (orderBy) {
+ case NAME -> {
+ Comparator<ConsumerGroupListing> comparator = Comparator.comparing(ConsumerGroupListing::groupId);
+ yield loadDescriptionsByListings(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+ }
+ case STATE -> {
+ ToIntFunction<ConsumerGroupListing> statesPriorities =
+ cg -> switch (cg.state().orElse(ConsumerGroupState.UNKNOWN)) {
+ case STABLE -> 0;
+ case COMPLETING_REBALANCE -> 1;
+ case PREPARING_REBALANCE -> 2;
+ case EMPTY -> 3;
+ case DEAD -> 4;
+ case UNKNOWN -> 5;
+ };
+ var comparator = Comparator.comparingInt(statesPriorities);
+ yield loadDescriptionsByListings(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+ }
+ case MEMBERS -> {
+ var comparator = Comparator.<ConsumerGroupDescription>comparingInt(cg -> cg.members().size());
+ var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+ yield ac.describeConsumerGroups(groupNames)
+ .map(descriptions ->
+ sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
+ }
+ };
}
- private Mono<List<ConsumerGroupDescription>> describeConsumerGroups(ReactiveAdminClient ac,
- @Nullable String search) {
- return ac.listConsumerGroups()
- .map(groupIds -> groupIds
- .stream()
- .filter(groupId -> search == null || StringUtils.containsIgnoreCase(groupId, search))
- .collect(Collectors.toList()))
+ private Mono<List<ConsumerGroupDescription>> loadDescriptionsByListings(ReactiveAdminClient ac,
+ List<ConsumerGroupListing> listings,
+ Comparator<ConsumerGroupListing> comparator,
+ int pageNum,
+ int perPage,
+ SortOrderDTO sortOrderDto) {
+ List<String> sortedGroups = sortAndPaginate(listings, comparator, pageNum, perPage, sortOrderDto)
+ .map(ConsumerGroupListing::groupId)
+ .toList();
+ return ac.describeConsumerGroups(sortedGroups)
+ .map(descrMap -> sortedGroups.stream().map(descrMap::get).toList());
+ }
+
+ private <T> Stream<T> sortAndPaginate(Collection<T> collection,
+ Comparator<T> comparator,
+ int pageNum,
+ int perPage,
+ SortOrderDTO sortOrderDto) {
+ return collection.stream()
+ .sorted(sortOrderDto == SortOrderDTO.ASC ? comparator : comparator.reversed())
+ .skip((long) (pageNum - 1) * perPage)
+ .limit(perPage);
+ }
+
+ private Mono<List<ConsumerGroupDescription>> describeConsumerGroups(ReactiveAdminClient ac) {
+ return ac.listConsumerGroupNames()
.flatMap(ac::describeConsumerGroups)
.map(cgs -> new ArrayList<>(cgs.values()));
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java
index 36b812473e1..67fc268d428 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/OffsetsResetService.java
@@ -98,7 +98,7 @@ private Mono<ReactiveAdminClient> checkGroupCondition(KafkaCluster cluster, Stri
.flatMap(ac ->
// we need to call listConsumerGroups() to check group existence, because
// describeConsumerGroups() will return consumer group even if it doesn't exist
- ac.listConsumerGroups()
+ ac.listConsumerGroupNames()
.filter(cgs -> cgs.stream().anyMatch(g -> g.equals(groupId)))
.flatMap(cgs -> ac.describeConsumerGroups(List.of(groupId)))
.filter(cgs -> cgs.containsKey(groupId))
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index 1ffbd429180..b24180fa48d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -4,12 +4,12 @@
import static java.util.stream.Collectors.toMap;
import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Iterators;
+import com.google.common.collect.ImmutableTable;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Table;
import com.provectus.kafka.ui.exception.IllegalEntityStateException;
import com.provectus.kafka.ui.exception.NotFoundException;
import com.provectus.kafka.ui.exception.ValidationException;
-import com.provectus.kafka.ui.util.MapUtil;
import com.provectus.kafka.ui.util.NumberUtil;
import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant;
import java.io.Closeable;
@@ -18,7 +18,6 @@
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -45,7 +44,7 @@
import org.apache.kafka.clients.admin.DescribeClusterOptions;
import org.apache.kafka.clients.admin.DescribeClusterResult;
import org.apache.kafka.clients.admin.DescribeConfigsOptions;
-import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsOptions;
+import org.apache.kafka.clients.admin.ListConsumerGroupOffsetsSpec;
import org.apache.kafka.clients.admin.ListOffsetsResult;
import org.apache.kafka.clients.admin.ListTopicsOptions;
import org.apache.kafka.clients.admin.NewPartitionReassignment;
@@ -54,7 +53,6 @@
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.admin.RecordsToDelete;
import org.apache.kafka.clients.admin.TopicDescription;
-import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.KafkaFuture;
@@ -69,6 +67,7 @@
import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
+import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;
import reactor.util.function.Tuple2;
@@ -183,7 +182,7 @@ public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> t
topicNames,
200,
part -> getTopicsConfigImpl(part, includeDocFixed),
- (m1, m2) -> ImmutableMap.<String, List<ConfigEntry>>builder().putAll(m1).putAll(m2).build()
+ mapMerger()
);
}
@@ -236,7 +235,7 @@ public Mono<Map<String, TopicDescription>> describeTopics(Collection<String> top
topics,
200,
this::describeTopicsImpl,
- (m1, m2) -> ImmutableMap.<String, TopicDescription>builder().putAll(m1).putAll(m2).build()
+ mapMerger()
);
}
@@ -383,32 +382,57 @@ public Mono<Void> updateTopicConfig(String topicName, Map<String, String> config
}
}
- public Mono<List<String>> listConsumerGroups() {
- return toMono(client.listConsumerGroups().all())
- .map(lst -> lst.stream().map(ConsumerGroupListing::groupId).collect(toList()));
+ public Mono<List<String>> listConsumerGroupNames() {
+ return listConsumerGroups().map(lst -> lst.stream().map(ConsumerGroupListing::groupId).toList());
}
- public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(Collection<String> groupIds) {
- return toMono(client.describeConsumerGroups(groupIds).all());
+ public Mono<Collection<ConsumerGroupListing>> listConsumerGroups() {
+ return toMono(client.listConsumerGroups().all());
}
- public Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(String groupId) {
- return listConsumerGroupOffsets(groupId, new ListConsumerGroupOffsetsOptions());
+ public Mono<Map<String, ConsumerGroupDescription>> describeConsumerGroups(Collection<String> groupIds) {
+ return partitionCalls(
+ groupIds,
+ 25,
+ 4,
+ ids -> toMono(client.describeConsumerGroups(ids).all()),
+ mapMerger()
+ );
}
- public Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(
- String groupId, List<TopicPartition> partitions) {
- return listConsumerGroupOffsets(groupId,
- new ListConsumerGroupOffsetsOptions().topicPartitions(partitions));
- }
+ // group -> partition -> offset
+ // NOTE: partitions with no committed offsets will be skipped
+ public Mono<Table<String, TopicPartition, Long>> listConsumerGroupOffsets(List<String> consumerGroups,
+ // all partitions if null passed
+ @Nullable List<TopicPartition> partitions) {
+ Function<Collection<String>, Mono<Map<String, Map<TopicPartition, OffsetAndMetadata>>>> call =
+ groups -> toMono(
+ client.listConsumerGroupOffsets(
+ groups.stream()
+ .collect(Collectors.toMap(
+ g -> g,
+ g -> new ListConsumerGroupOffsetsSpec().topicPartitions(partitions)
+ ))).all()
+ );
+
+ Mono<Map<String, Map<TopicPartition, OffsetAndMetadata>>> merged = partitionCalls(
+ consumerGroups,
+ 25,
+ 4,
+ call,
+ mapMerger()
+ );
- private Mono<Map<TopicPartition, Long>> listConsumerGroupOffsets(
- String groupId, ListConsumerGroupOffsetsOptions options) {
- return toMono(client.listConsumerGroupOffsets(groupId, options).partitionsToOffsetAndMetadata())
- .map(MapUtil::removeNullValues)
- .map(m -> m.entrySet().stream()
- .map(e -> Tuples.of(e.getKey(), e.getValue().offset()))
- .collect(Collectors.toMap(Tuple2::getT1, Tuple2::getT2)));
+ return merged.map(map -> {
+ var table = ImmutableTable.<String, TopicPartition, Long>builder();
+ map.forEach((g, tpOffsets) -> tpOffsets.forEach((tp, offset) -> {
+ if (offset != null) {
+ // offset will be null for partitions that don't have committed offset for this group
+ table.put(g, tp, offset.offset());
+ }
+ }));
+ return table.build();
+ });
}
public Mono<Void> alterConsumerGroupOffsets(String groupId, Map<TopicPartition, Long> offsets) {
@@ -501,7 +525,7 @@ public Mono<Map<TopicPartition, Long>> listOffsetsUnsafe(Collection<TopicPartiti
partitions,
200,
call,
- (m1, m2) -> ImmutableMap.<TopicPartition, Long>builder().putAll(m1).putAll(m2).build()
+ mapMerger()
);
}
@@ -551,7 +575,7 @@ private Mono<Void> alterConfig(String topicName, Map<String, String> configs) {
}
/**
- * Splits input collection into batches, applies each batch sequentially to function
+ * Splits input collection into batches, converts each batch into Mono, sequentially subscribes to them
* and merges output Monos into one Mono.
*/
private static <R, I> Mono<R> partitionCalls(Collection<I> items,
@@ -561,14 +585,37 @@ private static <R, I> Mono<R> partitionCalls(Collection<I> items,
if (items.isEmpty()) {
return call.apply(items);
}
- Iterator<List<I>> parts = Iterators.partition(items.iterator(), partitionSize);
- Mono<R> mono = call.apply(parts.next());
- while (parts.hasNext()) {
- var nextPart = parts.next();
- // calls will be executed sequentially
- mono = mono.flatMap(res1 -> call.apply(nextPart).map(res2 -> merger.apply(res1, res2)));
+ Iterable<List<I>> parts = Iterables.partition(items, partitionSize);
+ return Flux.fromIterable(parts)
+ .concatMap(call)
+ .reduce(merger);
+ }
+
+ /**
+ * Splits input collection into batches, converts each batch into Mono, subscribes to them (concurrently,
+ * with specified concurrency level) and merges output Monos into one Mono.
+ */
+ private static <R, I> Mono<R> partitionCalls(Collection<I> items,
+ int partitionSize,
+ int concurrency,
+ Function<Collection<I>, Mono<R>> call,
+ BiFunction<R, R, R> merger) {
+ if (items.isEmpty()) {
+ return call.apply(items);
}
- return mono;
+ Iterable<List<I>> parts = Iterables.partition(items, partitionSize);
+ return Flux.fromIterable(parts)
+ .flatMap(call, concurrency)
+ .reduce(merger);
+ }
+
+ private static <K, V> BiFunction<Map<K, V>, Map<K, V>, Map<K, V>> mapMerger() {
+ return (m1, m2) -> {
+ var merged = new HashMap<K, V>();
+ merged.putAll(m1);
+ merged.putAll(m2);
+ return merged;
+ };
}
@Override
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java
index d1a5c035ee6..e69de29bb2d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/MapUtil.java
@@ -1,21 +0,0 @@
-package com.provectus.kafka.ui.util;
-
-import java.util.Map;
-import java.util.stream.Collectors;
-
-public class MapUtil {
-
- private MapUtil() {
- }
-
- public static <K, V> Map<K, V> removeNullValues(Map<K, V> map) {
- return map.entrySet().stream()
- .filter(e -> e.getValue() != null)
- .collect(
- Collectors.toMap(
- Map.Entry::getKey,
- Map.Entry::getValue
- )
- );
- }
-}
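
A large part of the speed-up in the patch above comes from `partitionCalls`: group ids are split into small batches, the AdminClient requests for those batches are subscribed with bounded concurrency, and the partial results are merged. A condensed sketch of that idea using the same Reactor/Guava building blocks (signatures simplified, not the project's exact code):

```java
import com.google.common.collect.Iterables;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

public final class BatchedCalls {

  private BatchedCalls() {
  }

  // Split the input into batches, run up to `concurrency` batch calls at once,
  // and merge the partial maps into a single result.
  public static <I, K, V> Mono<Map<K, V>> call(Collection<I> items,
                                               int batchSize,
                                               int concurrency,
                                               Function<List<I>, Mono<Map<K, V>>> batchCall) {
    return Flux.fromIterable(Iterables.partition(items, batchSize))
        .flatMap(batchCall, concurrency)
        .reduce((m1, m2) -> {
          Map<K, V> merged = new HashMap<>(m1);
          merged.putAll(m2);
          return merged;
        })
        .defaultIfEmpty(Map.of());
  }
}
```
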
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java
index 99cfedad4cf..2e302009ac1 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ReactiveAdminClientTest.java
@@ -2,14 +2,18 @@
import static com.provectus.kafka.ui.service.ReactiveAdminClient.toMonoWithExceptionFilter;
import static java.util.Objects.requireNonNull;
+import static org.apache.kafka.clients.admin.ListOffsetsResult.ListOffsetsResultInfo;
import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
+import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Properties;
import java.util.UUID;
+import java.util.function.Function;
import java.util.stream.Stream;
import lombok.SneakyThrows;
import org.apache.kafka.clients.admin.AdminClient;
@@ -18,12 +22,16 @@
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.admin.OffsetSpec;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaFuture;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigResource;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.internals.KafkaFutureImpl;
+import org.apache.kafka.common.serialization.StringDeserializer;
import org.junit.function.ThrowingRunnable;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@@ -96,6 +104,14 @@ void createTopics(NewTopic... topics) {
clearings.add(() -> adminClient.deleteTopics(Stream.of(topics).map(NewTopic::name).toList()).all().get());
}
+ void fillTopic(String topic, int msgsCnt) {
+ try (var producer = KafkaTestProducer.forKafka(kafka)) {
+ for (int i = 0; i < msgsCnt; i++) {
+ producer.send(topic, UUID.randomUUID().toString());
+ }
+ }
+ }
+
@Test
void testToMonoWithExceptionFilter() {
var failedFuture = new KafkaFutureImpl<String>();
@@ -152,4 +168,79 @@ void testListOffsetsUnsafe() {
.verifyComplete();
}
+
+ @Test
+ void testListConsumerGroupOffsets() throws Exception {
+ String topic = UUID.randomUUID().toString();
+ String anotherTopic = UUID.randomUUID().toString();
+ createTopics(new NewTopic(topic, 2, (short) 1), new NewTopic(anotherTopic, 1, (short) 1));
+ fillTopic(topic, 10);
+
+ Function<String, KafkaConsumer<String, String>> consumerSupplier = groupName -> {
+ Properties p = new Properties();
+ p.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
+ p.setProperty(ConsumerConfig.GROUP_ID_CONFIG, groupName);
+ p.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+ p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+ p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
+ return new KafkaConsumer<String, String>(p);
+ };
+
+ String fullyPolledConsumer = UUID.randomUUID().toString();
+ try (KafkaConsumer<String, String> c = consumerSupplier.apply(fullyPolledConsumer)) {
+ c.subscribe(List.of(topic));
+ int polled = 0;
+ while (polled < 10) {
+ polled += c.poll(Duration.ofMillis(50)).count();
+ }
+ c.commitSync();
+ }
+
+ String polled1MsgConsumer = UUID.randomUUID().toString();
+ try (KafkaConsumer<String, String> c = consumerSupplier.apply(polled1MsgConsumer)) {
+ c.subscribe(List.of(topic));
+ c.poll(Duration.ofMillis(100));
+ c.commitSync(Map.of(tp(topic, 0), new OffsetAndMetadata(1)));
+ }
+
+ String noCommitConsumer = UUID.randomUUID().toString();
+ try (KafkaConsumer<String, String> c = consumerSupplier.apply(noCommitConsumer)) {
+ c.subscribe(List.of(topic));
+ c.poll(Duration.ofMillis(100));
+ }
+
+ Map<TopicPartition, ListOffsetsResultInfo> endOffsets = adminClient.listOffsets(Map.of(
+ tp(topic, 0), OffsetSpec.latest(),
+ tp(topic, 1), OffsetSpec.latest())).all().get();
+
+ StepVerifier.create(
+ reactiveAdminClient.listConsumerGroupOffsets(
+ List.of(fullyPolledConsumer, polled1MsgConsumer, noCommitConsumer),
+ List.of(
+ tp(topic, 0),
+ tp(topic, 1),
+ tp(anotherTopic, 0))
+ )
+ ).assertNext(table -> {
+
+ assertThat(table.row(polled1MsgConsumer))
+ .containsEntry(tp(topic, 0), 1L)
+ .hasSize(1);
+
+ assertThat(table.row(noCommitConsumer))
+ .isEmpty();
+
+ assertThat(table.row(fullyPolledConsumer))
+ .containsEntry(tp(topic, 0), endOffsets.get(tp(topic, 0)).offset())
+ .containsEntry(tp(topic, 1), endOffsets.get(tp(topic, 1)).offset())
+ .hasSize(2);
+ })
+ .verifyComplete();
+ }
+
+ private static TopicPartition tp(String topic, int partition) {
+ return new TopicPartition(topic, partition);
+ }
+
}
| test | val | 2023-01-12T16:43:37 | "2022-12-29T10:11:54Z" | joaofrsilva | train |
provectus/kafka-ui/3191_3192 | provectus/kafka-ui | provectus/kafka-ui/3191 | provectus/kafka-ui/3192 | [
"connected"
] | 53cdb684783817e7efab4fb75e1133cb25e857fb | e1708550d5796da07ef3bc006f24bd82580da3c9 | [] | [] | "2023-01-04T15:02:34Z" | [
"scope/frontend",
"status/accepted",
"type/chore"
] | [FE] Build fails after #2372 | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
<!--
Please follow the naming conventions for bugs:
<Feature/Area/Scope> : <Compact, but specific problem summary>
Avoid generic titles, like βTopics: incorrect layout of message sorting drop-down listβ. Better use something like: βTopics: Message sorting drop-down list overlaps the "Submit" buttonβ.
-->
**Describe the bug** (Actual behavior)
<!--(A clear and concise description of what the bug is.Use a list, if there is more than one problem)-->
Front End DEV environment is not working after vite plugin implementation.
**Expected behavior**
<!--(A clear and concise description of what you expected to happen.)-->
It should work as before.
**Set up**
<!--
WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
-->
A normal front-end setup without any Java build step to replace the `PUBLIC-PATH-VARIABLE`
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
1. Go to the front end application and run `pnpm start`
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->
**Additional context**
<!--
Add any other context about the problem here. E.g.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successfull or same issue occured? Please provide steps as well.
2. Related issues (if there are any).
3. Logs (if available)
4. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
-->
| [
"kafka-ui-react-app/index.html",
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml",
"kafka-ui-react-app/vite.config.ts"
] | [
"kafka-ui-react-app/index.html",
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml",
"kafka-ui-react-app/vite.config.ts"
] | [] | diff --git a/kafka-ui-react-app/index.html b/kafka-ui-react-app/index.html
index aa3f89f8f90..33e18ad2688 100644
--- a/kafka-ui-react-app/index.html
+++ b/kafka-ui-react-app/index.html
@@ -12,14 +12,14 @@
/>
<!-- Favicons -->
- <link rel="icon" href="PUBLIC-PATH-VARIABLE/favicon/favicon.ico" sizes="any" />
- <link rel="icon" href="PUBLIC-PATH-VARIABLE/favicon/icon.svg" type="image/svg+xml" />
- <link rel="apple-touch-icon" href="PUBLIC-PATH-VARIABLE/favicon/apple-touch-icon.png" />
- <link rel="manifest" href="PUBLIC-PATH-VARIABLE/manifest.json" />
+ <link rel="icon" href="<%= PUBLIC_PATH %>/favicon/favicon.ico" sizes="any" />
+ <link rel="icon" href="<%= PUBLIC_PATH %>/favicon/icon.svg" type="image/svg+xml" />
+ <link rel="apple-touch-icon" href="<%= PUBLIC_PATH %>/favicon/apple-touch-icon.png" />
+ <link rel="manifest" href="<%= PUBLIC_PATH %>/manifest.json" />
<title>UI for Apache Kafka</title>
<script type="text/javascript">
- window.basePath = 'PUBLIC-PATH-VARIABLE';
+ window.basePath = '<%= PUBLIC_PATH %>';
window.__assetsPathBuilder = function (importer) {
return window.basePath+ "/" + importer;
diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json
index 6353452605a..a385c08c881 100644
--- a/kafka-ui-react-app/package.json
+++ b/kafka-ui-react-app/package.json
@@ -118,7 +118,8 @@
"rimraf": "^3.0.2",
"ts-node": "^10.8.1",
"ts-prune": "^0.10.3",
- "typescript": "^4.7.4"
+ "typescript": "^4.7.4",
+ "vite-plugin-ejs": "^1.6.4"
},
"engines": {
"node": "v16.15.0",
diff --git a/kafka-ui-react-app/pnpm-lock.yaml b/kafka-ui-react-app/pnpm-lock.yaml
index 8a37026d6c3..b69eb15c985 100644
--- a/kafka-ui-react-app/pnpm-lock.yaml
+++ b/kafka-ui-react-app/pnpm-lock.yaml
@@ -85,6 +85,7 @@ specifiers:
typescript: ^4.7.4
use-debounce: ^8.0.1
vite: ^4.0.0
+ vite-plugin-ejs: ^1.6.4
vite-tsconfig-paths: ^4.0.2
whatwg-fetch: ^3.6.2
yup: ^0.32.11
@@ -181,6 +182,7 @@ devDependencies:
ts-node: 10.8.1_seagpw47opwyivxvtfydnuwcuy
ts-prune: 0.10.3
typescript: 4.7.4
+ vite-plugin-ejs: 1.6.4
packages:
@@ -4266,6 +4268,10 @@ packages:
engines: {node: '>=8'}
dev: true
+ /async/3.2.4:
+ resolution: {integrity: sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==}
+ dev: true
+
/asynckit/0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: true
@@ -4514,6 +4520,12 @@ packages:
balanced-match: 1.0.2
concat-map: 0.0.1
+ /brace-expansion/2.0.1:
+ resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==}
+ dependencies:
+ balanced-match: 1.0.2
+ dev: true
+
/braces/3.0.2:
resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==}
engines: {node: '>=8'}
@@ -5040,6 +5052,14 @@ packages:
wcwidth: 1.0.1
dev: true
+ /ejs/3.1.8:
+ resolution: {integrity: sha512-/sXZeMlhS0ArkfX2Aw780gJzXSMPnKjtspYZv+f3NiKLlubezAHDU5+9xz6gd3/NhG3txQCo6xlglmTS+oTGEQ==}
+ engines: {node: '>=0.10.0'}
+ hasBin: true
+ dependencies:
+ jake: 10.8.5
+ dev: true
+
/electron-to-chromium/1.4.151:
resolution: {integrity: sha512-XaG2LpZi9fdiWYOqJh0dJy4SlVywCvpgYXhzOlZTp4JqSKqxn5URqOjbm9OMYB3aInA2GuHQiem1QUOc1yT0Pw==}
@@ -5740,6 +5760,12 @@ packages:
flat-cache: 3.0.4
dev: true
+ /filelist/1.0.4:
+ resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==}
+ dependencies:
+ minimatch: 5.1.2
+ dev: true
+
/fill-range/7.0.1:
resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==}
engines: {node: '>=8'}
@@ -6340,6 +6366,17 @@ packages:
engines: {node: '>=6'}
dev: true
+ /jake/10.8.5:
+ resolution: {integrity: sha512-sVpxYeuAhWt0OTWITwT98oyV0GsXyMlXCF+3L1SuafBVUIr/uILGRB+NqwkzhgXKvoJpDIpQvqkUALgdmQsQxw==}
+ engines: {node: '>=10'}
+ hasBin: true
+ dependencies:
+ async: 3.2.4
+ chalk: 4.1.2
+ filelist: 1.0.4
+ minimatch: 3.1.2
+ dev: true
+
/jest-changed-files/29.0.0:
resolution: {integrity: sha512-28/iDMDrUpGoCitTURuDqUzWQoWmOmOKOFST1mi2lwh62X4BFf6khgH3uSuo1e49X/UDjuApAj3w0wLOex4VPQ==}
engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
@@ -7176,6 +7213,13 @@ packages:
dependencies:
brace-expansion: 1.1.11
+ /minimatch/5.1.2:
+ resolution: {integrity: sha512-bNH9mmM9qsJ2X4r2Nat1B//1dJVcn3+iBLa3IgqJ7EbGaDNepL9QSHOxN4ng33s52VMMhhIfgCYDk3C4ZmlDAg==}
+ engines: {node: '>=10'}
+ dependencies:
+ brace-expansion: 2.0.1
+ dev: true
+
/minimist/1.2.6:
resolution: {integrity: sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==}
dev: true
@@ -8675,6 +8719,12 @@ packages:
'@types/istanbul-lib-coverage': 2.0.3
convert-source-map: 1.7.0
+ /vite-plugin-ejs/1.6.4:
+ resolution: {integrity: sha512-23p1RS4PiA0veXY5/gHZ60pl3pPvd8NEqdBsDgxNK8nM1rjFFDcVb0paNmuipzCgNP/Y0f/Id22M7Il4kvZ2jA==}
+ dependencies:
+ ejs: 3.1.8
+ dev: true
+
/vite-tsconfig-paths/4.0.2_eqmiqdrctagsk5ranq2vs4ssty:
resolution: {integrity: sha512-UzU8zwbCQrdUkj/Z0tnh293n4ScRcjJLoS8nPme2iB2FHoU5q8rhilb7AbhLlUC1uv4t6jSzVWnENjPnyGseeQ==}
peerDependencies:
diff --git a/kafka-ui-react-app/vite.config.ts b/kafka-ui-react-app/vite.config.ts
index 3320bb8cd87..189e72e7f9d 100644
--- a/kafka-ui-react-app/vite.config.ts
+++ b/kafka-ui-react-app/vite.config.ts
@@ -6,12 +6,20 @@ import {
} from 'vite';
import react from '@vitejs/plugin-react-swc';
import tsconfigPaths from 'vite-tsconfig-paths';
+import { ViteEjsPlugin } from 'vite-plugin-ejs';
export default defineConfig(({ mode }) => {
process.env = { ...process.env, ...loadEnv(mode, process.cwd()) };
const defaultConfig: UserConfigExport = {
- plugins: [react(), tsconfigPaths(), splitVendorChunkPlugin()],
+ plugins: [
+ react(),
+ tsconfigPaths(),
+ splitVendorChunkPlugin(),
+ ViteEjsPlugin({
+ PUBLIC_PATH: mode !== 'development' ? 'PUBLIC-PATH-VARIABLE' : '',
+ }),
+ ],
server: {
port: 3000,
},
| null | test | val | 2023-01-04T05:51:49 | "2023-01-04T12:23:08Z" | Mgrdich | train |
provectus/kafka-ui/3105_3195 | provectus/kafka-ui | provectus/kafka-ui/3105 | provectus/kafka-ui/3195 | [
"connected"
] | 578468d09023c9898c14774f8a827058b4fb68d0 | a03b6844e0000a1aedcf6d7ed1472789277f1009 | [] | [
"please make all created methods return the object of ApiService and all old methods need to set private",
"don't we able to use createConnector() at line 60 and deleteConnector() at line 123 without clusterName?",
"seems here we also can set a date when seek type is Timestamp. so suggest tot lease String argument",
"Condition.visible -> Condition.enabled for seekTypeFLd",
"seems this method is not used in tests, it should be private and w/o Step annotation",
"Condition.visible -> Condition.enabled",
"Condition.visible -> Condition.enabled",
"done",
"sure ,done it ",
"done",
"done\r\n",
"done",
"done",
"done"
] | "2023-01-05T11:32:27Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]Checking messages filtering by Timestamp within Messages/Topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-16
Description:
Checking messages filtering by Timestamp within Messages/Topic
Pre-conditions:
- Login to Kafka-ui application
- Open the 'Local' section
- Select the 'Topics'
- Open the Topic profile
- Turn to Messages tab
Steps:
1. Change the Offset dropdown value to 'Timestamp'
2. Click on 'Select Timestamp'
3. Choose the date and time within opened calendar
**Expected result:**
All the messages created from the selected time should be filtered | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java",
"kafka-ui-e2e-checks/src/test/resources/producedkey.txt",
"kafka-ui-e2e-checks/src/test/resources/testData.txt"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index 67bf5a7c590..8b12d3b0d3f 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -7,12 +7,13 @@
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.utilities.WebUtils;
+import java.time.Duration;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public abstract class BasePage extends WebUtils {
- protected SelenideElement loadingSpinner = $x("//*[contains(text(),'Loading')]");
+ protected SelenideElement loadingSpinner = $x("//div[@role='progressbar']");
protected SelenideElement submitBtn = $x("//button[@type='submit']");
protected SelenideElement tableGrid = $x("//table");
protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
@@ -26,7 +27,9 @@ public abstract class BasePage extends WebUtils {
protected void waitUntilSpinnerDisappear() {
log.debug("\nwaitUntilSpinnerDisappear");
- loadingSpinner.shouldBe(Condition.disappear);
+ if(isVisible(loadingSpinner)){
+ loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(30));
+ }
}
protected void clickSubmitBtn() {
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
index bd8cbd3d02c..1de0478abec 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
@@ -3,6 +3,7 @@
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
+import static com.codeborne.selenide.Selenide.sleep;
import static org.apache.commons.lang.math.RandomUtils.nextInt;
import com.codeborne.selenide.CollectionCondition;
@@ -11,9 +12,17 @@
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.YearMonth;
+import java.time.format.DateTimeFormatter;
+import java.time.format.DateTimeFormatterBuilder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import java.util.Locale;
+import java.util.Objects;
import org.openqa.selenium.By;
public class TopicDetails extends BasePage {
@@ -24,7 +33,7 @@ public class TopicDetails extends BasePage {
protected SelenideElement overviewTab = $x("//a[contains(text(),'Overview')]");
protected SelenideElement messagesTab = $x("//a[contains(text(),'Messages')]");
protected SelenideElement seekTypeDdl = $x("//ul[@id='selectSeekType']/li");
- protected SelenideElement seekTypeField = $x("//label[text()='Seek Type']//..//input");
+ protected SelenideElement seekTypeField = $x("//label[text()='Seek Type']//..//div/input");
protected SelenideElement addFiltersBtn = $x("//button[text()='Add Filters']");
protected SelenideElement savedFiltersLink = $x("//div[text()='Saved Filters']");
protected SelenideElement addFilterCodeModalTitle = $x("//label[text()='Filter code']");
@@ -33,6 +42,7 @@ public class TopicDetails extends BasePage {
protected SelenideElement displayNameInputAddFilterMdl = $x("//input[@placeholder='Enter Name']");
protected SelenideElement cancelBtnAddFilterMdl = $x("//button[text()='Cancel']");
protected SelenideElement addFilterBtnAddFilterMdl = $x("//button[text()='Add filter']");
+ protected SelenideElement addFiltersBtnMessages = $x("//button[text()='Add Filters']");
protected SelenideElement selectFilterBtnAddFilterMdl = $x("//button[text()='Select filter']");
protected SelenideElement editSettingsMenu = $x("//li[@role][contains(text(),'Edit settings')]");
protected SelenideElement removeTopicBtn = $x("//ul[@role='menu']//div[contains(text(),'Remove Topic')]");
@@ -43,6 +53,11 @@ public class TopicDetails extends BasePage {
protected SelenideElement backToCreateFiltersLink = $x("//div[text()='Back To create filters']");
protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected ElementsCollection messageGridItems = $$x("//tbody//tr");
+ protected SelenideElement actualCalendarDate = $x("//div[@class='react-datepicker__current-month']");
+ protected SelenideElement previousMonthButton = $x("//button[@aria-label='Previous Month']");
+ protected SelenideElement nextMonthButton = $x("//button[@aria-label='Next Month']");
+ protected SelenideElement calendarTimeFld = $x("//input[@placeholder='Time']");
+ protected String dayCellLtr = "//div[@role='option'][contains(text(),'%d')]";
protected String seekFilterDdlLocator = "//ul[@id='selectSeekType']/ul/li[text()='%s']";
protected String savedFilterNameLocator = "//div[@role='savedFilter']/div[contains(text(),'%s')]";
protected String consumerIdLocator = "//a[@title='%s']";
@@ -53,7 +68,7 @@ public class TopicDetails extends BasePage {
@Step
public TopicDetails waitUntilScreenReady() {
waitUntilSpinnerDisappear();
- dotMenuBtn.shouldBe(Condition.visible);
+ overviewTab.shouldBe(Condition.visible);
return this;
}
@@ -265,6 +280,63 @@ public boolean isContentMessageVisible(String contentMessage) {
return contentMessage.matches(contentMessageTab.getText().trim());
}
+ private void selectYear(int expectedYear) {
+ while (getActualCalendarDate().getYear() > expectedYear) {
+ clickByJavaScript(previousMonthButton);
+ sleep(1000);
+ if (LocalTime.now().plusMinutes(3).isBefore(LocalTime.now())) {
+ throw new IllegalArgumentException("Unable to select year");
+ }
+ }
+ }
+
+ private void selectMonth(int expectedMonth) {
+ while (getActualCalendarDate().getMonthValue() > expectedMonth) {
+ clickByJavaScript(previousMonthButton);
+ sleep(1000);
+ if (LocalTime.now().plusMinutes(3).isBefore(LocalTime.now())) {
+ throw new IllegalArgumentException("Unable to select month");
+ }
+ }
+ }
+
+ private void selectDay(int expectedDay) {
+ Objects.requireNonNull($$x(String.format(dayCellLtr, expectedDay)).stream()
+ .filter(day -> !Objects.requireNonNull(day.getAttribute("class")).contains("outside-month"))
+ .findFirst().orElse(null)).shouldBe(Condition.enabled).click();
+ }
+
+ private void setTime(LocalDateTime dateTime) {
+ calendarTimeFld.shouldBe(Condition.enabled)
+ .sendKeys(String.valueOf(dateTime.getHour()), String.valueOf(dateTime.getMinute()));
+ }
+
+ @Step
+ public TopicDetails selectDateAndTimeByCalendar(LocalDateTime dateTime) {
+ setTime(dateTime);
+ selectYear(dateTime.getYear());
+ selectMonth(dateTime.getMonthValue());
+ selectDay(dateTime.getDayOfMonth());
+ return this;
+ }
+
+ private LocalDate getActualCalendarDate() {
+ String monthAndYearStr = actualCalendarDate.getText().trim();
+ DateTimeFormatter formatter = new DateTimeFormatterBuilder()
+ .parseCaseInsensitive()
+ .append(DateTimeFormatter.ofPattern("MMMM yyyy"))
+ .toFormatter(Locale.ENGLISH);
+ YearMonth yearMonth = formatter.parse(monthAndYearStr, YearMonth::from);
+ return yearMonth.atDay(1);
+ }
+
+ @Step
+ public TopicDetails openCalendarSeekType(){
+ seekTypeField.shouldBe(Condition.enabled).click();
+ actualCalendarDate.shouldBe(Condition.visible);
+ return this;
+ }
+
@Step
public int getMessageCountAmount() {
return Integer.parseInt(messageAmountCell.getText().trim());
@@ -278,7 +350,7 @@ private List<TopicDetails.MessageGridItem> initItems() {
}
@Step
- public TopicDetails.MessageGridItem getMessage(int offset) {
+ public TopicDetails.MessageGridItem getMessageByOffset(int offset) {
return initItems().stream()
.filter(e -> e.getOffset() == offset)
.findFirst().orElse(null);
@@ -291,7 +363,7 @@ public List<MessageGridItem> getAllMessages(){
@Step
public TopicDetails.MessageGridItem getRandomMessage() {
- return getMessage(nextInt(initItems().size() - 1));
+ return getMessageByOffset(nextInt(initItems().size() - 1));
}
public enum TopicMenu {
@@ -340,8 +412,10 @@ public int getPartition() {
}
@Step
- public String getTimestamp() {
- return element.$x("./td[4]/div").getText().trim();
+ public LocalDateTime getTimestamp() {
+ String timestampValue = element.$x("./td[4]/div").getText().trim();
+ DateTimeFormatter formatter = DateTimeFormatter.ofPattern("M/d/yyyy, HH:mm:ss");
+ return LocalDateTime.parse(timestampValue, formatter);
}
@Step
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index ad1fad76b2e..8451ef836e1 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.services;
import static com.codeborne.selenide.Selenide.sleep;
-import static com.provectus.kafka.ui.settings.BaseSource.BASE_LOCAL_URL;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -17,6 +16,8 @@
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
+import com.provectus.kafka.ui.settings.BaseSource;
+import io.qameta.allure.Step;
import java.util.HashMap;
import java.util.Map;
import lombok.SneakyThrows;
@@ -25,7 +26,7 @@
@Slf4j
-public class ApiService {
+public class ApiService extends BaseSource {
@SneakyThrows
private TopicsApi topicApi() {
@@ -48,7 +49,7 @@ private MessagesApi messageApi() {
}
@SneakyThrows
- public void createTopic(String clusterName, String topicName) {
+ private void createTopic(String clusterName, String topicName) {
TopicCreation topic = new TopicCreation();
topic.setName(topicName);
topic.setPartitions(1);
@@ -61,15 +62,28 @@ public void createTopic(String clusterName, String topicName) {
}
}
- public void deleteTopic(String clusterName, String topicName) {
+ @Step
+ public ApiService createTopic(String topicName) {
+ createTopic(CLUSTER_NAME, topicName);
+ return this;
+ }
+
+ @SneakyThrows
+ private void deleteTopic(String clusterName, String topicName) {
try {
topicApi().deleteTopic(clusterName, topicName).block();
} catch (WebClientResponseException ignore) {
}
}
+ @Step
+ public ApiService deleteTopic(String topicName){
+ deleteTopic(CLUSTER_NAME, topicName);
+ return this;
+ }
+
@SneakyThrows
- public void createSchema(String clusterName, Schema schema) {
+ private void createSchema(String clusterName, Schema schema) {
NewSchemaSubject schemaSubject = new NewSchemaSubject();
schemaSubject.setSubject(schema.getName());
schemaSubject.setSchema(fileToString(schema.getValuePath()));
@@ -81,24 +95,42 @@ public void createSchema(String clusterName, Schema schema) {
}
}
+ @Step
+ public ApiService createSchema(Schema schema){
+ createSchema(CLUSTER_NAME, schema);
+ return this;
+ }
+
@SneakyThrows
- public void deleteSchema(String clusterName, String schemaName) {
+ private void deleteSchema(String clusterName, String schemaName) {
try {
schemaApi().deleteSchema(clusterName, schemaName).block();
} catch (WebClientResponseException ignore) {
}
}
+ @Step
+ public ApiService deleteSchema(String schemaName){
+ deleteSchema(CLUSTER_NAME, schemaName);
+ return this;
+ }
+
@SneakyThrows
- public void deleteConnector(String clusterName, String connectName, String connectorName) {
+ private void deleteConnector(String clusterName, String connectName, String connectorName) {
try {
connectorApi().deleteConnector(clusterName, connectName, connectorName).block();
} catch (WebClientResponseException ignore) {
}
}
+ @Step
+ public ApiService deleteConnector(String connectName, String connectorName){
+ deleteConnector(CLUSTER_NAME, connectName, connectorName);
+ return this;
+ }
+
@SneakyThrows
- public void createConnector(String clusterName, String connectName, Connector connector) {
+ private void createConnector(String clusterName, String connectName, Connector connector) {
NewConnector connectorProperties = new NewConnector();
connectorProperties.setName(connector.getName());
Map<String, Object> configMap = new ObjectMapper().readValue(connector.getConfig(), HashMap.class);
@@ -110,20 +142,35 @@ public void createConnector(String clusterName, String connectName, Connector co
connectorApi().createConnector(clusterName, connectName, connectorProperties).block();
}
+ @Step
+ public ApiService createConnector(String connectName, Connector connector){
+ createConnector(CLUSTER_NAME, connectName, connector);
+ return this;
+ }
+
+ @Step
public String getFirstConnectName(String clusterName) {
return connectorApi().getConnects(clusterName).blockFirst().getName();
}
@SneakyThrows
- public void sendMessage(String clusterName, Topic topic) {
- CreateTopicMessage createMessage = new CreateTopicMessage();
- createMessage.partition(0);
- createMessage.setContent(topic.getMessageContent());
- createMessage.setKey(topic.getMessageKey());
- try {
- messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
- } catch (WebClientResponseException ex) {
- ex.getRawStatusCode();
- }
+ private void sendMessage(String clusterName, Topic topic) {
+ CreateTopicMessage createMessage = new CreateTopicMessage();
+ createMessage.setPartition(0);
+ createMessage.setKeySerde("String");
+ createMessage.setValueSerde("String");
+ createMessage.setKey(topic.getMessageKey());
+ createMessage.setContent(topic.getMessageContent());
+ try {
+ messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
+ } catch (WebClientResponseException ex) {
+ ex.getRawStatusCode();
+ }
+ }
+
+ @Step
+ public ApiService sendMessage(Topic topic) {
+ sendMessage(CLUSTER_NAME, topic);
+ return this;
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java
new file mode 100644
index 00000000000..259f70c3ccc
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/TimeUtils.java
@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.utilities;
+
+import static com.codeborne.selenide.Selenide.sleep;
+
+import java.time.LocalTime;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class TimeUtils {
+
+ public static void waitUntilNewMinuteStarted(){
+ int secondsLeft = 60 - LocalTime.now().getSecond();
+ log.debug("\nwaitUntilNewMinuteStarted: {}s", secondsLeft);
+ sleep(secondsLeft * 1000);
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/resources/producedkey.txt b/kafka-ui-e2e-checks/src/test/resources/producedkey.txt
deleted file mode 100644
index f1ccb75d55d..00000000000
--- a/kafka-ui-e2e-checks/src/test/resources/producedkey.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-"key"
\ No newline at end of file
diff --git a/kafka-ui-e2e-checks/src/test/resources/testData.txt b/kafka-ui-e2e-checks/src/test/resources/testData.txt
deleted file mode 100644
index 9b9d3b66546..00000000000
--- a/kafka-ui-e2e-checks/src/test/resources/testData.txt
+++ /dev/null
@@ -1,1 +0,0 @@
-"print"
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
index 5ad98fe8542..6db92244ba4 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java
@@ -62,6 +62,7 @@ public static void start() {
.addArguments("--disable-gpu")
.addArguments("--no-sandbox")
.addArguments("--verbose")
+ .addArguments("--lang=es")
)
.withLogConsumer(new Slf4jLogConsumer(log).withPrefix("[CHROME]: "));
try {
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java
index 665b8fb2ff0..ec2e3d5a0f1 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/connectors/ConnectorsTests.java
@@ -2,7 +2,6 @@
import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KAFKA_CONNECT;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
@@ -50,14 +49,14 @@ public class ConnectorsTests extends BaseTest {
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
- TOPIC_LIST.forEach(topic -> {
- apiService.createTopic(CLUSTER_NAME, topic.getName());
- apiService.sendMessage(CLUSTER_NAME, topic);
- });
- CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
- CONNECTOR_LIST.forEach(connector -> apiService
- .createConnector(CLUSTER_NAME, CONNECT_NAME, connector));
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_CREATE, TOPIC_FOR_DELETE, TOPIC_FOR_UPDATE));
+ TOPIC_LIST.forEach(topic -> apiService
+ .createTopic(topic.getName())
+ .sendMessage(topic)
+ );
+ CONNECTOR_LIST.addAll(List.of(CONNECTOR_FOR_DELETE, CONNECTOR_FOR_UPDATE));
+ CONNECTOR_LIST.forEach(connector -> apiService
+ .createConnector(CONNECT_NAME, connector));
}
@DisplayName("should create a connector")
@@ -120,8 +119,8 @@ public void deleteConnector() {
@AfterAll
public void afterAll() {
CONNECTOR_LIST.forEach(connector ->
- apiService.deleteConnector(CLUSTER_NAME, CONNECT_NAME, connector.getName()));
- TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
+ apiService.deleteConnector(CONNECT_NAME, connector.getName()));
+ TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
@Step
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
index 1af657ca976..ea98b4abe1a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/schemas/SchemasTests.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.suite.schemas;
import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.SCHEMA_REGISTRY;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import com.codeborne.selenide.Condition;
@@ -41,7 +40,7 @@ public class SchemasTests extends BaseTest {
@SneakyThrows
public void beforeAll() {
SCHEMA_LIST.addAll(List.of(AVRO_API, JSON_API, PROTOBUF_API));
- SCHEMA_LIST.forEach(schema -> apiService.createSchema(CLUSTER_NAME, schema));
+ SCHEMA_LIST.forEach(schema -> apiService.createSchema(schema));
}
@DisplayName("should create AVRO schema")
@@ -228,7 +227,7 @@ void deleteSchemaProtobuf() {
@AfterAll
public void afterAll() {
- SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(CLUSTER_NAME, schema.getName()));
+ SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(schema.getName()));
}
@Step
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
index 68dd2f72201..db2d67106b6 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
@@ -2,8 +2,8 @@
import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.MESSAGES;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
-import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
+import static com.provectus.kafka.ui.pages.topic.TopicDetails.TopicMenu.OVERVIEW;
+import static com.provectus.kafka.ui.utilities.TimeUtils.waitUntilNewMinuteStarted;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
import static org.assertj.core.api.Assertions.assertThat;
@@ -15,8 +15,12 @@
import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
import io.qameta.allure.Issue;
import io.qase.api.annotation.CaseId;
+import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
import org.assertj.core.api.SoftAssertions;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
@@ -32,22 +36,29 @@ public class TopicMessagesTests extends BaseTest {
private static final String SUITE_TITLE = "Topics";
private static final Topic TOPIC_FOR_MESSAGES = new Topic()
.setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
- .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
- .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_TO_CLEAR_MESSAGES = new Topic()
.setName("topic-to-clear-message-attribute-" + randomAlphabetic(5))
- .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
- .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_FOR_CHECKING_FILTERS = new Topic()
+ .setName("topic_for_checking_filters" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_TO_RECREATE = new Topic()
.setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
- .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
- .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_TO_CLEAR_MESSAGES, TOPIC_TO_RECREATE));
- TOPIC_LIST.forEach(topic -> apiService.createTopic(CLUSTER_NAME, topic.getName()));
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_MESSAGES, TOPIC_TO_RECREATE));
+ TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ waitUntilNewMinuteStarted();
+ IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
}
@DisplayName("produce message")
@@ -58,7 +69,7 @@ public void beforeAll() {
void produceMessage() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
topicDetails
- .openDetailsTab(TopicDetails.TopicMenu.MESSAGES)
+ .openDetailsTab(MESSAGES)
.clickProduceMessageBtn();
produceMessagePanel
.waitUntilScreenReady()
@@ -85,7 +96,7 @@ void produceMessage() {
void clearMessage() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_MESSAGES.getName());
topicDetails
- .openDetailsTab(TopicDetails.TopicMenu.OVERVIEW)
+ .openDetailsTab(OVERVIEW)
.clickProduceMessageBtn();
int messageAmount = topicDetails.getMessageCountAmount();
produceMessagePanel
@@ -111,35 +122,70 @@ void clearMessage() {
@CaseId(21)
@Test
void copyMessageFromTopicProfile() {
- navigateToTopicsAndOpenDetails("_schemas");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
topicDetails
- .openDetailsTab(TopicDetails.TopicMenu.MESSAGES)
+ .openDetailsTab(MESSAGES)
.getRandomMessage()
.openDotMenu()
.clickCopyToClipBoard();
- Assertions.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,"Copied successfully!"),
+ Assertions.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
"isAlertWithMessageVisible()");
}
@Disabled
- @Issue("https://github.com/provectus/kafka-ui/issues/2856")
+ @Issue("https://github.com/provectus/kafka-ui/issues/2394")
@DisplayName("Checking messages filtering by Offset within Topic/Messages")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(15)
@Test
void checkingMessageFilteringByOffset() {
- String offsetValue = "2";
- navigateToTopicsAndOpenDetails("_schemas");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
topicDetails
- .openDetailsTab(MESSAGES)
.selectSeekTypeDdlMessagesTab("Offset")
- .setSeekTypeValueFldMessagesTab(offsetValue)
+ .setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
+ .clickSubmitFiltersBtnMessagesTab();
+ SoftAssertions softly = new SoftAssertions();
+ topicDetails.getAllMessages().forEach(message ->
+ softly.assertThat(message.getOffset() == secondMessage.getOffset()
+ || message.getOffset() > secondMessage.getOffset())
+ .as(String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset()))
+ .isTrue());
+ softly.assertAll();
+ }
+
+ @Disabled
+ @Issue("https://github.com/provectus/kafka-ui/issues/3215")
+ @Issue("https://github.com/provectus/kafka-ui/issues/2345")
+ @DisplayName("Checking messages filtering by Timestamp within Messages/Topic")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(16)
+ @Test
+ void checkingMessageFilteringByTimestamp() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
+ List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
+ .filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
+ .collect(Collectors.toList());
+ LocalDateTime nextTimestamp = Objects.requireNonNull(nextMessages.stream()
+ .findFirst().orElse(null)).getTimestamp();
+ topicDetails
+ .selectSeekTypeDdlMessagesTab("Timestamp")
+ .openCalendarSeekType()
+ .selectDateAndTimeByCalendar(nextTimestamp)
.clickSubmitFiltersBtnMessagesTab();
SoftAssertions softly = new SoftAssertions();
- topicDetails.getAllMessages()
- .forEach(messages -> softly.assertThat(messages.getOffset() == Integer.parseInt(offsetValue))
- .as("getAllMessages()").isTrue());
+ topicDetails.getAllMessages().forEach(message ->
+ softly.assertThat(message.getTimestamp().isEqual(nextTimestamp)
+ || message.getTimestamp().isAfter(nextTimestamp))
+ .as(String.format("Expected timestamp is: %s, but found: %s", nextTimestamp, message.getTimestamp()))
+ .isTrue());
softly.assertAll();
}
@@ -215,6 +261,6 @@ void checkRecreateTopic(){
@AfterAll
public void afterAll() {
- TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
+ TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index 89857df9675..bc2ebe0889b 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -9,11 +9,9 @@
import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.NOT_SET;
import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_1_GB;
import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_20_GB;
-import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
-import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
-import static org.assertj.core.api.Assertions.assertThat;
import static org.apache.commons.lang3.RandomUtils.nextInt;
+import static org.assertj.core.api.Assertions.assertThat;
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.base.BaseTest;
@@ -56,8 +54,8 @@ public class TopicsTests extends BaseTest {
.setTimeToRetainData("604800001")
.setMaxSizeOnDisk(SIZE_20_GB)
.setMaxMessageBytes("1000020")
- .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
- .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_TO_CHECK_SETTINGS = new Topic()
.setName("new-topic-" + randomAlphabetic(5))
.setNumberOfPartitions(1)
@@ -69,7 +67,7 @@ public class TopicsTests extends BaseTest {
@BeforeAll
public void beforeAll() {
TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE, TOPIC_FOR_DELETE));
- TOPIC_LIST.forEach(topic -> apiService.createTopic(CLUSTER_NAME, topic.getName()));
+ TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
}
@DisplayName("should create a topic")
@@ -505,6 +503,6 @@ void checkCopyTopicPossibility(){
@AfterAll
public void afterAll() {
- TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
+ TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
}
| train | val | 2023-01-12T20:14:28 | "2022-12-20T11:31:51Z" | anezboretskiy | train |
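The e2e changes above parse the message grid's timestamp column with the pattern `M/d/yyyy, HH:mm:ss` and then assert that every displayed message is at or after the timestamp chosen in the calendar. As a standalone illustration of that comparison rule (the sample timestamps below are made up, not taken from a real test run):

```java
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;

public class TimestampFilterSketch {
  // Same pattern the test uses for grid values such as "1/5/2023, 14:30:07".
  private static final DateTimeFormatter GRID_FORMAT =
      DateTimeFormatter.ofPattern("M/d/yyyy, HH:mm:ss");

  public static void main(String[] args) {
    LocalDateTime seekFrom = LocalDateTime.parse("1/5/2023, 14:30:00", GRID_FORMAT);
    // Stand-in values for the timestamps read from the messages grid.
    List<String> gridTimestamps = List.of("1/5/2023, 14:30:07", "1/5/2023, 14:31:12");
    boolean allAtOrAfter = gridTimestamps.stream()
        .map(value -> LocalDateTime.parse(value, GRID_FORMAT))
        .allMatch(ts -> !ts.isBefore(seekFrom)); // equal to or later than the seek point
    System.out.println("All messages at or after the selected time: " + allAtOrAfter);
  }
}
```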
provectus/kafka-ui/3111_3197 | provectus/kafka-ui | provectus/kafka-ui/3111 | provectus/kafka-ui/3197 | [
"connected"
] | 741bbc1be1bd8dba4fdfe854ca01f6f60b80c0a0 | ea348102c22a4eb4fdaa24a635ca7f4184970442 | [] | [
"are we using this function ? cause it seems to be deleted ",
"I added a new function because there pops up a warning that 'button cannot be located inside another button'",
"in the dropdown, I just added a div with onClick and now it works correctly and there is no warning in the tests",
"\r\nand yes we are using this function because the old drop down still works for deletion\r\n",
"i don't think the click dropdown button should check how many dropdowns we have, now it is just clicking the first or the second button.\r\n\r\nand if this click is for the delete , we should fetch it with its text Delete , as it is done in the Restart. and we can rename the function `afterClickDeleteButton`",
"ok i will remove expect(dropDownButton.length).toEqual(2) this line ",
"let catch this with its name as well instead of `screen.getAllByRole('button');` like `screen.getByText('Restart Failed Tasks');` , so it won't fail in the future if the order of the dropdown elements changes",
"there is a no name button it's just a drop down button "
] | "2023-01-05T13:58:45Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted"
] | Redesign connector actions | 1. one dropdown button for restart actions
2. a "triple dot" sandwich menu for deleting a connector
https://www.figma.com/file/ZkvysdRUmt9p2PDNU3a9WU/Kafka-UI?node-id=3302%3A12035&t=uAxXU6AP0QAqiwUE-0 | [
"kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx",
"kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx",
"kafka-ui-react-app/src/components/common/Button/Button.styled.ts",
"kafka-ui-react-app/src/theme/theme.ts"
] | [
"kafka-ui-react-app/src/components/Connect/Details/Actions/Action.styled.ts",
"kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx",
"kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx",
"kafka-ui-react-app/src/components/common/Button/Button.styled.ts",
"kafka-ui-react-app/src/components/common/Icons/ChevronDownIcon.tsx",
"kafka-ui-react-app/src/theme/theme.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Connect/Details/Actions/Action.styled.ts b/kafka-ui-react-app/src/components/Connect/Details/Actions/Action.styled.ts
new file mode 100644
index 00000000000..a2998acdc21
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Connect/Details/Actions/Action.styled.ts
@@ -0,0 +1,23 @@
+import styled from 'styled-components';
+
+export const ConnectorActionsWrapperStyled = styled.div`
+ display: flex;
+ flex-wrap: wrap;
+ align-items: center;
+ gap: 8px;
+`;
+export const ButtonLabel = styled.span`
+ margin-right: 11.5px;
+`;
+export const RestartButton = styled.div`
+ padding: 0 12px;
+ border: none;
+ border-radius: 4px;
+ display: flex;
+ -webkit-align-items: center;
+ background: #e8e8fc;
+ color: #171a1c;
+ font-size: 14px;
+ font-weight: 500;
+ height: 32px;
+`;
diff --git a/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx b/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx
index f475b739e12..530f0db1471 100644
--- a/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx
+++ b/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx
@@ -1,5 +1,4 @@
import React from 'react';
-import styled from 'styled-components';
import { useNavigate } from 'react-router-dom';
import { useIsMutating } from '@tanstack/react-query';
import {
@@ -21,13 +20,9 @@ import {
import { useConfirm } from 'lib/hooks/useConfirm';
import { Dropdown } from 'components/common/Dropdown';
import { ActionDropdownItem } from 'components/common/ActionComponent';
+import ChevronDownIcon from 'components/common/Icons/ChevronDownIcon';
-const ConnectorActionsWrapperStyled = styled.div`
- display: flex;
- flex-wrap: wrap;
- align-items: center;
- gap: 8px;
-`;
+import * as S from './Action.styled';
const Actions: React.FC = () => {
const navigate = useNavigate();
@@ -66,10 +61,16 @@ const Actions: React.FC = () => {
stateMutation.mutateAsync(ConnectorAction.PAUSE);
const resumeConnectorHandler = () =>
stateMutation.mutateAsync(ConnectorAction.RESUME);
-
return (
- <ConnectorActionsWrapperStyled>
- <Dropdown>
+ <S.ConnectorActionsWrapperStyled>
+ <Dropdown
+ label={
+ <S.RestartButton>
+ <S.ButtonLabel>Restart</S.ButtonLabel>
+ <ChevronDownIcon />
+ </S.RestartButton>
+ }
+ >
{connector?.status.state === ConnectorState.RUNNING && (
<ActionDropdownItem
onClick={pauseConnectorHandler}
@@ -129,6 +130,8 @@ const Actions: React.FC = () => {
>
Restart Failed Tasks
</ActionDropdownItem>
+ </Dropdown>
+ <Dropdown>
<ActionDropdownItem
onClick={deleteConnectorHandler}
disabled={isMutating}
@@ -142,7 +145,7 @@ const Actions: React.FC = () => {
Delete
</ActionDropdownItem>
</Dropdown>
- </ConnectorActionsWrapperStyled>
+ </S.ConnectorActionsWrapperStyled>
);
};
diff --git a/kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx b/kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx
index c3c4cff8f1a..9dce7507f59 100644
--- a/kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx
+++ b/kafka-ui-react-app/src/components/Connect/Details/Actions/__tests__/Actions.spec.tsx
@@ -35,8 +35,11 @@ const expectActionButtonsExists = () => {
};
const afterClickDropDownButton = async () => {
const dropDownButton = screen.getAllByRole('button');
- expect(dropDownButton.length).toEqual(1);
- await userEvent.click(dropDownButton[0]);
+ await userEvent.click(dropDownButton[1]);
+};
+const afterClickRestartButton = async () => {
+ const dropDownButton = screen.getByText('Restart');
+ await userEvent.click(dropDownButton);
};
describe('Actions', () => {
afterEach(() => {
@@ -66,8 +69,8 @@ describe('Actions', () => {
data: set({ ...connector }, 'status.state', ConnectorState.PAUSED),
}));
renderComponent();
- await afterClickDropDownButton();
- expect(screen.getAllByRole('menuitem').length).toEqual(5);
+ await afterClickRestartButton();
+ expect(screen.getAllByRole('menuitem').length).toEqual(4);
expect(screen.getByText('Resume')).toBeInTheDocument();
expect(screen.queryByText('Pause')).not.toBeInTheDocument();
expectActionButtonsExists();
@@ -78,8 +81,8 @@ describe('Actions', () => {
data: set({ ...connector }, 'status.state', ConnectorState.FAILED),
}));
renderComponent();
- await afterClickDropDownButton();
- expect(screen.getAllByRole('menuitem').length).toEqual(4);
+ await afterClickRestartButton();
+ expect(screen.getAllByRole('menuitem').length).toEqual(3);
expect(screen.queryByText('Resume')).not.toBeInTheDocument();
expect(screen.queryByText('Pause')).not.toBeInTheDocument();
expectActionButtonsExists();
@@ -90,8 +93,8 @@ describe('Actions', () => {
data: set({ ...connector }, 'status.state', ConnectorState.UNASSIGNED),
}));
renderComponent();
- await afterClickDropDownButton();
- expect(screen.getAllByRole('menuitem').length).toEqual(4);
+ await afterClickRestartButton();
+ expect(screen.getAllByRole('menuitem').length).toEqual(3);
expect(screen.queryByText('Resume')).not.toBeInTheDocument();
expect(screen.queryByText('Pause')).not.toBeInTheDocument();
expectActionButtonsExists();
@@ -102,8 +105,8 @@ describe('Actions', () => {
data: set({ ...connector }, 'status.state', ConnectorState.RUNNING),
}));
renderComponent();
- await afterClickDropDownButton();
- expect(screen.getAllByRole('menuitem').length).toEqual(5);
+ await afterClickRestartButton();
+ expect(screen.getAllByRole('menuitem').length).toEqual(4);
expect(screen.queryByText('Resume')).not.toBeInTheDocument();
expect(screen.getByText('Pause')).toBeInTheDocument();
expectActionButtonsExists();
@@ -131,7 +134,7 @@ describe('Actions', () => {
mutateAsync: restartConnector,
}));
renderComponent();
- await afterClickDropDownButton();
+ await afterClickRestartButton();
await userEvent.click(
screen.getByRole('menuitem', { name: 'Restart Connector' })
);
@@ -144,7 +147,7 @@ describe('Actions', () => {
mutateAsync: restartAllTasks,
}));
renderComponent();
- await afterClickDropDownButton();
+ await afterClickRestartButton();
await userEvent.click(
screen.getByRole('menuitem', { name: 'Restart All Tasks' })
);
@@ -159,7 +162,7 @@ describe('Actions', () => {
mutateAsync: restartFailedTasks,
}));
renderComponent();
- await afterClickDropDownButton();
+ await afterClickRestartButton();
await userEvent.click(
screen.getByRole('menuitem', { name: 'Restart Failed Tasks' })
);
@@ -174,7 +177,7 @@ describe('Actions', () => {
mutateAsync: pauseConnector,
}));
renderComponent();
- await afterClickDropDownButton();
+ await afterClickRestartButton();
await userEvent.click(screen.getByRole('menuitem', { name: 'Pause' }));
expect(pauseConnector).toHaveBeenCalledWith(ConnectorAction.PAUSE);
});
@@ -188,7 +191,7 @@ describe('Actions', () => {
mutateAsync: resumeConnector,
}));
renderComponent();
- await afterClickDropDownButton();
+ await afterClickRestartButton();
await userEvent.click(screen.getByRole('menuitem', { name: 'Resume' }));
expect(resumeConnector).toHaveBeenCalledWith(ConnectorAction.RESUME);
});
diff --git a/kafka-ui-react-app/src/components/common/Button/Button.styled.ts b/kafka-ui-react-app/src/components/common/Button/Button.styled.ts
index 574e8a6c5eb..4cf793a5ed4 100644
--- a/kafka-ui-react-app/src/components/common/Button/Button.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Button/Button.styled.ts
@@ -58,8 +58,10 @@ const StyledButton = styled.button<ButtonProps>`
color: ${(props) => props.theme.button.primary.color};
}
- & svg {
- margin-right: 7px;
+ & :first-of-type {
+ svg {
+ margin-right: 7px;
+ }
}
`;
diff --git a/kafka-ui-react-app/src/components/common/Icons/ChevronDownIcon.tsx b/kafka-ui-react-app/src/components/common/Icons/ChevronDownIcon.tsx
new file mode 100644
index 00000000000..d9bf102474d
--- /dev/null
+++ b/kafka-ui-react-app/src/components/common/Icons/ChevronDownIcon.tsx
@@ -0,0 +1,24 @@
+import React from 'react';
+import { useTheme } from 'styled-components';
+
+const ChevronDownIcon: React.FC = () => {
+ const theme = useTheme();
+ return (
+ <svg
+ width="10"
+ height="6"
+ viewBox="0 0 10 6"
+ fill="none"
+ xmlns="http://www.w3.org/2000/svg"
+ >
+ <path
+ fillRule="evenodd"
+ clipRule="evenodd"
+ d="M0.646447 0.646447C0.841709 0.451184 1.15829 0.451184 1.35355 0.646447L5 4.29289L8.64645 0.646447C8.84171 0.451184 9.15829 0.451184 9.35355 0.646447C9.54882 0.841709 9.54882 1.15829 9.35355 1.35355L5.35355 5.35355C5.15829 5.54882 4.84171 5.54882 4.64645 5.35355L0.646447 1.35355C0.451184 1.15829 0.451184 0.841709 0.646447 0.646447Z"
+ fill={theme.icons.chevronDownIcon}
+ />
+ </svg>
+ );
+};
+
+export default ChevronDownIcon;
diff --git a/kafka-ui-react-app/src/theme/theme.ts b/kafka-ui-react-app/src/theme/theme.ts
index 9eb8ac3fc79..73da7bf84d0 100644
--- a/kafka-ui-react-app/src/theme/theme.ts
+++ b/kafka-ui-react-app/src/theme/theme.ts
@@ -547,6 +547,7 @@ const theme = {
},
},
icons: {
+ chevronDownIcon: Colors.neutral[90],
closeIcon: Colors.neutral[30],
deleteIcon: Colors.red[20],
warningIcon: Colors.yellow[20],
| null | train | val | 2023-02-22T08:50:49 | "2022-12-21T07:23:52Z" | Haarolean | train |
provectus/kafka-ui/3198_3199 | provectus/kafka-ui | provectus/kafka-ui/3198 | provectus/kafka-ui/3199 | [
"connected"
] | 0048a7eabdd078606768487af74c4403f8142b60 | 8ffd542a808c5476156d1070a21fd340adf64a6b | [] | [] | "2023-01-05T18:45:02Z" | [
"scope/frontend",
"status/accepted",
"type/chore"
] | classnames is not used in the code base but is still included in the bundle |
**Describe the bug** (Actual behavior)
`classnames` is not used in the code base but is still included in the bundle.
**Expected behavior**
It should be removed because it affects the bundle size.
**Steps to Reproduce**
1. Remove `classnames` from the bundle.
2. Retest and see if the application still runs.
| [
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml"
] | [
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml"
] | [] | diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json
index a385c08c881..f51939d5978 100644
--- a/kafka-ui-react-app/package.json
+++ b/kafka-ui-react-app/package.json
@@ -19,7 +19,6 @@
"@types/testing-library__jest-dom": "^5.14.5",
"ace-builds": "^1.7.1",
"ajv": "^8.6.3",
- "classnames": "^2.2.6",
"fetch-mock": "^9.11.0",
"jest": "^29.0.3",
"jest-watch-typeahead": "^2.0.0",
diff --git a/kafka-ui-react-app/pnpm-lock.yaml b/kafka-ui-react-app/pnpm-lock.yaml
index b69eb15c985..c7e33aea0ae 100644
--- a/kafka-ui-react-app/pnpm-lock.yaml
+++ b/kafka-ui-react-app/pnpm-lock.yaml
@@ -38,7 +38,6 @@ specifiers:
'@vitejs/plugin-react-swc': ^3.0.0
ace-builds: ^1.7.1
ajv: ^8.6.3
- classnames: ^2.2.6
dotenv: ^16.0.1
eslint: ^8.3.0
eslint-config-airbnb: ^19.0.4
@@ -107,7 +106,6 @@ dependencies:
'@types/testing-library__jest-dom': 5.14.5
ace-builds: 1.7.1
ajv: 8.8.2
- classnames: 2.3.1
fetch-mock: 9.11.0
jest: 29.0.3_yqiaopbgmqcuvx27p5xxvum6wm
jest-watch-typeahead: [email protected]
| null | train | val | 2023-01-09T14:24:09 | "2023-01-05T18:40:18Z" | Mgrdich | train |
provectus/kafka-ui/3196_3208 | provectus/kafka-ui | provectus/kafka-ui/3196 | provectus/kafka-ui/3208 | [
"connected"
] | 0048a7eabdd078606768487af74c4403f8142b60 | 7fc94ecdbfc6aec961370e582cc3e261c81ffd2e | [] | [
"why do we need this link to the task?",
"getNumberOfMessage -> getNumberOfMessages",
"assertions don't work, because assertAll() missed",
"btw, I'm not sure we really need this method. we can call getTopicItem(topicName).getNumberOfMessages() directly from test",
"Fixed",
"Fixed.",
"Removed.",
"we need to wait until button is visible while dot menu is opening: clickByJavaScript(clearMessageBtn); -> clickByJavaScript(clearMessageBtn.shouldBe(visible));",
"Fixed."
] | "2023-01-10T09:56:16Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]TopicTests.clearMessageOfTopic : Clear message of topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-239
Pre-conditions :
1)Create a Topic
2)Add messages to Topic
Steps:
1)Navigate to Topics
2)Select "Clear messages" from 3dot menu icon for Topic
3)Press "Confirm"
Expected results:
1)"Confirm the action" should appear
2)Number of messages should become 0 ;
Success message should be displayed | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index cf559af1e2c..67bf5a7c590 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -18,6 +18,7 @@ public abstract class BasePage extends WebUtils {
protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
+ protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
protected ElementsCollection allGridItems = $$x("//tr[@class]");
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
@@ -65,6 +66,11 @@ protected boolean isAlertVisible(AlertHeader header, String message) {
return result;
}
+ protected void clickConfirmButton() {
+ confirmBtn.shouldBe(Condition.enabled).click();
+ confirmBtn.shouldBe(Condition.disappear);
+ }
+
public enum AlertHeader {
SUCCESS("Success"),
VALIDATION_ERROR("Validation Error"),
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
index 70d5934d048..bd8cbd3d02c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
@@ -36,7 +36,6 @@ public class TopicDetails extends BasePage {
protected SelenideElement selectFilterBtnAddFilterMdl = $x("//button[text()='Select filter']");
protected SelenideElement editSettingsMenu = $x("//li[@role][contains(text(),'Edit settings')]");
protected SelenideElement removeTopicBtn = $x("//ul[@role='menu']//div[contains(text(),'Remove Topic')]");
- protected SelenideElement confirmBtn = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
protected SelenideElement produceMessageBtn = $x("//div//button[text()='Produce Message']");
protected SelenideElement contentMessageTab = $x("//html//div[@id='root']/div/main//table//p");
protected SelenideElement cleanUpPolicyField = $x("//div[contains(text(),'Clean Up Policy')]/../span/*");
@@ -77,7 +76,7 @@ public TopicDetails openDotMenu() {
}
@Step
- public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+ public boolean isAlertWithMessageVisible(AlertHeader header, String message){
return isAlertVisible(header, message);
}
@@ -127,8 +126,7 @@ public TopicDetails clickDeleteTopicMenu() {
@Step
public TopicDetails clickConfirmBtnMdl() {
- confirmBtn.shouldBe(Condition.enabled).click();
- confirmBtn.shouldBe(Condition.disappear);
+ clickConfirmButton();
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
index 1a28050369d..73e2610b7e9 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.pages.topic;
+import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;
import com.codeborne.selenide.CollectionCondition;
@@ -22,11 +23,12 @@ public class TopicsList extends BasePage {
protected SelenideElement deleteSelectedTopicsBtn = $x("//button[text()='Delete selected topics']");
protected SelenideElement copySelectedTopicBtn = $x("//button[text()='Copy selected topic']");
protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
+ protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
@Step
public TopicsList waitUntilScreenReady() {
waitUntilSpinnerDisappear();
- topicListHeader.shouldBe(Condition.visible);
+ topicListHeader.shouldBe(visible);
return this;
}
@@ -38,7 +40,7 @@ public TopicsList clickAddTopicBtn() {
@Step
public boolean isTopicVisible(String topicName) {
- tableGrid.shouldBe(Condition.visible);
+ tableGrid.shouldBe(visible);
return isVisible(getTableElement(topicName));
}
@@ -59,6 +61,12 @@ public TopicsList openTopic(String topicName) {
return this;
}
+ @Step
+ public TopicsList openDotMenuByTopicName(String topicName){
+ getTopicItem(topicName).openDotMenu();
+ return this;
+ }
+
@Step
public boolean isCopySelectedTopicBtnEnabled(){
return isEnabled(copySelectedTopicBtn);
@@ -76,6 +84,23 @@ public TopicsList clickCopySelectedTopicBtn(){
return this;
}
+ @Step
+ public TopicsList clickClearMessagesBtn(){
+ clickByJavaScript(clearMessagesBtn.shouldBe(visible));
+ return this;
+ }
+
+ @Step
+ public TopicsList clickConfirmBtnMdl() {
+ clickConfirmButton();
+ return this;
+ }
+
+ @Step
+ public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
+ return isAlertVisible(header, message);
+ }
+
private List<SelenideElement> getVisibleColumnHeaders() {
return Stream.of("Replication Factor","Number of messages","Topic Name", "Partitions", "Out of sync replicas", "Size")
.map(name -> $x(String.format(columnHeaderLocator, name)))
@@ -110,12 +135,12 @@ private List<TopicGridItem> initGridItems() {
return gridItemList;
}
- @Step
- public TopicGridItem getTopicItem(String name) {
- return initGridItems().stream()
+ @Step
+ public TopicGridItem getTopicItem(String name) {
+ return initGridItems().stream()
.filter(e -> e.getName().equals(name))
.findFirst().orElse(null);
- }
+ }
@Step
public List<TopicGridItem> getNonInternalTopics() {
@@ -149,7 +174,7 @@ public TopicsList selectItem(boolean select) {
public boolean isInternal() {
boolean internal = false;
try {
- element.$x("./td[2]/a/span").shouldBe(Condition.visible);
+ element.$x("./td[2]/a/span").shouldBe(visible);
internal = true;
} catch (Throwable ignored) {
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
index f18150f8347..22b3b6e97b8 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
@@ -5,6 +5,7 @@
import static com.provectus.kafka.ui.settings.BaseSource.CLUSTER_NAME;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+import static org.assertj.core.api.Assertions.assertThat;
import com.provectus.kafka.ui.base.BaseTest;
import com.provectus.kafka.ui.models.Topic;
@@ -33,11 +34,15 @@ public class TopicMessagesTests extends BaseTest {
.setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
.setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
.setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ private static final Topic TOPIC_TO_CLEAR_MESSAGES = new Topic()
+ .setName("topic-to-clear-message-attribute-" + randomAlphabetic(5))
+ .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
+ .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES));
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_TO_CLEAR_MESSAGES));
TOPIC_LIST.forEach(topic -> apiService.createTopic(CLUSTER_NAME, topic.getName()));
}
@@ -108,7 +113,7 @@ void copyMessageFromTopicProfile() {
.getRandomMessage()
.openDotMenu()
.clickCopyToClipBoard();
- Assertions.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
+ Assertions.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS,"Copied successfully!"),
"isAlertWithMessageVisible()");
}
@@ -134,6 +139,41 @@ void checkingMessageFilteringByOffset() {
softly.assertAll();
}
+ @DisplayName("TopicTests.clearMessageOfTopic : Clear message of topic")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(239)
+ @Test
+ void checkClearTopicMessage() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(TopicDetails.TopicMenu.OVERVIEW)
+ .clickProduceMessageBtn();
+ produceMessagePanel
+ .waitUntilScreenReady()
+ .setContentFiled(TOPIC_TO_CLEAR_MESSAGES.getMessageContent())
+ .setKeyField(TOPIC_TO_CLEAR_MESSAGES.getMessageKey())
+ .submitProduceMessage();
+ topicDetails
+ .waitUntilScreenReady();
+ navigateToTopics();
+ topicsList
+ .waitUntilScreenReady();
+ assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_MESSAGES.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(1);
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_CLEAR_MESSAGES.getName())
+ .clickClearMessagesBtn()
+ .clickConfirmBtnMdl();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("%s messages have been successfully cleared!",TOPIC_TO_CLEAR_MESSAGES.getName())))
+ .as("isAlertWithMessageVisible()").isTrue();
+ softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_MESSAGES.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(0);
+ softly.assertAll();
+ }
+
@AfterAll
public void afterAll() {
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
| test | val | 2023-01-09T14:24:09 | "2023-01-05T13:00:21Z" | ArthurNiedial | train |
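A short illustration of the review note above ("we need to wait until button is visible while dot menu is opening"): the 'Clear Messages' item only appears once the row's dot-menu has opened, so the test must wait for visibility before clicking. The sketch below is illustrative only; it reuses the locator from the patch but calls a plain Selenide click() instead of the project's clickByJavaScript() helper.

```java
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$x;

import com.codeborne.selenide.SelenideElement;

public class ClearMessagesClickSketch {

  // Same locator as the TopicsList page object in the gold patch above.
  private final SelenideElement clearMessagesBtn =
      $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");

  public void clickClearMessages() {
    // shouldBe(visible) blocks until the dot-menu has finished opening,
    // so the click cannot hit a hidden or detached element.
    clearMessagesBtn.shouldBe(visible).click();
  }
}
```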
provectus/kafka-ui/3212_3213 | provectus/kafka-ui | provectus/kafka-ui/3212 | provectus/kafka-ui/3213 | [
"connected"
] | 356be08fc7f4f88ee566271ba6c59957d304e232 | 566dab078f7e4f75804f0aa6ba706cc8d73a127e | [] | [
"navigateToTopics() already has topicsList.waitUntilScreenReady()"
] | "2023-01-11T10:54:36Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]TopicTests.recreateTopic : Recreate topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-240
Pre-conditions :
1)Create a Topic
2)Add messages to Topic
Steps:
1)Navigate to Topics
2)Select "Recreate topic" from 3 dot menu for Topic
3)Press "Confirm" button
Expected results:
1)Confirm the action pop up should open
2)Topic should be recreated: messages should become 0 | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
index 73e2610b7e9..afe5633e440 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
@@ -8,6 +8,7 @@
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -24,6 +25,7 @@ public class TopicsList extends BasePage {
protected SelenideElement copySelectedTopicBtn = $x("//button[text()='Copy selected topic']");
protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
+ protected SelenideElement recreateTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Recreate Topic']");
@Step
public TopicsList waitUntilScreenReady() {
@@ -90,6 +92,12 @@ public TopicsList clickClearMessagesBtn(){
return this;
}
+ @Step
+ public TopicsList clickRecreateTopicBtn(){
+ clickByJavaScript(recreateTopicBtn.shouldBe(visible));
+ return this;
+ }
+
@Step
public TopicsList clickConfirmBtnMdl() {
clickConfirmButton();
@@ -174,7 +182,7 @@ public TopicsList selectItem(boolean select) {
public boolean isInternal() {
boolean internal = false;
try {
- element.$x("./td[2]/a/span").shouldBe(visible);
+ element.$x("./td[2]/a/span").shouldBe(visible, Duration.ofMillis(500));
internal = true;
} catch (Throwable ignored) {
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
index 22b3b6e97b8..68dd2f72201 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
@@ -38,11 +38,15 @@ public class TopicMessagesTests extends BaseTest {
.setName("topic-to-clear-message-attribute-" + randomAlphabetic(5))
.setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
.setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
+ private static final Topic TOPIC_TO_RECREATE = new Topic()
+ .setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
+ .setMessageKey(fileToString(System.getProperty("user.dir") + "/src/test/resources/producedkey.txt"))
+ .setMessageContent(fileToString(System.getProperty("user.dir") + "/src/test/resources/testData.txt"));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_TO_CLEAR_MESSAGES));
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_TO_CLEAR_MESSAGES, TOPIC_TO_RECREATE));
TOPIC_LIST.forEach(topic -> apiService.createTopic(CLUSTER_NAME, topic.getName()));
}
@@ -174,6 +178,41 @@ void checkClearTopicMessage() {
softly.assertAll();
}
+ @DisplayName("TopicTests.recreateTopic : Recreate topic")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(240)
+ @Test
+ void checkRecreateTopic(){
+ navigateToTopicsAndOpenDetails(TOPIC_TO_RECREATE.getName());
+ topicDetails
+ .openDetailsTab(TopicDetails.TopicMenu.OVERVIEW)
+ .clickProduceMessageBtn();
+ produceMessagePanel
+ .waitUntilScreenReady()
+ .setContentFiled(TOPIC_TO_RECREATE.getMessageContent())
+ .setKeyField(TOPIC_TO_RECREATE.getMessageKey())
+ .submitProduceMessage();
+ topicDetails
+ .waitUntilScreenReady();
+ navigateToTopics();
+ topicsList
+ .waitUntilScreenReady();
+ assertThat(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(1);
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_RECREATE.getName())
+ .clickRecreateTopicBtn()
+ .clickConfirmBtnMdl();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS,
+ String.format("Topic %s successfully recreated!", TOPIC_TO_RECREATE.getName())))
+ .as("isAlertWithMessageVisible()").isTrue();
+ softly.assertThat(topicsList.getTopicItem(TOPIC_TO_RECREATE.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(0);
+ softly.assertAll();
+ }
+
@AfterAll
public void afterAll() {
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(CLUSTER_NAME, topic.getName()));
| train | val | 2023-01-12T08:21:40 | "2023-01-11T10:30:33Z" | ArthurNiedial | train |
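The only page-object change in the gold patch above is the bounded wait in TopicGridItem.isInternal(): without an explicit Duration, the shouldBe(visible) call inside the try/catch would block for Selenide's default timeout on every non-internal row. A generic sketch of that presence-check pattern (class and method names here are illustrative):

```java
import static com.codeborne.selenide.Condition.visible;

import com.codeborne.selenide.SelenideElement;
import java.time.Duration;

public class BoundedPresenceCheckSketch {

  /** Returns quickly whether the row carries the optional badge element. */
  public static boolean hasBadge(SelenideElement row) {
    try {
      // Cap the wait at 500 ms, as in the patch, instead of the default timeout.
      row.$x("./td[2]/a/span").shouldBe(visible, Duration.ofMillis(500));
      return true;
    } catch (Throwable ignored) {
      return false;
    }
  }
}
```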
provectus/kafka-ui/3230_3231 | provectus/kafka-ui | provectus/kafka-ui/3230 | provectus/kafka-ui/3231 | [
"connected"
] | 6096ad1d4944a6573a77cc9b99444951ae5da33d | b4e7f3763b68362b0df6a0a167b344d5f6bf18af | [] | [
"can't we use TOPIC_TO_UPDATE here?\ni believe we should rename in in this case",
"now need to add this topic in your flow",
"Done"
] | "2023-01-13T11:12:20Z" | [
"scope/QA",
"scope/AQA"
] | TopicTests.removeTopicFromAllTopics : Remove topic from all topics | Description:
No set
Pre-conditions:
No set
Post-conditions:
No set
Steps:
1. Navigate to Topics
2. Select "Remove topic" from 3 dot menu for Topic
Expected result
"Confirm the action" pop up should open
3. Press "Confirm" button
Expected result
Topic should be removed | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
index afe5633e440..9db1d18c8bd 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
@@ -26,6 +26,7 @@ public class TopicsList extends BasePage {
protected SelenideElement purgeMessagesOfSelectedTopicsBtn = $x("//button[text()='Purge messages of selected topics']");
protected SelenideElement clearMessagesBtn = $x("//ul[contains(@class ,'open')]//div[text()='Clear Messages']");
protected SelenideElement recreateTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Recreate Topic']");
+ protected SelenideElement removeTopicBtn = $x("//ul[contains(@class ,'open')]//div[text()='Remove Topic']");
@Step
public TopicsList waitUntilScreenReady() {
@@ -98,6 +99,12 @@ public TopicsList clickRecreateTopicBtn(){
return this;
}
+ @Step
+ public TopicsList clickRemoveTopicBtn(){
+ clickByJavaScript(removeTopicBtn.shouldBe(visible));
+ return this;
+ }
+
@Step
public TopicsList clickConfirmBtnMdl() {
clickConfirmButton();
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index b5f4e0d9778..b70daa33079 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -47,8 +47,8 @@ public class TopicsTests extends BaseTest {
.setCustomParameterType(COMPRESSION_TYPE)
.setCustomParameterValue("producer")
.setCleanupPolicyValue(DELETE);
- private static final Topic TOPIC_TO_UPDATE = new Topic()
- .setName("topic-to-update-" + randomAlphabetic(5))
+ private static final Topic TOPIC_TO_UPDATE_AND_DELETE = new Topic()
+ .setName("topic-to-update-and-delete-" + randomAlphabetic(5))
.setNumberOfPartitions(1)
.setCleanupPolicyValue(COMPACT)
.setTimeToRetainData("604800001")
@@ -66,7 +66,7 @@ public class TopicsTests extends BaseTest {
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE, TOPIC_FOR_DELETE));
+ TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
}
@@ -127,42 +127,60 @@ void checkAvailableOperations() {
@Test
@Order(3)
public void updateTopic() {
- navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE.getName());
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
topicDetails
.openDotMenu()
.clickEditSettingsMenu();
topicCreateEditForm
.waitUntilScreenReady()
- .selectCleanupPolicy((TOPIC_TO_UPDATE.getCleanupPolicyValue()))
+ .selectCleanupPolicy((TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue()))
.setMinInsyncReplicas(10)
- .setTimeToRetainDataInMs(TOPIC_TO_UPDATE.getTimeToRetainData())
- .setMaxSizeOnDiskInGB(TOPIC_TO_UPDATE.getMaxSizeOnDisk())
- .setMaxMessageBytes(TOPIC_TO_UPDATE.getMaxMessageBytes())
+ .setTimeToRetainDataInMs(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetainData())
+ .setMaxSizeOnDiskInGB(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk())
+ .setMaxMessageBytes(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes())
.clickCreateTopicBtn();
topicDetails
.waitUntilScreenReady();
- navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE.getName());
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
topicDetails
.openDotMenu()
.clickEditSettingsMenu();
SoftAssertions softly = new SoftAssertions();
softly.assertThat(topicCreateEditForm.getCleanupPolicy()).as("getCleanupPolicy()")
- .isEqualTo(TOPIC_TO_UPDATE.getCleanupPolicyValue().getVisibleText());
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText());
softly.assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
- .isEqualTo(TOPIC_TO_UPDATE.getTimeToRetainData());
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetainData());
softly.assertThat(topicCreateEditForm.getMaxSizeOnDisk()).as("getMaxSizeOnDisk()")
- .isEqualTo(TOPIC_TO_UPDATE.getMaxSizeOnDisk().getVisibleText());
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText());
softly.assertThat(topicCreateEditForm.getMaxMessageBytes()).as("getMaxMessageBytes()")
- .isEqualTo(TOPIC_TO_UPDATE.getMaxMessageBytes());
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes());
softly.assertAll();
}
+ @DisplayName("TopicTests.removeTopicFromAllTopics : Remove topic from 'all topics'/'TopicList'")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(242)
+ @Test
+ @Order(4)
+ public void removeTopicFromTopicList() {
+ navigateToTopics();
+ topicsList
+ .openDotMenuByTopicName(TOPIC_TO_UPDATE_AND_DELETE.getName())
+ .clickRemoveTopicBtn()
+ .clickConfirmBtnMdl();
+ Assertions.assertTrue(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("Topic %s successfully deleted!", TOPIC_TO_UPDATE_AND_DELETE.getName())),
+ "isAlertWithMessageVisible()");
+ TOPIC_LIST.remove(TOPIC_TO_UPDATE_AND_DELETE);
+ }
+
@DisplayName("should delete topic")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(207)
@Test
- @Order(4)
+ @Order(5)
public void deleteTopic() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
topicDetails
@@ -179,7 +197,7 @@ public void deleteTopic() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(20)
@Test
- @Order(5)
+ @Order(6)
void redirectToConsumerFromTopic() {
String topicName = "source-activities";
String consumerGroupId = "connect-sink_postgres_activities";
@@ -200,7 +218,7 @@ void redirectToConsumerFromTopic() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(4)
@Test
- @Order(6)
+ @Order(7)
void checkTopicCreatePossibility() {
navigateToTopics();
topicsList
@@ -225,7 +243,7 @@ void checkTopicCreatePossibility() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(266)
@Test
- @Order(7)
+ @Order(8)
void checkTimeToRetainDataCustomValueWithEditingTopic() {
Topic topicToRetainData = new Topic()
.setName("topic-to-retain-data-" + randomAlphabetic(5))
@@ -260,7 +278,7 @@ void checkTimeToRetainDataCustomValueWithEditingTopic() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(6)
@Test
- @Order(8)
+ @Order(9)
void checkCustomParametersWithinCreateNewTopic() {
navigateToTopics();
topicsList
@@ -283,7 +301,7 @@ void checkCustomParametersWithinCreateNewTopic() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(2)
@Test
- @Order(9)
+ @Order(10)
void checkTopicListElements() {
navigateToTopics();
verifyElementsCondition(topicsList.getAllVisibleElements(), Condition.visible);
@@ -295,7 +313,7 @@ void checkTopicListElements() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(12)
@Test
- @Order(10)
+ @Order(11)
void addingNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails("_schemas");
@@ -322,7 +340,7 @@ void addingNewFilterWithinTopic() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(13)
@Test
- @Order(11)
+ @Order(12)
void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
@@ -347,7 +365,7 @@ void checkFilterSavingWithinSavedFilters() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(14)
@Test
- @Order(12)
+ @Order(13)
void checkingApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
@@ -371,7 +389,7 @@ void checkingApplyingSavedFilterWithinTopicMessages() {
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(11)
@Test
- @Order(13)
+ @Order(14)
void checkShowInternalTopicsButtonFunctionality(){
navigateToTopics();
SoftAssertions softly = new SoftAssertions();
@@ -391,6 +409,7 @@ void checkShowInternalTopicsButtonFunctionality(){
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(56)
@Test
+ @Order(15)
void checkRetentionBytesAccordingToMaxSizeOnDisk(){
navigateToTopics();
topicsList
@@ -442,6 +461,7 @@ void checkRetentionBytesAccordingToMaxSizeOnDisk(){
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(247)
@Test
+ @Order(16)
void recreateTopicFromTopicProfile(){
Topic topicToRecreate = new Topic()
.setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -473,6 +493,7 @@ void recreateTopicFromTopicProfile(){
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(8)
@Test
+ @Order(17)
void checkCopyTopicPossibility(){
Topic topicToCopy = new Topic()
.setName("topic-to-copy-" + randomAlphabetic(5))
| train | val | 2023-01-13T11:14:56 | "2023-01-13T10:15:00Z" | anezboretskiy | train |
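Most of the test patch above is a renumbering of @Order values so that the new removeTopicFromTopicList() step runs after the shared topic has been created and updated. That only works because the suite opts into annotation-based ordering; a minimal, generic JUnit 5 sketch of the mechanism (names are illustrative, not taken from TopicsTests):

```java
import org.junit.jupiter.api.MethodOrderer.OrderAnnotation;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;

@TestMethodOrder(OrderAnnotation.class)
class OrderedSuiteSketch {

  @Test
  @Order(1)
  void createSharedTopic() {
    // runs first: set up state reused by later steps
  }

  @Test
  @Order(2)
  void removeSharedTopic() {
    // runs second; inserting a new step means shifting the later @Order values,
    // which is exactly what the renumbering in the test patch above does
  }
}
```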
provectus/kafka-ui/3222_3235 | provectus/kafka-ui | provectus/kafka-ui/3222 | provectus/kafka-ui/3235 | [
"connected"
] | c2d7d70a8ec35809aa7b5a54d4b98eb1925845d8 | 5d31189609ddb7024d5f16a4f9ed52cbb9022355 | [
"Hello there th0ger! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"@th0ger hey, is the schema for this topic located in SR?",
"> @th0ger hey, is the schema for this topic located in SR?\r\n\r\nYes",
"> > @th0ger hey, is the schema for this topic located in SR?\r\n> \r\n> Yes\r\n\r\ndo you have schema registry available in serdes list within this topic? If yes, does selecting one help?",
"@th0ger also, please tell us if you are using TopicNameStrategy for subjects naming",
"If you mean this dropdown, then no schema:\r\n\r\n\r\nWe use TopicRecordNameStrategy, essentially something like:\r\n```python\r\nfrom confluent_kafka.schema_registry import topic_record_subject_name_strategy\r\nconf = {'auto.register.schemas': True, 'use.latest.version': False, 'subject.name.strategy': topic_record_subject_name_strategy}\r\nAvroSerializer(schema_registry_client=registry, schema_str=schema_str, conf=conf)\r\n```\r\n",
"I noticed under Settings that \r\n```confluent.value.subject.name.strategy: io.confluent.kafka.serializers.subject.TopicNameStrategy```,\r\nnot sure where it comes from. We intend to use TopicRecordNameStrategy.\r\n\r\nUPDATE: I managed to change above setting to `io.confluent.kafka.serializers.subject.TopicRecordNameStrategy` (via Confluent Control Center) but the issue persists.",
"@th0ger thank you for response, we will fix it soon",
"Hello @th0ger, \r\nwe implemented fix on this issue - can you please check on `master` tag image?",
"Got no reply, closing as completed.",
"@Haarolean yea it's on my list",
"@th0ger okay, ping me if it doesn't help",
"@armenuikafka please check the test cases for that issue and add one, if missing."
] | [] | "2023-01-13T16:52:47Z" | [
"type/bug",
"scope/backend",
"status/accepted"
] | Topic Messages: Preview AVRO broken in v0.5.0+ | **Describe the bug** (Actual behavior)
In v0.5.0 (and not currently fix on master), the preview feature of avro messages are garbagled:

**Expected behavior**
This is a regression, used to work in v0.4.0:

**Steps to Reproduce**
```yaml
version: "3.8"
services:
networks:
- default
ports:
- "139:139"
- "446:445"
restart: unless-stopped
zookeeper:
image: confluentinc/cp-zookeeper:7.2.1
container_name: zookeeper
ports:
- "2181:2181"
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
kafka:
image: confluentinc/cp-server:7.2.1
container_name: kafka
depends_on:
- zookeeper
ports:
- "9094:9094"
- "9092:9092"
- "9101:9101"
environment:
HOSTNAME: "kafka"
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT,CLUSTER:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: INSIDE://:9092,OUTSIDE://kafka:9094,CLUSTER://kafka:1337
KAFKA_LISTENERS: INSIDE://:9092,OUTSIDE://:9094,CLUSTER://:1337
KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_JMX_PORT: 9101
KAFKA_JMX_HOSTNAME: localhost
KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081
CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: kafka:1337
CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
CONFLUENT_METRICS_ENABLE: 'true'
CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
schema-registry:
image: confluentinc/cp-schema-registry:7.2.1
container_name: schema-registry
ports:
- "8081:8081"
depends_on:
- kafka
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'kafka:1337'
SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
kafka-ui:
image: provectuslabs/kafka-ui:v0.4.0 # Avro preview broken in v0.5.0
container_name: kafka-ui
ports:
- "8080:8080"
depends_on:
- kafka
- schema-registry
restart: always
environment:
KAFKA_CLUSTERS_0_NAME: BG
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9094
KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper:2181
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: 'http://schema-registry:8081'
KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: kafka-connect
KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: "http://kafka-connect:8083"
networks:
default:
external: true
name: kafka_overlay_network
```
**Additional context**
Since new "Value Serde" options are implemented in v0.5.0, it's quite clear that some bug was introduced. | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
index 9c3d0a9d375..a4d7ee8891d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
@@ -46,6 +46,7 @@ public static String name() {
private List<String> schemaRegistryUrls;
private String valueSchemaNameTemplate;
private String keySchemaNameTemplate;
+ private boolean checkSchemaExistenceForDeserialize;
private Map<SchemaType, MessageFormatter> schemaRegistryFormatters;
@@ -75,7 +76,9 @@ public void autoConfigure(PropertyResolver kafkaClusterProperties,
kafkaClusterProperties.getProperty("schemaRegistrySSL.truststorePassword", String.class).orElse(null)
),
kafkaClusterProperties.getProperty("schemaRegistryKeySchemaNameTemplate", String.class).orElse("%s-key"),
- kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value")
+ kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value"),
+ kafkaClusterProperties.getProperty("schemaRegistryCheckSchemaExistenceForDeserialize", Boolean.class)
+ .orElse(false)
);
}
@@ -99,7 +102,9 @@ public void configure(PropertyResolver serdeProperties,
serdeProperties.getProperty("truststorePassword", String.class).orElse(null)
),
serdeProperties.getProperty("keySchemaNameTemplate", String.class).orElse("%s-key"),
- serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value")
+ serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value"),
+ kafkaClusterProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
+ .orElse(false)
);
}
@@ -108,12 +113,14 @@ void configure(
List<String> schemaRegistryUrls,
SchemaRegistryClient schemaRegistryClient,
String keySchemaNameTemplate,
- String valueSchemaNameTemplate) {
+ String valueSchemaNameTemplate,
+ boolean checkTopicSchemaExistenceForDeserialize) {
this.schemaRegistryUrls = schemaRegistryUrls;
this.schemaRegistryClient = schemaRegistryClient;
this.keySchemaNameTemplate = keySchemaNameTemplate;
this.valueSchemaNameTemplate = valueSchemaNameTemplate;
this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient);
+ this.checkSchemaExistenceForDeserialize = checkTopicSchemaExistenceForDeserialize;
}
private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
@@ -122,8 +129,7 @@ private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls
@Nullable String keyStoreLocation,
@Nullable String keyStorePassword,
@Nullable String trustStoreLocation,
- @Nullable String trustStorePassword
- ) {
+ @Nullable String trustStorePassword) {
Map<String, String> configs = new HashMap<>();
if (username != null && password != null) {
configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
@@ -169,7 +175,8 @@ public Optional<String> getDescription() {
@Override
public boolean canDeserialize(String topic, Target type) {
String subject = schemaSubject(topic, type);
- return getSchemaBySubject(subject).isPresent();
+ return !checkSchemaExistenceForDeserialize
+ || getSchemaBySubject(subject).isPresent();
}
@Override
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
index 092b3e8b3c9..4ea2bf3c2ac 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
@@ -22,6 +22,7 @@
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
@@ -35,7 +36,7 @@ class SchemaRegistrySerdeTest {
@BeforeEach
void init() {
serde = new SchemaRegistrySerde();
- serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value");
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
}
@ParameterizedTest
@@ -129,24 +130,61 @@ void deserializeReturnsJsonAvroMsgJsonRepresentation() throws RestClientExceptio
.contains(Map.entry("schemaId", schemaId));
}
+ @Nested
+ class SerdeWithDisabledSubjectExistenceCheck {
+
+ @BeforeEach
+ void init() {
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", false);
+ }
+
+ @Test
+ void canDeserializeAlwaysReturnsTrue() {
+ String topic = RandomString.make(10);
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+ }
+ }
+
+ @Nested
+ class SerdeWithEnabledSubjectExistenceCheck {
+
+ @BeforeEach
+ void init() {
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
+ }
+
+ @Test
+ void canDeserializeReturnsTrueIfSubjectExists() throws Exception {
+ String topic = RandomString.make(10);
+ registryClient.register(topic + "-key", new AvroSchema("\"int\""));
+ registryClient.register(topic + "-value", new AvroSchema("\"int\""));
+
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+ }
+
+ @Test
+ void canDeserializeReturnsFalseIfSubjectDoesNotExist() {
+ String topic = RandomString.make(10);
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
+ }
+ }
+
@Test
void canDeserializeAndCanSerializeReturnsTrueIfSubjectExists() throws Exception {
String topic = RandomString.make(10);
registryClient.register(topic + "-key", new AvroSchema("\"int\""));
registryClient.register(topic + "-value", new AvroSchema("\"int\""));
- assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
- assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
-
assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isTrue();
assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isTrue();
}
@Test
- void canDeserializeAndCanSerializeReturnsFalseIfSubjectDoesNotExist() {
+ void canSerializeReturnsFalseIfSubjectDoesNotExist() {
String topic = RandomString.make(10);
- assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
- assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isFalse();
assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse();
}
@@ -178,4 +216,4 @@ private byte[] jsonToAvro(String json, AvroSchema schema) {
return output.toByteArray();
}
-}
\ No newline at end of file
+}
| test | val | 2023-01-16T17:39:45 | "2023-01-12T14:35:44Z" | th0ger | train |
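The fix merged in PR 3235 (gold patch above) makes the subject-existence check optional: checkSchemaExistenceForDeserialize defaults to false, so the SchemaRegistry serde stays available for a topic even when no matching <topic>-value subject exists, which is exactly the TopicRecordNameStrategy situation described in the comments; the old strict behavior can be re-enabled through the schemaRegistryCheckSchemaExistenceForDeserialize cluster property introduced in the patch. The sketch below mirrors the unit test from the test patch rather than a real deployment: the configure(...) overload used here is package-private, the registry client is Confluent's in-memory mock, and import paths are assumed.

```java
import com.provectus.kafka.ui.serde.api.Serde;
import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
import java.util.List;

class SubjectExistenceCheckSketch {

  void demo() {
    var registryClient = new MockSchemaRegistryClient();
    var serde = new SchemaRegistrySerde();

    // Last argument is checkSchemaExistenceForDeserialize (the new flag).
    serde.configure(List.of("http://wontbeused"), registryClient, "%s-key", "%s-value", false);
    // No "orders-value" subject is registered, yet deserialization is still offered,
    // so messages written with TopicRecordNameStrategy remain previewable.
    boolean lenient = serde.canDeserialize("orders", Serde.Target.VALUE); // true

    serde.configure(List.of("http://wontbeused"), registryClient, "%s-key", "%s-value", true);
    boolean strict = serde.canDeserialize("orders", Serde.Target.VALUE); // false until the subject exists
  }
}
```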
provectus/kafka-ui/3224_3235 | provectus/kafka-ui | provectus/kafka-ui/3224 | provectus/kafka-ui/3235 | [
"connected"
] | c2d7d70a8ec35809aa7b5a54d4b98eb1925845d8 | 5d31189609ddb7024d5f16a4f9ed52cbb9022355 | [
"Hello there Ronserruya! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reaching out.\r\nWe'll take a look soon.",
"@Ronserruya does the schema name in glue represent the topic name exactly?\r\nThe default glue serde behavior is the following: Upon deserializing the message, kafka-ui will try to look up a schema with the name \"%topicname%\" (that's a default one for GlueSchemaRegistryKafkaSerializer), so in case it doesn't exist -- it assumes that glue serde is not available for the topic. \r\nOn a side note, you're able to alter the pattern used for schema lookup by adjusting the following env vars:\r\n`kafka.clusters.0.serde.0.properties.keySchemaNameTemplate` and `kafka.clusters.0.serde.0.properties.valueSchemaNameTemplate`.",
"Hey @Haarolean , thank you for the quick response. I wasn't aware that that was a requirement. I created a test topic and schema with matching names and validated that it does work. \r\n\r\nHowever there are 2 issues:\r\n1. The resulting decoded data injects the namespaces into the data. I decoded similar avro msgs using python, kafka-connect, and spark and didn't encounter this behavior. \r\nI sent this msg: ```{\"name\": {\"raw\": null, \"first\": \"ron\", \"full\": \"ron serruya\", \"last\": \"serruya\"}, \"age\": 3}```\r\nEncoded with this schema: \r\n```\r\n{\r\n \"type\": \"record\",\r\n \"name\": \"generation\",\r\n \"namespace\": \"top_level\",\r\n \"fields\": [\r\n {\r\n \"name\": \"name\",\r\n \"type\": [\r\n {\r\n \"type\": \"record\",\r\n \"name\": \"name\",\r\n \"namespace\": \"top_level.generation\",\r\n \"fields\": [\r\n {\r\n \"name\": \"raw\",\r\n \"type\": [\r\n \"string\",\r\n \"null\"\r\n ]\r\n },\r\n {\r\n \"name\": \"first\",\r\n \"type\": \"string\"\r\n },\r\n {\r\n \"name\": \"full\",\r\n \"type\": \"string\"\r\n },\r\n {\r\n \"name\": \"last\",\r\n \"type\": \"string\"\r\n }\r\n ]\r\n },\r\n \"null\"\r\n ]\r\n },\r\n {\r\n \"name\": \"age\",\r\n \"type\": \"int\"\r\n }\r\n ]\r\n}\r\n```\r\nKafka-ui showed this decoded msg\r\n```\r\n{\r\n\t\"name\": {\r\n\t\t\"top_level.generation.name\": {\r\n\t\t\t\"raw\": null,\r\n\t\t\t\"first\": \"ron\",\r\n\t\t\t\"full\": \"ron serruya\",\r\n\t\t\t\"last\": \"serruya\"\r\n\t\t}\r\n\t},\r\n\t\"age\": 3\r\n}\r\n```\r\n(let me know if you want to open a different issue for this, since its not exactly related)\r\n2. This method of fetching the schema using the topic name is not really usable for us since we have many topics and schemas that don't match 1to1 (and can't be simply formatted like `%s-value-schema`), also, how does that work with multiple schemas per topic?\r\n\r\nWhen a message is encoded using the aws glue schema registry format, the schema UUID is inserted from from bytes 2-17, so decoders like the python one or the one used in kafka connect just get the schema UUID from those bytes and fetch the schema from aws by ID, without needing the topic name.\r\nSee [python](https://github.com/DisasterAWARE/aws-glue-schema-registry-python/blob/main/src/aws_schema_registry/codec.py#L118), [aws java library](https://github.com/awslabs/aws-glue-schema-registry/blob/a7017fb7a418f30d6302d3822e3464388228a72c/serializer-deserializer/src/main/java/com/amazonaws/services/schemaregistry/deserializers/GlueSchemaRegistryDeserializationFacade.java#L146)\r\n\r\nLooking at the code in [your deserialize method](https://github.com/provectus/kafkaui-glue-sr-serde/blob/main/src/main/java/com/provectus/kafka/ui/serdes/glue/GlueSerde.java#L252) I don't see `topic` being used, so I suspect that by using the `GlueSchemaRegistryDeserializationFacade` class your code already does this upon desiralization, so all that needs to be done is to \"unblock\" the need for the topic name to match a schema.\r\n\r\nForgive me if I read the code incorrectly, being a python dev I lost myself in the sea of facades and interfaces π
",
"@armenuikafka please add to the test cases, if it can be checked on our end. Or just move to done otherwise"
] | [] | "2023-01-13T16:52:47Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | AWS Glue serde not available for non-name strategy | I tried configuring the new aws glue serde to read messages encoded with the schema registry, but I can't see the glue serde in the value dropdown. Not sure if it's a bug or if I configured it wrong.
**Set up**
I tried following the example from https://github.com/provectus/kafkaui-glue-sr-serde/blob/main/docker-compose/setup-example.yaml
Folder structure:
```
ο .
βββ ο vol
β βββ ο
kafkaui-glue-serde-1.0-SNAPSHOT-jar-with-dependencies.jar
βββ ο docker-compose.yml
```
```
---
version: '2'
services:
kafka-ui:
container_name: kafka-ui
image: provectuslabs/kafka-ui:latest
ports:
- 8080:8080
environment:
AWS_ACCESS_KEY_ID: '****'
AWS_SECRET_ACCESS_KEY: '***'
AWS_SESSION_TOKEN: '***'
kafka.clusters.0.name: Mycluster
kafka.clusters.0.bootstrapServers: '***'
kafka.clusters.0.serde.0.name: GlueSchemaRegistry
kafka.clusters.0.serde.0.filePath: /glue-serde/kafkaui-glue-serde-1.0-SNAPSHOT-jar-with-dependencies.jar
kafka.clusters.0.serde.0.className: com.provectus.kafka.ui.serdes.glue.GlueSerde
kafka.clusters.0.serde.0.properties.region: us-east-1 #required
kafka.clusters.0.serde.0.properties.registry: '***' #required, name of Glue Schema Registry
volumes:
- ./vol/:/glue-serde
```
The `kafkaui-glue-serde-1.0.0.jar` file from releases in https://github.com/provectus/kafkaui-glue-sr-serde didn't work at first, I guess it was missing the dependencies, so I built it myself (`mvn package`), to create the `kafkaui-glue-serde-1.0-SNAPSHOT-jar-with-dependencies.jar` file.
**Screenshots**

**logs**
```
ron@bash:~/Desktop/Projects/kafka_ui_test$ docker-compose up
[+] Running 1/1
β Ώ Container kafka-ui Recreated 2.4s
Attaching to kafka-ui
kafka-ui | _ _ ___ __ _ _ _ __ __ _
kafka-ui | | | | |_ _| / _|___ _ _ /_\ _ __ __ _ __| |_ ___ | |/ /__ _ / _| |_____
kafka-ui | | |_| || | | _/ _ | '_| / _ \| '_ / _` / _| ' \/ -_) | ' </ _` | _| / / _`|
kafka-ui | \___/|___| |_| \___|_| /_/ \_| .__\__,_\__|_||_\___| |_|\_\__,_|_| |_\_\__,|
kafka-ui | |_|
kafka-ui |
kafka-ui | 2023-01-12 15:38:36,912 INFO [background-preinit] o.h.v.i.u.Version: HV000001: Hibernate Validator 6.2.5.Final
kafka-ui | 2023-01-12 15:38:36,951 INFO [main] c.p.k.u.KafkaUiApplication: Starting KafkaUiApplication using Java 17.0.5 on d01713c83787 with PID 1 (/kafka-ui-api.jar started by kafkaui in /)
kafka-ui | 2023-01-12 15:38:36,952 DEBUG [main] c.p.k.u.KafkaUiApplication: Running with Spring Boot v2.7.5, Spring v5.3.23
kafka-ui | 2023-01-12 15:38:36,953 INFO [main] c.p.k.u.KafkaUiApplication: No active profile set, falling back to 1 default profile: "default"
kafka-ui | 2023-01-12 15:38:41,813 DEBUG [main] c.p.k.u.s.SerdesInitializer: Configuring serdes for cluster Mycluster
kafka-ui | 2023-01-12 15:38:41,833 INFO [main] c.p.k.u.s.SerdesInitializer: Loading custom serde GlueSchemaRegistry
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: endpoint key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: registry.name key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: description key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: avroRecordType key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: protobufMessageType key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,417 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: compatibility key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: compression key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: schemaAutoRegistrationEnabled key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: schemaAutoRegistrationEnabled is not defined in the properties. Using the default value false
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: jacksonSerializationFeatures key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: jacksonDeserializationFeatures key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,419 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: tags key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: Tags value is not defined in the properties. No tags are assigned
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: metadata key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: userAgentApp key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: secondaryDeserializer key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: cacheSize key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: Cache Size is not found, using default 200
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: timeToLiveMillis key is not present in the configs
kafka-ui | 2023-01-12 15:38:43,420 INFO [main] c.a.s.s.c.c.GlueSchemaRegistryConfiguration: Cache Time to live is not found, using default 86400000
kafka-ui | 2023-01-12 15:38:44,806 INFO [main] o.s.b.a.e.w.EndpointLinksResolver: Exposing 2 endpoint(s) beneath base path '/actuator'
kafka-ui | 2023-01-12 15:38:45,147 INFO [main] o.s.b.a.s.r.ReactiveUserDetailsServiceAutoConfiguration:
kafka-ui |
kafka-ui | Using generated security password: ****
kafka-ui |
kafka-ui | 2023-01-12 15:38:45,444 WARN [main] c.p.k.u.c.a.DisabledAuthSecurityConfig: Authentication is disabled. Access will be unrestricted.
kafka-ui | 2023-01-12 15:38:46,437 INFO [main] o.s.b.w.e.n.NettyWebServer: Netty started on port 8080
kafka-ui | 2023-01-12 15:38:46,494 INFO [main] c.p.k.u.KafkaUiApplication: Started KafkaUiApplication in 10.636 seconds (JVM running for 11.868)
kafka-ui | 2023-01-12 15:38:46,564 DEBUG [parallel-1] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: Mycluster
kafka-ui | 2023-01-12 15:38:46,608 INFO [parallel-1] o.a.k.c.a.AdminClientConfig: AdminClientConfig values:
kafka-ui | bootstrap.servers = [****]
kafka-ui | client.dns.lookup = use_all_dns_ips
kafka-ui | client.id = kafka-ui-admin-client-1673537926591
kafka-ui | connections.max.idle.ms = 300000
kafka-ui | default.api.timeout.ms = 60000
kafka-ui | metadata.max.age.ms = 300000
kafka-ui | metric.reporters = []
kafka-ui | metrics.num.samples = 2
kafka-ui | metrics.recording.level = INFO
kafka-ui | metrics.sample.window.ms = 30000
kafka-ui | receive.buffer.bytes = 65536
kafka-ui | reconnect.backoff.max.ms = 1000
kafka-ui | reconnect.backoff.ms = 50
kafka-ui | request.timeout.ms = 30000
kafka-ui | retries = 2147483647
kafka-ui | retry.backoff.ms = 100
kafka-ui | sasl.client.callback.handler.class = null
kafka-ui | sasl.jaas.config = null
kafka-ui | sasl.kerberos.kinit.cmd = /usr/bin/kinit
kafka-ui | sasl.kerberos.min.time.before.relogin = 60000
kafka-ui | sasl.kerberos.service.name = null
kafka-ui | sasl.kerberos.ticket.renew.jitter = 0.05
kafka-ui | sasl.kerberos.ticket.renew.window.factor = 0.8
kafka-ui | sasl.login.callback.handler.class = null
kafka-ui | sasl.login.class = null
kafka-ui | sasl.login.connect.timeout.ms = null
kafka-ui | sasl.login.read.timeout.ms = null
kafka-ui | sasl.login.refresh.buffer.seconds = 300
kafka-ui | sasl.login.refresh.min.period.seconds = 60
kafka-ui | sasl.login.refresh.window.factor = 0.8
kafka-ui | sasl.login.refresh.window.jitter = 0.05
kafka-ui | sasl.login.retry.backoff.max.ms = 10000
kafka-ui | sasl.login.retry.backoff.ms = 100
kafka-ui | sasl.mechanism = GSSAPI
kafka-ui | sasl.oauthbearer.clock.skew.seconds = 30
kafka-ui | sasl.oauthbearer.expected.audience = null
kafka-ui | sasl.oauthbearer.expected.issuer = null
kafka-ui | sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
kafka-ui | sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
kafka-ui | sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
kafka-ui | sasl.oauthbearer.jwks.endpoint.url = null
kafka-ui | sasl.oauthbearer.scope.claim.name = scope
kafka-ui | sasl.oauthbearer.sub.claim.name = sub
kafka-ui | sasl.oauthbearer.token.endpoint.url = null
kafka-ui | security.protocol = PLAINTEXT
kafka-ui | security.providers = null
kafka-ui | send.buffer.bytes = 131072
kafka-ui | socket.connection.setup.timeout.max.ms = 30000
kafka-ui | socket.connection.setup.timeout.ms = 10000
kafka-ui | ssl.cipher.suites = null
kafka-ui | ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
kafka-ui | ssl.endpoint.identification.algorithm = https
kafka-ui | ssl.engine.factory.class = null
kafka-ui | ssl.key.password = null
kafka-ui | ssl.keymanager.algorithm = SunX509
kafka-ui | ssl.keystore.certificate.chain = null
kafka-ui | ssl.keystore.key = null
kafka-ui | ssl.keystore.location = null
kafka-ui | ssl.keystore.password = null
kafka-ui | ssl.keystore.type = JKS
kafka-ui | ssl.protocol = TLSv1.3
kafka-ui | ssl.provider = null
kafka-ui | ssl.secure.random.implementation = null
kafka-ui | ssl.trustmanager.algorithm = PKIX
kafka-ui | ssl.truststore.certificates = null
kafka-ui | ssl.truststore.location = null
kafka-ui | ssl.truststore.password = null
kafka-ui | ssl.truststore.type = JKS
kafka-ui |
kafka-ui | 2023-01-12 15:38:46,885 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka version: 3.3.1
kafka-ui | 2023-01-12 15:38:46,885 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka commitId: e23c59d00e687ff5
kafka-ui | 2023-01-12 15:38:46,885 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka startTimeMs: 1673537926879
kafka-ui | 2023-01-12 15:38:50,739 DEBUG [parallel-6] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: Mycluster
``` | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
index 9c3d0a9d375..a4d7ee8891d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerde.java
@@ -46,6 +46,7 @@ public static String name() {
private List<String> schemaRegistryUrls;
private String valueSchemaNameTemplate;
private String keySchemaNameTemplate;
+ private boolean checkSchemaExistenceForDeserialize;
private Map<SchemaType, MessageFormatter> schemaRegistryFormatters;
@@ -75,7 +76,9 @@ public void autoConfigure(PropertyResolver kafkaClusterProperties,
kafkaClusterProperties.getProperty("schemaRegistrySSL.truststorePassword", String.class).orElse(null)
),
kafkaClusterProperties.getProperty("schemaRegistryKeySchemaNameTemplate", String.class).orElse("%s-key"),
- kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value")
+ kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value"),
+ kafkaClusterProperties.getProperty("schemaRegistryCheckSchemaExistenceForDeserialize", Boolean.class)
+ .orElse(false)
);
}
@@ -99,7 +102,9 @@ public void configure(PropertyResolver serdeProperties,
serdeProperties.getProperty("truststorePassword", String.class).orElse(null)
),
serdeProperties.getProperty("keySchemaNameTemplate", String.class).orElse("%s-key"),
- serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value")
+ serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value"),
+ kafkaClusterProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
+ .orElse(false)
);
}
@@ -108,12 +113,14 @@ void configure(
List<String> schemaRegistryUrls,
SchemaRegistryClient schemaRegistryClient,
String keySchemaNameTemplate,
- String valueSchemaNameTemplate) {
+ String valueSchemaNameTemplate,
+ boolean checkTopicSchemaExistenceForDeserialize) {
this.schemaRegistryUrls = schemaRegistryUrls;
this.schemaRegistryClient = schemaRegistryClient;
this.keySchemaNameTemplate = keySchemaNameTemplate;
this.valueSchemaNameTemplate = valueSchemaNameTemplate;
this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient);
+ this.checkSchemaExistenceForDeserialize = checkTopicSchemaExistenceForDeserialize;
}
private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
@@ -122,8 +129,7 @@ private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls
@Nullable String keyStoreLocation,
@Nullable String keyStorePassword,
@Nullable String trustStoreLocation,
- @Nullable String trustStorePassword
- ) {
+ @Nullable String trustStorePassword) {
Map<String, String> configs = new HashMap<>();
if (username != null && password != null) {
configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
@@ -169,7 +175,8 @@ public Optional<String> getDescription() {
@Override
public boolean canDeserialize(String topic, Target type) {
String subject = schemaSubject(topic, type);
- return getSchemaBySubject(subject).isPresent();
+ return !checkSchemaExistenceForDeserialize
+ || getSchemaBySubject(subject).isPresent();
}
@Override
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
index 092b3e8b3c9..4ea2bf3c2ac 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
@@ -22,6 +22,7 @@
import org.apache.avro.io.Encoder;
import org.apache.avro.io.EncoderFactory;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
@@ -35,7 +36,7 @@ class SchemaRegistrySerdeTest {
@BeforeEach
void init() {
serde = new SchemaRegistrySerde();
- serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value");
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
}
@ParameterizedTest
@@ -129,24 +130,61 @@ void deserializeReturnsJsonAvroMsgJsonRepresentation() throws RestClientExceptio
.contains(Map.entry("schemaId", schemaId));
}
+ @Nested
+ class SerdeWithDisabledSubjectExistenceCheck {
+
+ @BeforeEach
+ void init() {
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", false);
+ }
+
+ @Test
+ void canDeserializeAlwaysReturnsTrue() {
+ String topic = RandomString.make(10);
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+ }
+ }
+
+ @Nested
+ class SerdeWithEnabledSubjectExistenceCheck {
+
+ @BeforeEach
+ void init() {
+ serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
+ }
+
+ @Test
+ void canDeserializeReturnsTrueIfSubjectExists() throws Exception {
+ String topic = RandomString.make(10);
+ registryClient.register(topic + "-key", new AvroSchema("\"int\""));
+ registryClient.register(topic + "-value", new AvroSchema("\"int\""));
+
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
+ }
+
+ @Test
+ void canDeserializeReturnsFalseIfSubjectDoesNotExist() {
+ String topic = RandomString.make(10);
+ assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
+ assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
+ }
+ }
+
@Test
void canDeserializeAndCanSerializeReturnsTrueIfSubjectExists() throws Exception {
String topic = RandomString.make(10);
registryClient.register(topic + "-key", new AvroSchema("\"int\""));
registryClient.register(topic + "-value", new AvroSchema("\"int\""));
- assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isTrue();
- assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isTrue();
-
assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isTrue();
assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isTrue();
}
@Test
- void canDeserializeAndCanSerializeReturnsFalseIfSubjectDoesNotExist() {
+ void canSerializeReturnsFalseIfSubjectDoesNotExist() {
String topic = RandomString.make(10);
- assertThat(serde.canDeserialize(topic, Serde.Target.KEY)).isFalse();
- assertThat(serde.canDeserialize(topic, Serde.Target.VALUE)).isFalse();
assertThat(serde.canSerialize(topic, Serde.Target.KEY)).isFalse();
assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse();
}
@@ -178,4 +216,4 @@ private byte[] jsonToAvro(String json, AvroSchema schema) {
return output.toByteArray();
}
-}
\ No newline at end of file
+}
| train | val | 2023-01-16T17:39:45 | "2023-01-12T15:51:49Z" | Ronserruya | train |
provectus/kafka-ui/3206_3246 | provectus/kafka-ui | provectus/kafka-ui/3206 | provectus/kafka-ui/3246 | [
"connected"
] | 5d31189609ddb7024d5f16a4f9ed52cbb9022355 | ceb9c5dd85ce5543b9863c5bc500e67467c358fb | [
"Hello there AlexeyGA! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for raising the issue.\r\nYeah, I believe the frontend doesn't take `SERVER_SERVLET_CONTEXT_PATH` into consideration.\r\nWe'll fix that, thanks again.",
"Hello, this is my first experience with open source contributions. Would really appreciate it if you explained exactly what is needed to be done. \r\n\r\nI would love to work on this issue.\r\n\r\nThanks in advance! :)",
"@anushkabishnoi hey, sorry for the delay! We've been already working on it.\r\nIf you wish to contribute please refer to [contributing guide](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md) as a start, thanks!"
] | [] | "2023-01-18T20:19:14Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Custom URN for logout button for 0.5.0 | When connecting OAuth2 via the Keycloak provider, I could not find a way to configure the URN used by the logout button in the interface.
 The UI is very often accessed under a custom path that differs from `/`, so logging out is not possible. I would therefore like an environment setting/variable responsible for that path, so that logout can work properly.
 The application is deployed in Kubernetes. Additionally, the `SERVER_SERVLET_CONTEXT_PATH : /kafka-ui` parameter is passed.
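 For illustration only (the exact manifest shape depends on the deployment), the context path is typically supplied like this, and the expectation is that the logout link honours it:
 ```
 # illustrative Kubernetes container env for kafka-ui
 env:
   - name: SERVER_SERVLET_CONTEXT_PATH
     value: "/kafka-ui"
 # with this set, "Log out" should point to /kafka-ui/logout rather than the hard-coded /logout
 ```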
"kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx",
"kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx"
] | [
"kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx",
"kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
index bae205ad03c..a70276afabd 100644
--- a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
+++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx
@@ -26,7 +26,7 @@ const UserInfo = () => {
}
>
<DropdownItem>
- <S.LogoutLink href="/logout">Log out</S.LogoutLink>
+ <S.LogoutLink href={`${window.basePath}/logout`}>Log out</S.LogoutLink>
</DropdownItem>
</Dropdown>
) : null;
diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
index f474783c471..b51f00da024 100644
--- a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
+++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx
@@ -22,6 +22,10 @@ describe('UserInfo', () => {
it('should render the userInfo during click opens the dropdown', async () => {
const username = 'someName';
+ Object.defineProperty(window, 'basePath', {
+ value: '',
+ writable: true,
+ });
(useUserInfo as jest.Mock).mockImplementation(() => ({ username }));
renderComponent();
@@ -33,6 +37,22 @@ describe('UserInfo', () => {
expect(logout).toHaveAttribute('href', '/logout');
});
+ it('should render correct url during basePath initialization', async () => {
+ const username = 'someName';
+ const baseUrl = '/path';
+ Object.defineProperty(window, 'basePath', {
+ value: baseUrl,
+ writable: true,
+ });
+ (useUserInfo as jest.Mock).mockImplementation(() => ({ username }));
+
+ renderComponent();
+
+ const logout = screen.getByText('Log out');
+ expect(logout).toBeInTheDocument();
+ expect(logout).toHaveAttribute('href', `${baseUrl}/logout`);
+ });
+
it('should not render anything if the username does not exists', () => {
(useUserInfo as jest.Mock).mockImplementation(() => ({
username: undefined,
| null | train | val | 2023-01-17T19:38:44 | "2023-01-09T07:30:22Z" | AlexeyGA | train |
provectus/kafka-ui/3229_3254 | provectus/kafka-ui | provectus/kafka-ui/3229 | provectus/kafka-ui/3254 | [
"connected"
] | 0ff8c0d4fb9d7ea4d075a2c65f6f14144d4c6a1f | 9cfa184cea9e3273673d955806a0d21700be8e8b | [] | [
"can't we reuse TOPIC_TO_CLEAR_MESSAGES in this test to not create a lot of instances. u can rename it to TOPIC_TO_CLEAR_AND_PURGE_MESSAGES\nI believe we can place this test after @Order(4)",
"Fixed."
] | "2023-01-23T09:07:45Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]TopicTests.purgeMessagesOfTopics : Purge messages of topics | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-10
Description:
Checking possibility to Purge messages of the selected Topic from All Topics' list
Pre-conditions:
1)Login to Kafka-ui application
2)Open the 'Local' section
3)Select the 'Topics'
4)Create a new Topic
5)Add message into a new created Topic
Post-conditions:
Not set
Steps:
1)Check the previously created Topic from All Topics' list
2)Click on 'Purge messages of selected topics'
3)Press 'Cancel' button
4)Click on 'Purge messages of selected topics'
5)Click 'Confirm' button
6)Turn to 'Messages' tab of the selected Topic
Expected results:
1)Make sure 'Delete selected topics', 'Copy selected topic', 'Purge messages of selected topics' are displayed
2)Should appear 'Confirm the action' pop up for confirmation deleting the topic with following information: 'Are you sure you want to purge messages of selected topics?' message, 'Cancel' and 'Confirm buttons
3)'Confirm the action' pop up should disappear
4)Make sure the pop up 'Confirm the action' opened again
5)'[Topic name] Messages have been successfully cleared!' success message should appear
6)Should not be displayed the messages | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index 8b12d3b0d3f..afe95432ca8 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -19,7 +19,9 @@ public abstract class BasePage extends WebUtils {
protected SelenideElement dotMenuBtn = $x("//button[@aria-label='Dropdown Toggle']");
protected SelenideElement alertHeader = $x("//div[@role='alert']//div[@role='heading']");
protected SelenideElement alertMessage = $x("//div[@role='alert']//div[@role='contentinfo']");
+ protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
+ protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
protected ElementsCollection allGridItems = $$x("//tr[@class]");
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
@@ -74,6 +76,15 @@ protected void clickConfirmButton() {
confirmBtn.shouldBe(Condition.disappear);
}
+ protected void clickCancelButton() {
+ cancelBtn.shouldBe(Condition.enabled).click();
+ cancelBtn.shouldBe(Condition.disappear);
+ }
+
+ protected boolean isConfirmationModalVisible() {
+ return isVisible(confirmationMdl);
+ }
+
public enum AlertHeader {
SUCCESS("Success"),
VALIDATION_ERROR("Validation Error"),
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
index 1de0478abec..119f602a402 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicDetails.java
@@ -51,7 +51,6 @@ public class TopicDetails extends BasePage {
protected SelenideElement cleanUpPolicyField = $x("//div[contains(text(),'Clean Up Policy')]/../span/*");
protected SelenideElement partitionsField = $x("//div[contains(text(),'Partitions')]/../span");
protected SelenideElement backToCreateFiltersLink = $x("//div[text()='Back To create filters']");
- protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected ElementsCollection messageGridItems = $$x("//tbody//tr");
protected SelenideElement actualCalendarDate = $x("//div[@class='react-datepicker__current-month']");
protected SelenideElement previousMonthButton = $x("//button[@aria-label='Previous Month']");
@@ -103,7 +102,7 @@ public TopicDetails clickEditSettingsMenu() {
@Step
public boolean isConfirmationMdlVisible(){
- return isVisible(confirmationMdl);
+ return isConfirmationModalVisible();
}
@Step
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
index 9db1d18c8bd..6280ffe8a47 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicsList.java
@@ -87,6 +87,12 @@ public TopicsList clickCopySelectedTopicBtn(){
return this;
}
+ @Step
+ public TopicsList clickPurgeMessagesOfSelectedTopicsBtn(){
+ purgeMessagesOfSelectedTopicsBtn.shouldBe(Condition.enabled).click();
+ return this;
+ }
+
@Step
public TopicsList clickClearMessagesBtn(){
clickByJavaScript(clearMessagesBtn.shouldBe(visible));
@@ -111,6 +117,17 @@ public TopicsList clickConfirmBtnMdl() {
return this;
}
+ @Step
+ public TopicsList clickCancelBtnMdl(){
+ clickCancelButton();
+ return this;
+ }
+
+ @Step
+ public boolean isConfirmationMdlVisible(){
+ return isConfirmationModalVisible();
+ }
+
@Step
public boolean isAlertWithMessageVisible(AlertHeader header, String message) {
return isAlertVisible(header, message);
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
index 205707a263e..4993a2d4837 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicMessagesTests.java
@@ -40,8 +40,8 @@ public class TopicMessagesTests extends BaseTest {
.setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
- private static final Topic TOPIC_TO_CLEAR_MESSAGES = new Topic()
- .setName("topic-to-clear-message-attribute-" + randomAlphabetic(5))
+ private static final Topic TOPIC_TO_CLEAR_AND_PURGE_MESSAGES = new Topic()
+ .setName("topic-to-clear-and-purge-messages-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_FOR_CHECKING_FILTERS = new Topic()
@@ -56,7 +56,8 @@ public class TopicMessagesTests extends BaseTest {
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_MESSAGES, TOPIC_TO_RECREATE));
+ TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
+ TOPIC_TO_RECREATE));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
waitUntilNewMinuteStarted();
@@ -111,22 +112,57 @@ void clearMessage() {
@Order(3)
@Test
void checkClearTopicMessage() {
- navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_MESSAGES.getName());
+ navigateToTopicsAndOpenDetails(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
topicDetails
.openDetailsTab(OVERVIEW);
- produceMessage(TOPIC_TO_CLEAR_MESSAGES);
+ produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
navigateToTopics();
- assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_MESSAGES.getName()).getNumberOfMessages())
+ assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
.as("getNumberOfMessages()").isEqualTo(1);
topicsList
- .openDotMenuByTopicName(TOPIC_TO_CLEAR_MESSAGES.getName())
+ .openDotMenuByTopicName(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
.clickClearMessagesBtn()
.clickConfirmBtnMdl();
SoftAssertions softly = new SoftAssertions();
softly.assertThat(topicsList.isAlertWithMessageVisible(SUCCESS,
- String.format("%s messages have been successfully cleared!", TOPIC_TO_CLEAR_MESSAGES.getName())))
+ String.format("%s messages have been successfully cleared!", TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())))
.as("isAlertWithMessageVisible()").isTrue();
- softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_MESSAGES.getName()).getNumberOfMessages())
+ softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(0);
+ softly.assertAll();
+ }
+
+ @DisplayName("TopicTests.purgeMessagesOfTopics : Purge messages of topics")
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(10)
+ @Order(4)
+ @Test
+ void checkPurgeMessagePossibility(){
+ navigateToTopics();
+ int messageAmount = topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages();
+ topicsList
+ .openTopic(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName());
+ topicDetails
+ .openDetailsTab(OVERVIEW);
+ produceMessage(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES);
+ navigateToTopics();
+ assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
+ .as("getNumberOfMessages()").isEqualTo(messageAmount +1);
+ topicsList
+ .getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())
+ .selectItem(true)
+ .clickPurgeMessagesOfSelectedTopicsBtn();
+ assertThat(topicsList.isConfirmationMdlVisible()).as("isConfirmationMdlVisible()").isTrue();
+ topicsList
+ .clickCancelBtnMdl()
+ .clickPurgeMessagesOfSelectedTopicsBtn()
+ .clickConfirmBtnMdl();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicsList.isAlertWithMessageVisible(SUCCESS,
+ String.format("%s messages have been successfully cleared!",TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName())))
+ .as("isAlertWithMessageVisible()").isTrue();
+ softly.assertThat(topicsList.getTopicItem(TOPIC_TO_CLEAR_AND_PURGE_MESSAGES.getName()).getNumberOfMessages())
.as("getNumberOfMessages()").isEqualTo(0);
softly.assertAll();
}
@@ -137,7 +173,7 @@ void checkClearTopicMessage() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(21)
- @Order(4)
+ @Order(5)
@Test
void copyMessageFromTopicProfile() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
@@ -156,7 +192,7 @@ void copyMessageFromTopicProfile() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(15)
- @Order(5)
+ @Order(6)
@Test
void checkingMessageFilteringByOffset() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
@@ -183,7 +219,7 @@ void checkingMessageFilteringByOffset() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(16)
- @Order(6)
+ @Order(7)
@Test
void checkingMessageFilteringByTimestamp() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
@@ -215,7 +251,7 @@ void checkingMessageFilteringByTimestamp() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(246)
- @Order(7)
+ @Order(8)
@Test
void checkClearTopicMessageFromOverviewTab() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECKING_FILTERS.getName());
@@ -237,7 +273,7 @@ void checkClearTopicMessageFromOverviewTab() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(240)
- @Order(8)
+ @Order(9)
@Test
void checkRecreateTopic(){
navigateToTopicsAndOpenDetails(TOPIC_TO_RECREATE.getName());
| train | val | 2023-01-23T15:27:55 | "2023-01-13T09:12:03Z" | ArthurNiedial | train |
provectus/kafka-ui/3226_3257 | provectus/kafka-ui | provectus/kafka-ui/3226 | provectus/kafka-ui/3257 | [
"connected"
] | 76fbaa7ead44de2a9cb7baf6473c810394ac0e5b | cdb559002526517953f7c039467650e665a83e5f | [] | [] | "2023-01-23T15:04:46Z" | [
"status/accepted",
"scope/infrastructure"
] | Fix workflow warnings | https://github.com/provectus/kafka-ui/actions/runs/3909117657
<img width="1172" alt="image" src="https://user-images.githubusercontent.com/1494347/212258072-de4a4013-374e-463a-a3e6-41e7bbcfe074.png">
| [
".github/workflows/aws_publisher.yaml",
".github/workflows/block_merge.yml",
".github/workflows/branch-deploy.yml",
".github/workflows/build-public-image.yml",
".github/workflows/delete-public-image.yml",
".github/workflows/e2e-checks.yaml",
".github/workflows/separate_env_public_create.yml"
] | [
".github/workflows/aws_publisher.yaml",
".github/workflows/block_merge.yml",
".github/workflows/branch-deploy.yml",
".github/workflows/build-public-image.yml",
".github/workflows/delete-public-image.yml",
".github/workflows/e2e-checks.yaml",
".github/workflows/separate_env_public_create.yml"
] | [] | diff --git a/.github/workflows/aws_publisher.yaml b/.github/workflows/aws_publisher.yaml
index 0d0f081f564..c7b80c54f97 100644
--- a/.github/workflows/aws_publisher.yaml
+++ b/.github/workflows/aws_publisher.yaml
@@ -31,7 +31,7 @@ jobs:
echo "Packer will be triggered in this dir $WORK_DIR"
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_AMI_PUBLISH_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_AMI_PUBLISH_KEY_SECRET }}
diff --git a/.github/workflows/block_merge.yml b/.github/workflows/block_merge.yml
index 28f5cde3f45..e1cdb3ac8e9 100644
--- a/.github/workflows/block_merge.yml
+++ b/.github/workflows/block_merge.yml
@@ -6,7 +6,7 @@ jobs:
block_merge:
runs-on: ubuntu-latest
steps:
- - uses: mheap/github-action-required-labels@v2
+ - uses: mheap/github-action-required-labels@v3
with:
mode: exactly
count: 0
diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml
index b694a3c7fdc..1cc5fee39b1 100644
--- a/.github/workflows/branch-deploy.yml
+++ b/.github/workflows/branch-deploy.yml
@@ -45,7 +45,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml
index d593326fa68..c79996d2ac5 100644
--- a/.github/workflows/build-public-image.yml
+++ b/.github/workflows/build-public-image.yml
@@ -42,7 +42,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/delete-public-image.yml b/.github/workflows/delete-public-image.yml
index 47e08713d87..56c795d0b54 100644
--- a/.github/workflows/delete-public-image.yml
+++ b/.github/workflows/delete-public-image.yml
@@ -15,7 +15,7 @@ jobs:
tag='${{ github.event.pull_request.number }}'
echo "tag=${tag}" >> $GITHUB_OUTPUT
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml
index a21821e79c8..962a1346849 100644
--- a/.github/workflows/e2e-checks.yaml
+++ b/.github/workflows/e2e-checks.yaml
@@ -15,6 +15,12 @@ jobs:
- uses: actions/checkout@v3
with:
ref: ${{ github.event.pull_request.head.sha }}
+ - name: Configure AWS credentials for Kafka-UI account
+ uses: aws-actions/configure-aws-credentials@v1-node16
+ with:
+ aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+ aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+ aws-region: eu-central-1
- name: Set the values
id: set_env_values
run: |
@@ -57,8 +63,6 @@ jobs:
if: always()
env:
AWS_S3_BUCKET: 'kafkaui-allure-reports'
- AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
- AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: 'eu-central-1'
SOURCE_DIR: 'allure-history/allure-results'
- name: Post the link to allure report
diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml
index fde65dffaba..e10b1a382c3 100644
--- a/.github/workflows/separate_env_public_create.yml
+++ b/.github/workflows/separate_env_public_create.yml
@@ -47,7 +47,7 @@ jobs:
restore-keys: |
${{ runner.os }}-buildx-
- name: Configure AWS credentials for Kafka-UI account
- uses: aws-actions/configure-aws-credentials@v1
+ uses: aws-actions/configure-aws-credentials@v1-node16
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
| null | val | val | 2023-03-09T19:59:59 | "2023-01-13T07:04:24Z" | Haarolean | train |
provectus/kafka-ui/3253_3259 | provectus/kafka-ui | provectus/kafka-ui/3253 | provectus/kafka-ui/3259 | [
"connected"
] | 9cfa184cea9e3273673d955806a0d21700be8e8b | 0e671a939646eb283e529ea976d5b2e1fa91fcb5 | [
"Hello there Dgadavin! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reaching out. \r\n\r\nWe'll take a look!",
"@Narekmat please",
"Thanks a lot @Haarolean and @Narekmat. Superfast. "
] | [] | "2023-01-24T05:39:26Z" | [
"status/accepted",
"scope/k8s"
] | ALB ingress with wildcard not support pathType: Prefix | Good day. I ran into a problem where the ALB ingress controller fails to build its model, because a wildcard `/*` in the path is not supported with `pathType: Prefix`.
Could you please add a value to override `pathType` in values.yaml?
**Set up**
Chart version: 0.4.2
**Steps to Reproduce**
Create ingress with such values
```
 ingress:
   enabled: true
   annotations:
     kubernetes.io/ingress.class: alb
     alb.ingress.kubernetes.io/scheme: internal
     alb.ingress.kubernetes.io/target-type: ip
     alb.ingress.kubernetes.io/load-balancer-attributes: idle_timeout.timeout_seconds=900
     external-dns.alpha.kubernetes.io/hostname: "kafka-ui.example.com"
     alb.ingress.kubernetes.io/certificate-arn: <CERT-ARN>
     alb.ingress.kubernetes.io/listen-ports: '[{"HTTP": 80}, {"HTTPS":443}]'
     alb.ingress.kubernetes.io/actions.ssl-redirect: '{"Type": "redirect",
       "RedirectConfig": { "Protocol": "HTTPS", "Port": "443", "StatusCode":
       "HTTP_301"}}'
     alb.ingress.kubernetes.io/group.name: prod-int
   path: "/*"
   host: "kafka-ui.example.com"
```
 You will then get the following error:
```
Failed build model due to ingress: dev/kafka-ui: prefix path shouldn't contain wildcards: /*
```
**Expected behavior**
 The ingress works and the model builds without errors.
 For this, `pathType` should be `ImplementationSpecific`.
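 For illustration, the request boils down to being able to set something like the values below (a hedged sketch: it assumes the chart exposes an `ingress.pathType` value, which is exactly what the patch in this entry adds; the remaining keys are taken from the setup above):
 ```
 ingress:
   enabled: true
   path: "/*"
   # ALB rejects wildcards under pathType: Prefix, so the path type must be overridable
   pathType: "ImplementationSpecific"
   host: "kafka-ui.example.com"
 ```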
| [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/ingress.yaml",
"charts/kafka-ui/values.yaml"
] | [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/ingress.yaml",
"charts/kafka-ui/values.yaml"
] | [] | diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 28e1a892aac..3a1759a3911 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.1
+version: 0.5.2
appVersion: v0.5.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml
index e4b33439c42..13e746d8d1b 100644
--- a/charts/kafka-ui/templates/ingress.yaml
+++ b/charts/kafka-ui/templates/ingress.yaml
@@ -35,7 +35,7 @@ spec:
{{- if and ($.Capabilities.APIVersions.Has "networking.k8s.io/v1") $isHigher1p19 -}}
{{- range .Values.ingress.precedingPaths }}
- path: {{ .path }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
backend:
service:
name: {{ .serviceName }}
@@ -47,13 +47,13 @@ spec:
name: {{ $fullName }}
port:
number: {{ $svcPort }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
{{- if .Values.ingress.path }}
path: {{ .Values.ingress.path }}
{{- end }}
{{- range .Values.ingress.succeedingPaths }}
- path: {{ .path }}
- pathType: Prefix
+ pathType: {{ .Values.ingress.pathType }}
backend:
service:
name: {{ .serviceName }}
diff --git a/charts/kafka-ui/values.yaml b/charts/kafka-ui/values.yaml
index dce32059e14..3c30b408134 100644
--- a/charts/kafka-ui/values.yaml
+++ b/charts/kafka-ui/values.yaml
@@ -111,6 +111,9 @@ ingress:
# The path for the Ingress
path: "/"
+ # The path type for the Ingress
+ pathType: "Prefix"
+
# The hostname for the Ingress
host: ""
| null | test | val | 2023-01-23T17:05:00 | "2023-01-23T08:04:17Z" | Dgadavin | train |
provectus/kafka-ui/3187_3262 | provectus/kafka-ui | provectus/kafka-ui/3187 | provectus/kafka-ui/3262 | [
"connected"
] | 0e671a939646eb283e529ea976d5b2e1fa91fcb5 | 43fcf6dce17283adc963f8661abe5b3e0410372d | [] | [] | "2023-01-24T10:17:59Z" | [
"type/enhancement",
"scope/backend",
"status/accepted"
] | Prevent crashing in case of unavailable logdirs | [
"README.md",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [
"README.md",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [] | diff --git a/README.md b/README.md
index a0ca023bc04..b66ac8f37f1 100644
--- a/README.md
+++ b/README.md
@@ -199,7 +199,6 @@ For example, if you want to use an environment variable to set the `name` parame
|`KAFKA_CLUSTERS_0_METRICS_PORT` |Open metrics port of a broker
|`KAFKA_CLUSTERS_0_METRICS_TYPE` |Type of metrics retriever to use. Valid values are JMX (default) or PROMETHEUS. If Prometheus, then metrics are read from prometheus-jmx-exporter instead of jmx
|`KAFKA_CLUSTERS_0_READONLY` |Enable read-only mode. Default: false
-|`KAFKA_CLUSTERS_0_DISABLELOGDIRSCOLLECTION` |Disable collecting segments information. It should be true for confluent cloud. Default: false
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME` |Given name for the Kafka Connect cluster
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS` |Address of the Kafka Connect service endpoint
|`KAFKA_CLUSTERS_0_KAFKACONNECT_0_USERNAME`| Kafka Connect cluster's basic authentication username
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index e709f33d4f4..01d4de12570 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -38,7 +38,6 @@ public static class Cluster {
MetricsConfigData metrics;
Properties properties;
boolean readOnly = false;
- boolean disableLogDirsCollection = false;
List<SerdeConfig> serde = new ArrayList<>();
String defaultKeySerde;
String defaultValueSerde;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java
index d87113633f5..9933d7e4675 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java
@@ -26,7 +26,6 @@ public class KafkaCluster {
private final String bootstrapServers;
private final Properties properties;
private final boolean readOnly;
- private final boolean disableLogDirsCollection;
private final MetricsConfig metricsConfig;
private final DataMasking masking;
private final Supplier<PollingThrottler> throttler;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
index b1000131eec..7113f5af367 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
@@ -39,7 +39,6 @@ public KafkaCluster create(ClustersProperties.Cluster clusterProperties) {
builder.bootstrapServers(clusterProperties.getBootstrapServers());
builder.properties(Optional.ofNullable(clusterProperties.getProperties()).orElse(new Properties()));
builder.readOnly(clusterProperties.isReadOnly());
- builder.disableLogDirsCollection(clusterProperties.isDisableLogDirsCollection());
builder.masking(DataMasking.create(clusterProperties.getMasking()));
builder.metricsConfig(metricsConfigDataToMetricsConfig(clusterProperties.getMetrics()));
builder.throttler(PollingThrottler.throttlerSupplier(clusterProperties));
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index b24180fa48d..ea56edcf96e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -66,6 +66,7 @@
import org.apache.kafka.common.errors.GroupNotEmptyException;
import org.apache.kafka.common.errors.InvalidRequestException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
+import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -297,7 +298,12 @@ public Mono<Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>>> descr
public Mono<Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>>> describeLogDirs(
Collection<Integer> brokerIds) {
- return toMono(client.describeLogDirs(brokerIds).all());
+ return toMono(client.describeLogDirs(brokerIds).all())
+ .onErrorResume(UnsupportedVersionException.class, th -> Mono.just(Map.of()))
+ .onErrorResume(th -> true, th -> {
+ log.warn("Error while calling describeLogDirs", th);
+ return Mono.just(Map.of());
+ });
}
public Mono<ClusterDescription> describeCluster() {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
index 9f0b9389996..e6dbb27ee9f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
@@ -1,5 +1,7 @@
package com.provectus.kafka.ui.service;
+import static com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
+
import com.provectus.kafka.ui.model.Feature;
import com.provectus.kafka.ui.model.InternalLogDirStats;
import com.provectus.kafka.ui.model.KafkaCluster;
@@ -9,10 +11,12 @@
import com.provectus.kafka.ui.service.metrics.MetricsCollector;
import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
@@ -21,7 +25,7 @@
@Slf4j
public class StatisticsService {
- private final MetricsCollector metricsClusterUtil;
+ private final MetricsCollector metricsCollector;
private final AdminClientService adminClientService;
private final FeatureService featureService;
private final StatisticsCache cache;
@@ -35,8 +39,8 @@ private Mono<Statistics> getStatistics(KafkaCluster cluster) {
ac.describeCluster().flatMap(description ->
Mono.zip(
List.of(
- metricsClusterUtil.getBrokerMetrics(cluster, description.getNodes()),
- getLogDirInfo(cluster, ac),
+ metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
+ getLogDirInfo(description, ac),
featureService.getAvailableFeatures(cluster, description.getController()),
loadTopicConfigs(cluster),
describeTopics(cluster)),
@@ -58,11 +62,9 @@ private Mono<Statistics> getStatistics(KafkaCluster cluster) {
e -> Mono.just(Statistics.empty().toBuilder().lastKafkaException(e).build()));
}
- private Mono<InternalLogDirStats> getLogDirInfo(KafkaCluster cluster, ReactiveAdminClient c) {
- if (!cluster.isDisableLogDirsCollection()) {
- return c.describeLogDirs().map(InternalLogDirStats::new);
- }
- return Mono.just(InternalLogDirStats.empty());
+ private Mono<InternalLogDirStats> getLogDirInfo(ClusterDescription desc, ReactiveAdminClient ac) {
+ var brokerIds = desc.getNodes().stream().map(Node::id).collect(Collectors.toSet());
+ return ac.describeLogDirs(brokerIds).map(InternalLogDirStats::new);
}
private Mono<Map<String, TopicDescription>> describeTopics(KafkaCluster c) {
| null | test | val | 2023-01-24T09:52:54 | "2023-01-03T10:59:32Z" | Haarolean | train |
|
provectus/kafka-ui/3269_3271 | provectus/kafka-ui | provectus/kafka-ui/3269 | provectus/kafka-ui/3271 | [
"connected"
] | 43fcf6dce17283adc963f8661abe5b3e0410372d | 640777dbdab2d3da6324edfb3bdac066ff1727fe | [] | [
"u need to wait new screen after 136 line",
"Fix.",
"ok, but why do we need wait at 154. screen wasn't changed",
"can't we refactor TineToRetain enum with two attributes: buttonName and value. to use the same instance while setting value at he form and getting the result in asserts",
"Fixed.",
"Fixed.",
"Fixed."
] | "2023-01-26T07:50:10Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]TopicTests.editSettingsOfTopic : Edit settings of topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-248
Pre-conditions:
-Create a Topic with Delete cleanup policy and Time to retain data "7days"; Max size on disk "Not Set"; Maximum message size in bytes "1048588"
Post-conditions:
Not set
Steps:
-Navigate to Topics
-Select the Topic
-Select "Edit settings" from 3dot menu
-Change "Cleanup Policy"
-Change "Time to retain data" field
-Change "Max size on disk"
-Change "Maximum message size in bytes"
-Press "Update topic" button
-Press "Edit settings" from 3dot menu
Expected results:
-Topic Name field should be frozen
-"Update topic" should become active
-Input data
"2 days"
172800000 should be filled
-Input data
"50 GB"
-Input data
"1048589"
-Success message should display
Topic overview should open
-The following updated data should display:
Cleanup Policy "Compact"; Time to retain data "2days"; Max size on disk "50 GB"; Maximum message size in bytes "1048589"
| [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
index 53c234c8d38..16b3ef6e2fb 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
@@ -3,6 +3,7 @@
import com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue;
import com.provectus.kafka.ui.pages.topic.enums.CustomParameterType;
import com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topic.enums.TimeToRetain;
import lombok.Data;
import lombok.experimental.Accessors;
@@ -14,4 +15,5 @@ public class Topic {
private CustomParameterType customParameterType;
private CleanupPolicyValue cleanupPolicyValue;
private MaxSizeOnDisk maxSizeOnDisk;
+ private TimeToRetain timeToRetain;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
index 3822cca257a..1ad65bdb36d 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/TopicCreateEditForm.java
@@ -13,6 +13,7 @@
import com.provectus.kafka.ui.pages.topic.enums.CleanupPolicyValue;
import com.provectus.kafka.ui.pages.topic.enums.CustomParameterType;
import com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk;
+import com.provectus.kafka.ui.pages.topic.enums.TimeToRetain;
import io.qameta.allure.Step;
public class TopicCreateEditForm extends BasePage {
@@ -30,6 +31,8 @@ public class TopicCreateEditForm extends BasePage {
protected SelenideElement customParameterValueField = $x("//input[@placeholder='Value']");
protected SelenideElement validationCustomParameterValueMsg = $x("//p[contains(text(),'Value is required')]");
protected String ddlElementLocator = "//li[@value='%s']";
+ protected String btnTimeToRetainLocator = "//button[@class][text()='%s']";
+
@Step
public TopicCreateEditForm waitUntilScreenReady() {
@@ -46,6 +49,10 @@ public boolean isDeleteCustomParameterButtonEnabled() {
return isEnabled(deleteCustomParameterBtn);
}
+ public boolean isNameFieldEnabled(){
+ return isEnabled(nameField);
+ }
+
@Step
public TopicCreateEditForm setTopicName(String topicName) {
nameField.shouldBe(Condition.enabled).clear();
@@ -118,13 +125,8 @@ public TopicCreateEditForm setNumberOfPartitions(int partitions) {
}
@Step
- public TopicCreateEditForm setTimeToRetainDataInMsUsingButtons(String value) {
- timeToRetainField
- .parent()
- .parent()
- .$$("button")
- .find(Condition.exactText(value))
- .click();
+ public TopicCreateEditForm setTimeToRetainDataByButtons(TimeToRetain timeToRetain) {
+ $x(String.format(btnTimeToRetainLocator, timeToRetain.getButton())).shouldBe(Condition.enabled).click();
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java
new file mode 100644
index 00000000000..9d42bf800f4
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topic/enums/TimeToRetain.java
@@ -0,0 +1,25 @@
+package com.provectus.kafka.ui.pages.topic.enums;
+
+public enum TimeToRetain {
+ BTN_12_HOURS("12 hours", "43200000"),
+ BTN_1_DAY("1 day", "86400000"),
+ BTN_2_DAYS("2 days", "172800000"),
+ BTN_7_DAYS("7 days", "604800000"),
+ BTN_4_WEEKS("4 weeks", "2419200000");
+
+ private final String button;
+ private final String value;
+
+ TimeToRetain(String button, String value) {
+ this.button = button;
+ this.value = value;
+ }
+
+ public String getButton(){
+ return button;
+ }
+
+ public String getValue(){
+ return value;
+ }
+}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index b70daa33079..7f301835c8a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -8,7 +8,9 @@
import static com.provectus.kafka.ui.pages.topic.enums.CustomParameterType.COMPRESSION_TYPE;
import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.NOT_SET;
import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_1_GB;
-import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_20_GB;
+import static com.provectus.kafka.ui.pages.topic.enums.MaxSizeOnDisk.SIZE_50_GB;
+import static com.provectus.kafka.ui.pages.topic.enums.TimeToRetain.BTN_2_DAYS;
+import static com.provectus.kafka.ui.pages.topic.enums.TimeToRetain.BTN_7_DAYS;
import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
import static org.apache.commons.lang3.RandomUtils.nextInt;
import static org.assertj.core.api.Assertions.assertThat;
@@ -50,10 +52,10 @@ public class TopicsTests extends BaseTest {
private static final Topic TOPIC_TO_UPDATE_AND_DELETE = new Topic()
.setName("topic-to-update-and-delete-" + randomAlphabetic(5))
.setNumberOfPartitions(1)
- .setCleanupPolicyValue(COMPACT)
- .setTimeToRetainData("604800001")
- .setMaxSizeOnDisk(SIZE_20_GB)
- .setMaxMessageBytes("1000020")
+ .setCleanupPolicyValue(DELETE)
+ .setTimeToRetain(BTN_7_DAYS)
+ .setMaxSizeOnDisk(NOT_SET)
+ .setMaxMessageBytes("1048588")
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
private static final Topic TOPIC_TO_CHECK_SETTINGS = new Topic()
@@ -132,24 +134,43 @@ public void updateTopic() {
.openDotMenu()
.clickEditSettingsMenu();
topicCreateEditForm
- .waitUntilScreenReady()
+ .waitUntilScreenReady();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicCreateEditForm.getCleanupPolicy()).as("getCleanupPolicy()")
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText());
+ softly.assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue());
+ softly.assertThat(topicCreateEditForm.getMaxSizeOnDisk()).as("getMaxSizeOnDisk()")
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText());
+ softly.assertThat(topicCreateEditForm.getMaxMessageBytes()).as("getMaxMessageBytes()")
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes());
+ softly.assertAll();
+ TOPIC_TO_UPDATE_AND_DELETE
+ .setCleanupPolicyValue(COMPACT)
+ .setTimeToRetain(BTN_2_DAYS)
+ .setMaxSizeOnDisk(SIZE_50_GB).setMaxMessageBytes("1048589");
+ topicCreateEditForm
.selectCleanupPolicy((TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue()))
- .setMinInsyncReplicas(10)
- .setTimeToRetainDataInMs(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetainData())
+ .setTimeToRetainDataByButtons(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain())
.setMaxSizeOnDiskInGB(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk())
.setMaxMessageBytes(TOPIC_TO_UPDATE_AND_DELETE.getMaxMessageBytes())
.clickCreateTopicBtn();
+ softly.assertThat(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully updated."))
+ .as("isAlertWithMessageVisible()").isTrue();
+ softly.assertThat(topicDetails.isTopicHeaderVisible(TOPIC_TO_UPDATE_AND_DELETE.getName()))
+ .as("isTopicHeaderVisible()").isTrue();
+ softly.assertAll();
topicDetails
.waitUntilScreenReady();
navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
topicDetails
.openDotMenu()
.clickEditSettingsMenu();
- SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(topicCreateEditForm.isNameFieldEnabled()).as("isNameFieldEnabled()").isFalse();
softly.assertThat(topicCreateEditForm.getCleanupPolicy()).as("getCleanupPolicy()")
.isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getCleanupPolicyValue().getVisibleText());
softly.assertThat(topicCreateEditForm.getTimeToRetain()).as("getTimeToRetain()")
- .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetainData());
+ .isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getTimeToRetain().getValue());
softly.assertThat(topicCreateEditForm.getMaxSizeOnDisk()).as("getMaxSizeOnDisk()")
.isEqualTo(TOPIC_TO_UPDATE_AND_DELETE.getMaxSizeOnDisk().getVisibleText());
softly.assertThat(topicCreateEditForm.getMaxMessageBytes()).as("getMaxMessageBytes()")
| val | val | 2023-01-26T08:12:59 | "2023-01-25T15:02:33Z" | ArthurNiedial | train |
provectus/kafka-ui/3104_3295 | provectus/kafka-ui | provectus/kafka-ui/3104 | provectus/kafka-ui/3295 | [
"connected"
] | fdf8db98a22c7403c2910b326dbe6458d3ddcbba | 398181e0d25fc94cf58793d8cafec5fb0a1f06ee | [
"We have documentation for this ticket https://ksqldb.io/quickstart.html \r\n",
"@BulatKha ",
"We need to reproduce 4 and 5 steps as preconditions for create Stream and TABLE , and using this table for KSQL query ",
"@anezboretskiy we need to get AC and cover this case"
] | [
"if the class consists wih enum only u can to not declare the class, just enum\nalso upd KsqlQueryConfig pls",
"1. in this logic method should be called as getTableNameElm()\n2. no reason in this method because element it returns used only once in getTableName()",
"same comment as for 64 line",
"Done",
"Deleted method ",
"Deleted method "
] | "2023-01-31T16:14:11Z" | [
"scope/QA",
"scope/AQA"
] | [e2e] Checking KSQL request execution | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-41
Description:
The purpose of this case is to check KSQL request execution
Pre-conditions:
- Login to system
- Navigate to KSQL DB
- Create stream
- Create tables for stream
 - For more info use https://ksqldb.io/quickstart.html (a minimal sketch of such statements is shown right after this list)
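 A minimal sketch of the stream and table creation mentioned above, loosely following the linked quickstart (all names are illustrative; any stream plus a table derived from it satisfies the precondition):
 ```
 CREATE STREAM rider_locations (profile_id VARCHAR, latitude DOUBLE, longitude DOUBLE)
   WITH (kafka_topic = 'locations', value_format = 'JSON', partitions = 1);

 CREATE TABLE current_location AS
   SELECT profile_id,
          LATEST_BY_OFFSET(latitude) AS la,
          LATEST_BY_OFFSET(longitude) AS lo
   FROM rider_locations
   GROUP BY profile_id
   EMIT CHANGES;
 ```
 With these in place, the `show tables;` query from the steps below should list the created table.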
Steps:
1. Press "Execute KSQL Request"
2. Input the query for KSQL
**Input data:**
show tables;
3. Press "Execute"
**Expected result:**
All the requesting data should appear under the query | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
new file mode 100644
index 00000000000..ec735df65a7
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
@@ -0,0 +1,137 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Selenide.$;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
+import io.qameta.allure.Step;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import org.openqa.selenium.By;
+
+public class KsqlDbList extends BasePage {
+ protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
+ protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
+ protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
+
+ @Step
+ public KsqlDbList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ Arrays.asList(tablesTab, streamsTab).forEach(tab -> tab.shouldBe(Condition.visible));
+ return this;
+ }
+
+ @Step
+ public KsqlDbList clickExecuteKsqlRequestBtn() {
+ clickByJavaScript(executeKsqlBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
+ $(By.linkText(menu.toString())).shouldBe(Condition.visible).click();
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
+ List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlDbList.KsqlTablesGridItem getTableByName(String tableName) {
+ return initTablesItems().stream()
+ .filter(e -> e.getTableName().equals(tableName))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlTablesGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public KsqlTablesGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getTableName() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getTopicName() {
+ return element.$x("./td[2]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[5]").getText().trim();
+ }
+ }
+
+ private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
+ List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
+ allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlDbList.KsqlStreamsGridItem getStreamByName(String streamName) {
+ return initStreamsItems().stream()
+ .filter(e -> e.getStreamName().equals(streamName))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlStreamsGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ public KsqlStreamsGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getStreamName() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getTopicName() {
+ return element.$x("./td[2]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[5]").getText().trim();
+ }
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
new file mode 100644
index 00000000000..21ceacdbb7f
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
@@ -0,0 +1,144 @@
+package com.provectus.kafka.ui.pages.ksqldb;
+
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
+import com.codeborne.selenide.CollectionCondition;
+import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.ElementsCollection;
+import com.codeborne.selenide.SelenideElement;
+import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+
+public class KsqlQueryForm extends BasePage {
+ protected SelenideElement pageTitle = $x("//h1[text()='Query']");
+ protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+ protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+ protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+ protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+ protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+ protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+ protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+ protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+ protected ElementsCollection keyField = $$x("//input[@aria-label='value']");
+ protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+
+ @Step
+ public KsqlQueryForm waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ pageTitle.shouldBe(Condition.visible);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickClearBtn() {
+ clickByJavaScript(clearBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickExecuteBtn() {
+ clickByJavaScript(executeBtn);
+ if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+ loadingSpinner.shouldBe(Condition.visible);
+ } else {
+ waitUntilSpinnerDisappear();
+ }
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickStopQueryBtn() {
+ clickByJavaScript(stopQueryBtn);
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickClearResultsBtn() {
+ clickByJavaScript(clearResultsBtn);
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm clickAddStreamProperty() {
+ clickByJavaScript(addStreamPropertyBtn);
+ return this;
+ }
+
+ @Step
+ public KsqlQueryForm setQuery(String query) {
+ queryAreaValue.shouldBe(Condition.visible).click();
+ queryArea.setValue(query);
+ return this;
+ }
+
+ private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+ List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+ ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+ return gridItemList;
+ }
+
+ @Step
+ public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+ return initItems().stream()
+ .filter(e -> e.getName().equalsIgnoreCase(name))
+ .findFirst().orElse(null);
+ }
+
+ public static class KsqlResponseGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ private KsqlResponseGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getType() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getName() {
+ return element.$x("./td[2]").scrollTo().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
+ }
+
+ @Step
+ public String getTopic() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[5]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[6]").getText().trim();
+ }
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
new file mode 100644
index 00000000000..f3bb55d42b0
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlMenuTabs.java
@@ -0,0 +1,16 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlMenuTabs {
+ TABLES("Table"),
+ STREAMS("Streams");
+
+ private final String value;
+
+ KsqlMenuTabs(String value) {
+ this.value = value;
+ }
+
+ public String toString() {
+ return value;
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
new file mode 100644
index 00000000000..c918b079973
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/enums/KsqlQueryConfig.java
@@ -0,0 +1,18 @@
+package com.provectus.kafka.ui.pages.ksqldb.enums;
+
+public enum KsqlQueryConfig {
+ SHOW_TABLES("show tables;"),
+ SHOW_STREAMS("show streams;"),
+ SELECT_ALL_FROM("SELECT * FROM %s\n" +
+ "EMIT CHANGES;");
+
+ private final String query;
+
+ KsqlQueryConfig(String query) {
+ this.query = query;
+ }
+
+ public String getQuery(){
+ return query;
+ }
+}
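A minimal usage sketch of this enum inside the new KSQL checks (the stream name is illustrative; the page-object calls are the ones added elsewhere in this patch):
```java
// SELECT_ALL_FROM carries a %s placeholder, so the target stream is substituted at call time.
String query = String.format(KsqlQueryConfig.SELECT_ALL_FROM.getQuery(), "MY_STREAM");
ksqlQueryForm
    .setQuery(query)
    .clickExecuteBtn(); // a query ending in "EMIT CHANGES;" keeps the loading spinner visible until stopped
```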
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
new file mode 100644
index 00000000000..f61b8243866
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Stream.java
@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.pages.ksqldb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Stream {
+ private String name, topicName, valueFormat, partitions;
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
new file mode 100644
index 00000000000..cbb0c1d2a72
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/models/Table.java
@@ -0,0 +1,10 @@
+package com.provectus.kafka.ui.pages.ksqldb.models;
+
+import lombok.Data;
+import lombok.experimental.Accessors;
+
+@Data
+@Accessors(chain = true)
+public class Table {
+ private String name, streamName;
+}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index 8451ef836e1..c41796cb85c 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -6,20 +6,28 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.provectus.kafka.ui.api.ApiClient;
import com.provectus.kafka.ui.api.api.KafkaConnectApi;
+import com.provectus.kafka.ui.api.api.KsqlApi;
import com.provectus.kafka.ui.api.api.MessagesApi;
import com.provectus.kafka.ui.api.api.SchemasApi;
import com.provectus.kafka.ui.api.api.TopicsApi;
import com.provectus.kafka.ui.api.model.CreateTopicMessage;
+import com.provectus.kafka.ui.api.model.KsqlCommandV2;
+import com.provectus.kafka.ui.api.model.KsqlCommandV2Response;
+import com.provectus.kafka.ui.api.model.KsqlResponse;
import com.provectus.kafka.ui.api.model.NewConnector;
import com.provectus.kafka.ui.api.model.NewSchemaSubject;
import com.provectus.kafka.ui.api.model.TopicCreation;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Schema;
import com.provectus.kafka.ui.models.Topic;
+import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqldb.models.Table;
import com.provectus.kafka.ui.settings.BaseSource;
import io.qameta.allure.Step;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
+import java.util.Objects;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.reactive.function.client.WebClientResponseException;
@@ -48,6 +56,9 @@ private MessagesApi messageApi() {
return new MessagesApi(new ApiClient().setBasePath(BASE_LOCAL_URL));
}
+ @SneakyThrows
+ private KsqlApi ksqlApi(){return new KsqlApi(new ApiClient().setBasePath(BASE_LOCAL_URL));}
+
@SneakyThrows
private void createTopic(String clusterName, String topicName) {
TopicCreation topic = new TopicCreation();
@@ -173,4 +184,76 @@ public ApiService sendMessage(Topic topic) {
sendMessage(CLUSTER_NAME, topic);
return this;
}
+
+ @Step
+ public ApiService createStream(Stream stream) {
+ KsqlCommandV2Response pipeIdStream = ksqlApi().executeKsql(
+ CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(String.format("CREATE STREAM %s (profileId VARCHAR, latitude DOUBLE, longitude DOUBLE) ",
+ stream.getName())
+ + String.format("WITH (kafka_topic='%s', value_format='json', partitions=1);",
+ stream.getTopicName())))
+ .block();
+ assert pipeIdStream != null;
+ List<KsqlResponse> responseListStream =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdStream.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListStream).size() != 0;
+ return this;
+ }
+
+ @Step
+ public ApiService createTables(Table firstTable, Table secondTable) {
+ KsqlCommandV2Response pipeIdTable1 = ksqlApi().executeKsql(
+ CLUSTER_NAME, new KsqlCommandV2().ksql(
+ String.format("CREATE TABLE %s AS ", firstTable.getName())
+ + " SELECT profileId, "
+ + " LATEST_BY_OFFSET(latitude) AS la, "
+ + " LATEST_BY_OFFSET(longitude) AS lo "
+ + String.format(" FROM %s ", firstTable.getStreamName())
+ + " GROUP BY profileId "
+ + " EMIT CHANGES;"))
+ .block();
+ assert pipeIdTable1 != null;
+ List<KsqlResponse> responseListTable =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable1.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListTable).size() != 0;
+ KsqlCommandV2Response pipeIdTable2 = ksqlApi().executeKsql(
+ CLUSTER_NAME,
+ new KsqlCommandV2().ksql(String.format("CREATE TABLE %s AS ", secondTable.getName())
+ + " SELECT ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1) AS distanceInMiles, "
+ + " COLLECT_LIST(profileId) AS riders, "
+ + " COUNT(*) AS count "
+ + String.format(" FROM %s ", firstTable.getName())
+ + " GROUP BY ROUND(GEO_DISTANCE(la, lo, 37.4133, -122.1162), -1);"))
+ .block();
+ assert pipeIdTable2 != null;
+ List<KsqlResponse> responseListTable2 =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdTable2.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListTable2).size() != 0;
+ return this;
+ }
+
+ @Step
+ public ApiService insertInto(Stream stream) {
+ String streamName = stream.getName();
+ KsqlCommandV2Response pipeIdInsert = ksqlApi().executeKsql(CLUSTER_NAME, new KsqlCommandV2()
+ .ksql(
+ "INSERT INTO " + streamName + " (profileId, latitude, longitude) VALUES ('c2309eec', 37.7877, -122.4205);"
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('18f4ea86', 37.3903, -122.0643); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ab5cbad', 37.3952, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('8b6eae59', 37.3944, -122.0813); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4a7c7b41', 37.4049, -122.0822); "
+ + "INSERT INTO " + streamName +
+ " (profileId, latitude, longitude) VALUES ('4ddad000', 37.7857, -122.4011);"))
+ .block();
+ assert pipeIdInsert != null;
+ List<KsqlResponse> responseListInsert =
+ ksqlApi().openKsqlResponsePipe(CLUSTER_NAME, pipeIdInsert.getPipeId()).collectList().block();
+ assert Objects.requireNonNull(responseListInsert).size() != 0;
+ return this;
+ }
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
index 1dad47a7007..ac4180fe611 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/Facade.java
@@ -1,6 +1,5 @@
package com.provectus.kafka.ui.base;
-import com.provectus.kafka.ui.services.ApiService;
import com.provectus.kafka.ui.pages.NaviSideBar;
import com.provectus.kafka.ui.pages.TopPanel;
import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab;
@@ -11,14 +10,17 @@
import com.provectus.kafka.ui.pages.connector.KafkaConnectList;
import com.provectus.kafka.ui.pages.consumer.ConsumersDetails;
import com.provectus.kafka.ui.pages.consumer.ConsumersList;
+import com.provectus.kafka.ui.pages.ksqldb.KsqlDbList;
+import com.provectus.kafka.ui.pages.ksqldb.KsqlQueryForm;
import com.provectus.kafka.ui.pages.schema.SchemaCreateForm;
import com.provectus.kafka.ui.pages.schema.SchemaDetails;
import com.provectus.kafka.ui.pages.schema.SchemaRegistryList;
import com.provectus.kafka.ui.pages.topic.ProduceMessagePanel;
-import com.provectus.kafka.ui.pages.topic.TopicSettingsTab;
import com.provectus.kafka.ui.pages.topic.TopicCreateEditForm;
import com.provectus.kafka.ui.pages.topic.TopicDetails;
+import com.provectus.kafka.ui.pages.topic.TopicSettingsTab;
import com.provectus.kafka.ui.pages.topic.TopicsList;
+import com.provectus.kafka.ui.services.ApiService;
public abstract class Facade {
protected ApiService apiService = new ApiService();
@@ -40,4 +42,7 @@ public abstract class Facade {
protected BrokersDetails brokersDetails = new BrokersDetails();
protected BrokersConfigTab brokersConfigTab = new BrokersConfigTab();
protected TopicSettingsTab topicSettingsTab = new TopicSettingsTab();
+ protected KsqlQueryForm ksqlQueryForm = new KsqlQueryForm();
+ protected KsqlDbList ksqlDbList = new KsqlDbList();
+
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
new file mode 100644
index 00000000000..ee03fd8de1a
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/ksqldb/KsqlTests.java
@@ -0,0 +1,65 @@
+package com.provectus.kafka.ui.suite.ksqldb;
+
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
+import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
+import static org.apache.commons.lang.RandomStringUtils.randomAlphabetic;
+
+import com.provectus.kafka.ui.base.BaseTest;
+import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
+import com.provectus.kafka.ui.pages.ksqldb.models.Table;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.AutomationStatus;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.annotations.Suite;
+import com.provectus.kafka.ui.utilities.qaseIoUtils.enums.Status;
+import io.qase.api.annotation.CaseId;
+import org.assertj.core.api.SoftAssertions;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+public class KsqlTests extends BaseTest {
+ private static final long SUITE_ID = 8;
+ private static final String SUITE_TITLE = "KSQL_DB";
+ private static final Stream STREAM_FOR_CHECKING_TABLES = new Stream()
+ .setName("STREAM_FOR_CHECKING_TABLES_" + randomAlphabetic(4).toUpperCase())
+ .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
+ private static final Table FIRST_TABLE = new Table()
+ .setName("FIRST_TABLE"+ randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+ private static final Table SECOND_TABLE = new Table()
+ .setName("SECOND_TABLE"+ randomAlphabetic(4).toUpperCase())
+ .setStreamName(STREAM_FOR_CHECKING_TABLES.getName());
+
+ @BeforeAll
+ public void beforeAll(){
+ apiService
+ .createStream(STREAM_FOR_CHECKING_TABLES)
+ .createTables(FIRST_TABLE, SECOND_TABLE);
+ }
+
+ @DisplayName("check KSQL request execution")
+ @Suite(suiteId = SUITE_ID,title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(41)
+ @Test
+ @Order(1)
+ public void checkingKsqlRequestExecution() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
+ ksqlQueryForm
+ .waitUntilScreenReady()
+ .setQuery(SHOW_TABLES.getQuery())
+ .clickExecuteBtn();
+ SoftAssertions softly = new SoftAssertions();
+ softly.assertThat(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible())
+ .as("getTableName()").isTrue();
+ softly.assertThat(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible())
+ .as("getTableName()").isTrue();
+ softly.assertAll();
+ }
+}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
index 7f301835c8a..fa51ebe1fc4 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/suite/topics/TopicsTests.java
@@ -63,12 +63,14 @@ public class TopicsTests extends BaseTest {
.setNumberOfPartitions(1)
.setMaxMessageBytes("1000012")
.setMaxSizeOnDisk(NOT_SET);
+ private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
+ .setName("topic-for-check-filters-" + randomAlphabetic(5));
private static final Topic TOPIC_FOR_DELETE = new Topic().setName("topic-to-delete-" + randomAlphabetic(5));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeAll
public void beforeAll() {
- TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE));
+ TOPIC_LIST.addAll(List.of(TOPIC_TO_UPDATE_AND_DELETE, TOPIC_FOR_DELETE, TOPIC_FOR_CHECK_FILTERS));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
}
@@ -337,7 +339,7 @@ void checkTopicListElements() {
@Order(11)
void addingNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("_schemas");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
@@ -364,7 +366,7 @@ void addingNewFilterWithinTopic() {
@Order(12)
void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
@@ -389,7 +391,7 @@ void checkFilterSavingWithinSavedFilters() {
@Order(13)
void checkingApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
- navigateToTopicsAndOpenDetails("my_ksql_1ksql_processing_log");
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
topicDetails
.openDetailsTab(MESSAGES)
.clickMessagesAddFiltersBtn()
| train | val | 2023-02-01T08:25:16 | "2022-12-19T18:48:32Z" | anezboretskiy | train |
provectus/kafka-ui/3278_3310 | provectus/kafka-ui | provectus/kafka-ui/3278 | provectus/kafka-ui/3310 | [
"connected"
] | 6ffcd845fa3363033ef764e9a18a8de0808faa75 | 45a6e73d295c904a26ded39621058165fc200506 | [
"Hi, please follow the issue template and leave all the requested information (the version you run, configs, etc.)\r\nPing me once you're done",
"> Hi, please follow the issue template and leave all the requested information (the version you run, configs, etc.) Ping me once you're done\r\n\r\nThe description has been updated.",
"I still can't see the `webclient.max-in-memory-buffer-size` property in your config. Please provide a full one.",
"Okay, considering `Version 0.4.0 works completely fine`:\r\nChanges made within #903 might take no effect after #3123.\r\n@iliax can you take a look?",
"@galanoff , thank you for creating issue - we found bug, and will fix it within (https://github.com/provectus/kafka-ui/pull/3310) PR",
"@armenuikafka please check this is covered in test cases as well.",
"Need to add a TC for it to Qase.io"
] | [] | "2023-02-03T10:47:25Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"type/regression"
] | `webclient.max-in-memory-buffer-size` does not take any effect: DataBufferLimit is always 262144 | **Describe the bug** (Actual behavior)
Cannot view <Cluster> -> Schema Registry page. An error message is displayed:
```
500
200 OK from GET http://kafka.intra:8081/subjects; nested exception is org.springframework.core.io.buffer.DataBufferLimitException: Exceeded limit on max bytes to buffer : 262144
```
Overriding `webclient.max-in-memory-buffer-size` didn't help. It always fails with a buffer limit of 262144 bytes.
Version 0.4.0 works completely fine. I guess a buffer override does not work anymore. May be an upstream dependency issue.
**Expected behavior**
<Cluster> -> Schema Registry page shows schema registry topics data
**Set up**
How do you run the app? Please provide as much info as possible:
1. App version : v0.5.0 [027d9b4]
2. Helm chart version: 0.5.0
3. Env Var configs:
```
KAFKA_ADMIN-CLIENT-TIMEOUT: "120000"
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: ....
```
**Steps to Reproduce**
1. Open <Cluster> -> Schema Registry page.
**Screenshots**
<img width="1219" alt="Screen Shot 2023-02-03 at 11 46 15 am" src="https://user-images.githubusercontent.com/10956941/216484492-39a3558b-70fd-4f2c-90ec-28d1d3b3da98.png">
**Additional context**
Full error stacktrace
```
org.springframework.web.reactive.function.client.WebClientResponseException: 200 OK from GET http://localhost/subjects; nested exception is org.springframework.core.io.buffer.DataBufferLimitException: Exceeded limit on max bytes to buffer : 262144
at org.springframework.web.reactive.function.client.WebClientResponseException.create(WebClientResponseException.java:250)
at org.springframework.web.reactive.function.client.DefaultClientResponse.lambda$createException$1(DefaultClientResponse.java:207)
at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:106)
at reactor.core.publisher.FluxOnErrorReturn$ReturnSubscriber.onError(FluxOnErrorReturn.java:199)
at reactor.core.publisher.Operators$MonoSubscriber.onError(Operators.java:1863)
at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onError(FluxOnAssembly.java:544)
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onError(FluxMapFuseable.java:142)
at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onError(FluxContextWrite.java:121)
at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onError(FluxMapFuseable.java:340)
at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onError(FluxFilterFuseable.java:382)
at reactor.core.publisher.MonoCollect$CollectSubscriber.onError(MonoCollect.java:145)
at reactor.core.publisher.FluxMap$MapSubscriber.onError(FluxMap.java:134)
at reactor.core.publisher.FluxPeek$PeekSubscriber.onError(FluxPeek.java:222)
at reactor.core.publisher.FluxMap$MapSubscriber.onError(FluxMap.java:134)
at reactor.core.publisher.Operators.error(Operators.java:198)
at reactor.netty.channel.FluxReceive.startReceiver(FluxReceive.java:182)
at reactor.netty.channel.FluxReceive.subscribe(FluxReceive.java:143)
at reactor.core.publisher.InternalFluxOperator.subscribe(InternalFluxOperator.java:62)
at reactor.netty.ByteBufFlux.subscribe(ByteBufFlux.java:340)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:103)
at reactor.core.publisher.FluxOnAssembly$OnAssemblySubscriber.onError(FluxOnAssembly.java:544)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onError(MonoFlatMap.java:172)
at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onError(FluxContextWrite.java:121)
at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onError(FluxMapFuseable.java:340)
at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onError(FluxFilterFuseable.java:382)
at reactor.core.publisher.MonoCollect$CollectSubscriber.onError(MonoCollect.java:145)
at reactor.core.publisher.MonoCollect$CollectSubscriber.onNext(MonoCollect.java:124)
at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)
```
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
index 7113f5af367..aea92ab4c45 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
@@ -82,6 +82,7 @@ private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperti
WebClient webClient = new WebClientConfigurator()
.configureSsl(clusterProperties.getSchemaRegistrySsl())
.configureBasicAuth(auth.getUsername(), auth.getPassword())
+ .configureBufferSize(maxBuffSize)
.build();
return ReactiveFailover.create(
parseUrlList(clusterProperties.getSchemaRegistry()),
| null | test | val | 2023-02-09T12:53:43 | "2023-01-30T03:55:58Z" | galanoff | train |
provectus/kafka-ui/3299_3324 | provectus/kafka-ui | provectus/kafka-ui/3299 | provectus/kafka-ui/3324 | [
"connected"
] | ebd25c61b1736b707ca1003c0c9872a0c605df58 | cbb166026d8c6360836def9bf9c208313023961c | [
"Hello there cpapad! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reporting this. We'll take a look",
"> Hey, thanks for reporting this. We'll take a look\r\n\r\nThank you for your quick reply.\r\nI'm more than happy to contribute.",
"@cpapad you can raise a PR if you wish!"
] | [] | "2023-02-08T16:49:11Z" | [
"status/accepted",
"scope/k8s"
] | K8s secret is created even if no secret is provided as a value. | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
This is not exactly a bug, but I felt the `K8s/Helm` section is the appropriate place to post this issue.
The chart's secret template renders every time and the resource is always created; even when no secrets are provided, it is simply created with no data.
This may not be a big issue, but we ran into some conflicts while deploying, which we have dealt with through workarounds.
For deploying `kafka-ui` we use both the official chart and a `common` chart we have developed to handle things like ingress, `istio` routing and `sealed-secrets`. Since our `common` chart has the same naming conventions, we ran into a conflict when `sealed-secrets` was trying to unseal and create the k8s secret with the same name.
To overcome this we set a `fullnameOverride` in the `kafka-ui` chart, but then the network routing broke because that also broke the Kubernetes service naming convention assumed by our common chart.
We solved this by adding a second service template, which works but is messy because we create two services while only one is used.
So I would like to ask if we could make the secret resource optional when no secret values are provided.
If this is something we can agree on, I can work on it and contribute.
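For context, a minimal sketch of the values block that feeds this template; the key names follow the chart's `.Values.envs.secret` reference, while the exact defaults are an assumption:
```
envs:
  config: {}
  secret: {}   # left empty, yet the Secret resource is still rendered today
```
Making the template conditional on `envs.secret` being non-empty would skip the resource entirely in that case.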
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
1. App version `0.5.0`
2. Chart version `0.5.1`
**Steps to Reproduce**
Steps to reproduce the behavior:
1. Create secret with name matching the release name
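For illustration, a minimal sketch of how the collision shows up; the release name, secret name and key are assumptions based on the description above:
```
kubectl create secret generic kafka-ui --from-literal=SOME_KEY=value
helm upgrade --install kafka-ui ./charts/kafka-ui
# Helm will then typically refuse to adopt the pre-existing Secret that its own (empty) secret.yaml also tries to render
```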
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
Failure to create secret | [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/secret.yaml"
] | [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/secret.yaml"
] | [] | diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 3a1759a3911..6e5f0ee2d5b 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.2
+version: 0.5.3
appVersion: v0.5.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/secret.yaml b/charts/kafka-ui/templates/secret.yaml
index a2d1f25fa2f..1f974503dc6 100644
--- a/charts/kafka-ui/templates/secret.yaml
+++ b/charts/kafka-ui/templates/secret.yaml
@@ -1,3 +1,4 @@
+{{- if .Values.envs.secret -}}
apiVersion: v1
kind: Secret
metadata:
@@ -9,3 +10,4 @@ data:
{{- range $key, $val := .Values.envs.secret }}
{{ $key }}: {{ $val | b64enc | quote }}
{{- end -}}
+{{- end}}
\ No newline at end of file
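Since an unset or empty `envs.secret` map is falsy in Go templates, the guard added above skips the whole Secret manifest. A quick local check could look like this (a sketch; release name and flag usage assumed from standard Helm CLI behaviour):
```
helm template kafka-ui ./charts/kafka-ui --show-only templates/secret.yaml
helm template kafka-ui ./charts/kafka-ui --show-only templates/secret.yaml --set envs.secret.FOO=bar
```
The first command should no longer produce a Secret (depending on the Helm version it may print nothing or report an empty template), while the second should render it with `FOO` base64-encoded.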
| null | train | val | 2023-02-14T11:13:52 | "2023-02-01T14:52:37Z" | cpapad | train |
provectus/kafka-ui/3296_3327 | provectus/kafka-ui | provectus/kafka-ui/3296 | provectus/kafka-ui/3327 | [
"connected"
] | 45a6e73d295c904a26ded39621058165fc200506 | 8663ef6e8422b84a18cc19d82832d4f0fcf6b1cd | [] | [] | "2023-02-09T12:09:12Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]Checking Clearing results for already executed KSQL Request | **Autotest implementation for:**
https://app.qase.io/case/KAFKAUI-86
**Description:**
Checking Clearing results for already executed KSQL Request
**Pre-conditions:**
- Login to system
- Navigate to KSQL DB
- Execute KSQL Request
**Steps:**
1. Press the "Clear results"
**Expected result:**
All the data should disappear &&
"Clear results" button should become inactive | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
index e34fbedb1cb..30ac1007fdb 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlQueryForm.java
@@ -1,144 +1,155 @@
package com.provectus.kafka.ui.pages.ksqlDb;
-import static com.codeborne.selenide.Condition.visible;
-import static com.codeborne.selenide.Selenide.$$x;
-import static com.codeborne.selenide.Selenide.$x;
-
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
import com.codeborne.selenide.ElementsCollection;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
+
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
+import static com.codeborne.selenide.Condition.visible;
+import static com.codeborne.selenide.Selenide.$$x;
+import static com.codeborne.selenide.Selenide.$x;
+
public class KsqlQueryForm extends BasePage {
- protected SelenideElement pageTitle = $x("//h1[text()='Query']");
- protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
- protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
- protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
- protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
- protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
- protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
- protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
- protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
- protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
- protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
-
- @Step
- public KsqlQueryForm waitUntilScreenReady() {
- waitUntilSpinnerDisappear();
- pageTitle.shouldBe(Condition.visible);
- return this;
- }
-
- @Step
- public KsqlQueryForm clickClearBtn() {
- clickByJavaScript(clearBtn);
- return this;
- }
-
- @Step
- public KsqlQueryForm clickExecuteBtn() {
- clickByJavaScript(executeBtn);
- if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
- loadingSpinner.shouldBe(Condition.visible);
- } else {
- waitUntilSpinnerDisappear();
+ protected SelenideElement pageTitle = $x("//h1[text()='Query']");
+ protected SelenideElement clearBtn = $x("//div/button[text()='Clear']");
+ protected SelenideElement executeBtn = $x("//div/button[text()='Execute']");
+ protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']");
+ protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']");
+ protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']");
+ protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']");
+ protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']");
+ protected ElementsCollection ksqlGridItems = $$x("//tbody//tr");
+ protected ElementsCollection keyField = $$x("//input[@aria-label='key']");
+ protected ElementsCollection valueField = $$x("//input[@aria-label='value']");
+
+ @Step
+ public KsqlQueryForm waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ pageTitle.shouldBe(Condition.visible);
+ return this;
}
- return this;
- }
-
- @Step
- public KsqlQueryForm clickStopQueryBtn() {
- clickByJavaScript(stopQueryBtn);
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public KsqlQueryForm clickClearResultsBtn() {
- clickByJavaScript(clearResultsBtn);
- waitUntilSpinnerDisappear();
- return this;
- }
-
- @Step
- public KsqlQueryForm clickAddStreamProperty() {
- clickByJavaScript(addStreamPropertyBtn);
- return this;
- }
-
- @Step
- public KsqlQueryForm setQuery(String query) {
- queryAreaValue.shouldBe(Condition.visible).click();
- queryArea.setValue(query);
- return this;
- }
-
- private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
- List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
- ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
- .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
- return gridItemList;
- }
-
- @Step
- public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
- return initItems().stream()
- .filter(e -> e.getName().equalsIgnoreCase(name))
- .findFirst().orElseThrow();
- }
-
- public static class KsqlResponseGridItem extends BasePage {
-
- private final SelenideElement element;
-
- private KsqlResponseGridItem(SelenideElement element) {
- this.element = element;
+
+ @Step
+ public KsqlQueryForm clickClearBtn() {
+ clickByJavaScript(clearBtn);
+ return this;
}
@Step
- public String getType() {
- return element.$x("./td[1]").getText().trim();
+ public KsqlQueryForm clickExecuteBtn() {
+ clickByActions(executeBtn);
+ if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+ loadingSpinner.shouldBe(Condition.visible);
+ } else {
+ waitUntilSpinnerDisappear();
+ }
+ return this;
}
@Step
- public String getName() {
- return element.$x("./td[2]").scrollTo().getText().trim();
+ public KsqlQueryForm clickStopQueryBtn() {
+ clickByActions(stopQueryBtn);
+ waitUntilSpinnerDisappear();
+ return this;
}
@Step
- public boolean isVisible() {
- boolean isVisible = false;
- try {
- element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
- isVisible = true;
- } catch (Throwable ignored) {
- }
- return isVisible;
+ public KsqlQueryForm clickClearResultsBtn() {
+ clickByActions(clearResultsBtn);
+ waitUntilSpinnerDisappear();
+ return this;
}
@Step
- public String getTopic() {
- return element.$x("./td[3]").getText().trim();
+ public KsqlQueryForm clickAddStreamProperty() {
+ clickByJavaScript(addStreamPropertyBtn);
+ return this;
}
@Step
- public String getKeyFormat() {
- return element.$x("./td[4]").getText().trim();
+ public KsqlQueryForm setQuery(String query) {
+ queryAreaValue.shouldBe(Condition.visible).click();
+ queryArea.setValue(query);
+ return this;
}
@Step
- public String getValueFormat() {
- return element.$x("./td[5]").getText().trim();
+ public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+ return initItems().stream()
+ .filter(e -> e.getName().equalsIgnoreCase(name))
+ .findFirst().orElseThrow();
}
@Step
- public String getIsWindowed() {
- return element.$x("./td[6]").getText().trim();
+ public boolean areResultsVisible() {
+ boolean visible = false;
+ try {
+ visible = initItems().size() > 0;
+ } catch (Throwable ignored) {
+ }
+ return visible;
+ }
+
+ private List<KsqlQueryForm.KsqlResponseGridItem> initItems() {
+ List<KsqlQueryForm.KsqlResponseGridItem> gridItemList = new ArrayList<>();
+ ksqlGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ .forEach(item -> gridItemList.add(new KsqlQueryForm.KsqlResponseGridItem(item)));
+ return gridItemList;
+ }
+
+ public static class KsqlResponseGridItem extends BasePage {
+
+ private final SelenideElement element;
+
+ private KsqlResponseGridItem(SelenideElement element) {
+ this.element = element;
+ }
+
+ @Step
+ public String getType() {
+ return element.$x("./td[1]").getText().trim();
+ }
+
+ @Step
+ public String getName() {
+ return element.$x("./td[2]").scrollTo().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
+ }
+
+ @Step
+ public String getTopic() {
+ return element.$x("./td[3]").getText().trim();
+ }
+
+ @Step
+ public String getKeyFormat() {
+ return element.$x("./td[4]").getText().trim();
+ }
+
+ @Step
+ public String getValueFormat() {
+ return element.$x("./td[5]").getText().trim();
+ }
+
+ @Step
+ public String getIsWindowed() {
+ return element.$x("./td[6]").getText().trim();
+ }
}
- }
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index 8689c0abde3..be847b00a7a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -39,7 +39,7 @@ public void beforeClass() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(41)
- @Test
+ @Test(priority = 1)
public void checkShowTablesRequestExecution() {
naviSideBar
.openSideMenu(KSQL_DB);
@@ -51,8 +51,32 @@ public void checkShowTablesRequestExecution() {
.setQuery(SHOW_TABLES.getQuery())
.clickExecuteBtn();
SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
softly.assertAll();
}
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(86)
+ @Test(priority = 2)
+ public void clearResultsForExecutedRequest() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
+ ksqlQueryForm
+ .waitUntilScreenReady()
+ .setQuery(SHOW_TABLES.getQuery())
+ .clickExecuteBtn();
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertAll();
+ ksqlQueryForm
+ .clickClearResultsBtn();
+ softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertAll();
+ }
}
| train | val | 2023-02-09T13:13:04 | "2023-02-01T09:21:54Z" | anezboretskiy | train |
provectus/kafka-ui/3285_3340 | provectus/kafka-ui | provectus/kafka-ui/3285 | provectus/kafka-ui/3340 | [
"connected"
] | 8663ef6e8422b84a18cc19d82832d4f0fcf6b1cd | eeef330fc03b5fdf6891b1e6d5b60ebee4bc4f8f | [] | [] | "2023-02-13T10:00:31Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]Checking available custom parameters with Topic editing | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-268
Pre-conditions:
-Login to kafka-ui
-Create a Topic
-Add any custom parameter or leave the default
Post-conditions:
Not set
Steps:
-Navigate to Topics
-Open the Topic
-Edit settings from the 3-dot menu
-Add Custom Parameter
-Press the Custom Parameter dropdown
Expected results:
-created in pre-condition
-Custom Parameter and Value should appear
-All values should be available to add as a Custom parameter | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index daea5c0d54a..e51942a39ce 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -23,7 +23,8 @@ public abstract class BasePage extends WebUtils {
protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
- protected ElementsCollection allGridItems = $$x("//tr[@class]");
+ protected ElementsCollection ddlOptions = $$x("//li[@value]");
+ protected ElementsCollection gridItems = $$x("//tr[@class]");
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
protected String tableElementNameLocator = "//tbody//a[contains(text(),'%s')]";
protected String columnHeaderLocator = "//table//tr/th//div[text()='%s']";
@@ -44,6 +45,10 @@ protected SelenideElement getTableElement(String elementName) {
return $x(String.format(tableElementNameLocator, elementName));
}
+ protected ElementsCollection getDdlOptions() {
+ return ddlOptions;
+ }
+
protected String getAlertHeader() {
log.debug("\ngetAlertHeader");
String result = alertHeader.shouldBe(Condition.visible).getText();
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
index 6db2d20ad65..3d3a58f355e 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
@@ -62,7 +62,7 @@ public List<SelenideElement> getAllEnabledElements() {
private List<BrokersList.BrokerGridItem> initGridItems() {
List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
index 84c23d0ac3f..25246a86ed5 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqlDb/KsqlDbList.java
@@ -41,7 +41,7 @@ public KsqlDbList openDetailsTab(KsqlMenuTabs menu) {
private List<KsqlDbList.KsqlTablesGridItem> initTablesItems() {
List<KsqlDbList.KsqlTablesGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlTablesGridItem(item)));
return gridItemList;
}
@@ -89,7 +89,7 @@ public String getIsWindowed() {
private List<KsqlDbList.KsqlStreamsGridItem> initStreamsItems() {
List<KsqlDbList.KsqlStreamsGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new KsqlDbList.KsqlStreamsGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
index 7deed1b7fde..f60bd6d431e 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicCreateEditForm.java
@@ -1,9 +1,6 @@
package com.provectus.kafka.ui.pages.topics;
-import com.codeborne.selenide.ClickOptions;
-import com.codeborne.selenide.Condition;
-import com.codeborne.selenide.ElementsCollection;
-import com.codeborne.selenide.SelenideElement;
+import com.codeborne.selenide.*;
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.topics.enums.CleanupPolicyValue;
import com.provectus.kafka.ui.pages.topics.enums.CustomParameterType;
@@ -91,8 +88,20 @@ public TopicCreateEditForm clickAddCustomParameterTypeButton() {
}
@Step
- public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
+ public TopicCreateEditForm openCustomParameterTypeDdl() {
customParameterDdl.shouldBe(Condition.visible).click();
+ ddlOptions.shouldHave(CollectionCondition.sizeGreaterThan(0));
+ return this;
+ }
+
+ @Step
+ public ElementsCollection getAllDdlOptions() {
+ return getDdlOptions();
+ }
+
+ @Step
+ public TopicCreateEditForm setCustomParameterType(CustomParameterType customParameterType) {
+ openCustomParameterTypeDdl();
$x(String.format(ddlElementLocator, customParameterType.getOptionValue())).shouldBe(Condition.visible).click();
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
index 78ec2cef14e..980a66b791d 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
@@ -337,7 +337,7 @@ public int getMessageCountAmount() {
private List<TopicDetails.MessageGridItem> initItems() {
List<TopicDetails.MessageGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new TopicDetails.MessageGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
index 2b00c97c40f..3c0fcac211d 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicSettingsTab.java
@@ -24,7 +24,7 @@ public TopicSettingsTab waitUntilScreenReady() {
private List<SettingsGridItem> initGridItems() {
List<SettingsGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new SettingsGridItem(item)));
return gridItemList;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
index 291c94667fb..499d5fe965a 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
@@ -163,7 +163,7 @@ public List<SelenideElement> getAllEnabledElements() {
private List<TopicGridItem> initGridItems() {
List<TopicGridItem> gridItemList = new ArrayList<>();
- allGridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
+ gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
.forEach(item -> gridItemList.add(new TopicGridItem(item)));
return gridItemList;
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index 62fb0b04488..8b86d82c9e6 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -108,11 +108,32 @@ void checkAvailableOperations() {
}
@Ignore
- @Issue("https://github.com/provectus/kafka-ui/issues/2625")
+ @Issue("https://github.com/provectus/kafka-ui/issues/3071")
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
- @CaseId(197)
+ @CaseId(268)
@Test(priority = 3)
+ public void checkCustomParametersWithinEditExistingTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
+ topicDetails
+ .openDotMenu()
+ .clickEditSettingsMenu();
+ SoftAssert softly = new SoftAssert();
+ topicCreateEditForm
+ .waitUntilScreenReady()
+ .clickAddCustomParameterTypeButton()
+ .openCustomParameterTypeDdl()
+ .getAllDdlOptions()
+ .forEach(option ->
+ softly.assertTrue(!option.is(Condition.attribute("disabled")),
+ option.getText() + " is enabled:"));
+ softly.assertAll();
+ }
+
+ @Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
+ @AutomationStatus(status = Status.AUTOMATED)
+ @CaseId(197)
+ @Test(priority = 4)
public void updateTopic() {
navigateToTopicsAndOpenDetails(TOPIC_TO_UPDATE_AND_DELETE.getName());
topicDetails
@@ -166,7 +187,7 @@ public void updateTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(242)
- @Test(priority = 4)
+ @Test(priority = 5)
public void removeTopicFromTopicList() {
navigateToTopics();
topicsList
@@ -182,7 +203,7 @@ public void removeTopicFromTopicList() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(207)
- @Test(priority = 5)
+ @Test(priority = 6)
public void deleteTopic() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_DELETE.getName());
topicDetails
@@ -197,7 +218,7 @@ public void deleteTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(20)
- @Test(priority = 6)
+ @Test(priority = 7)
public void redirectToConsumerFromTopic() {
String topicName = "source-activities";
String consumerGroupId = "connect-sink_postgres_activities";
@@ -218,7 +239,7 @@ public void redirectToConsumerFromTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(4)
- @Test(priority = 7)
+ @Test(priority = 8)
public void checkTopicCreatePossibility() {
navigateToTopics();
topicsList
@@ -241,7 +262,7 @@ public void checkTopicCreatePossibility() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(266)
- @Test(priority = 8)
+ @Test(priority = 9)
public void checkTimeToRetainDataCustomValueWithEditingTopic() {
Topic topicToRetainData = new Topic()
.setName("topic-to-retain-data-" + randomAlphabetic(5))
@@ -274,7 +295,7 @@ public void checkTimeToRetainDataCustomValueWithEditingTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(6)
- @Test(priority = 9)
+ @Test(priority = 10)
public void checkCustomParametersWithinCreateNewTopic() {
navigateToTopics();
topicsList
@@ -295,7 +316,7 @@ public void checkCustomParametersWithinCreateNewTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(2)
- @Test(priority = 10)
+ @Test(priority = 11)
public void checkTopicListElements() {
navigateToTopics();
verifyElementsCondition(topicsList.getAllVisibleElements(), Condition.visible);
@@ -305,7 +326,7 @@ public void checkTopicListElements() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(12)
- @Test(priority = 11)
+ @Test(priority = 12)
public void addingNewFilterWithinTopic() {
String filterName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -327,7 +348,7 @@ public void addingNewFilterWithinTopic() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(13)
- @Test(priority = 12)
+ @Test(priority = 13)
public void checkFilterSavingWithinSavedFilters() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -350,7 +371,7 @@ public void checkFilterSavingWithinSavedFilters() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(14)
- @Test(priority = 13)
+ @Test(priority = 14)
public void checkApplyingSavedFilterWithinTopicMessages() {
String displayName = randomAlphabetic(5);
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
@@ -371,7 +392,7 @@ public void checkApplyingSavedFilterWithinTopicMessages() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(11)
- @Test(priority = 14)
+ @Test(priority = 15)
public void checkShowInternalTopicsButtonFunctionality() {
navigateToTopics();
SoftAssert softly = new SoftAssert();
@@ -389,7 +410,7 @@ public void checkShowInternalTopicsButtonFunctionality() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(56)
- @Test(priority = 15)
+ @Test(priority = 16)
public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
navigateToTopics();
topicsList
@@ -439,7 +460,7 @@ public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(247)
- @Test(priority = 16)
+ @Test(priority = 17)
public void recreateTopicFromTopicProfile() {
Topic topicToRecreate = new Topic()
.setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -469,7 +490,7 @@ public void recreateTopicFromTopicProfile() {
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@AutomationStatus(status = Status.AUTOMATED)
@CaseId(8)
- @Test(priority = 17)
+ @Test(priority = 18)
public void checkCopyTopicPossibility() {
Topic topicToCopy = new Topic()
.setName("topic-to-copy-" + randomAlphabetic(5))
| train | val | 2023-02-09T14:23:50 | "2023-01-30T16:00:02Z" | ArthurNiedial | train |
provectus/kafka-ui/3292_3357 | provectus/kafka-ui | provectus/kafka-ui/3292 | provectus/kafka-ui/3357 | [
"connected"
] | b3f74cbfea7b43e50a817331e0ed549e35d566d9 | 4d20cb695804d4247731dffc62ebdb5d685647be | [
"Hello there gassmannalexander! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"To me it looks like a problem in the used Confluent library where [ProtobufSchema.java](https://github.com/confluentinc/schema-registry/blob/master/protobuf-provider/src/main/java/io/confluent/kafka/schemaregistry/protobuf/ProtobufSchema.java#L1877) is only considering well known dependencies and does not load additional imports from the file system.",
"@gassmannalexander thank you for issue,\r\nwe already have opened PR to fix this (https://github.com/provectus/kafka-ui/pull/3039). Keep an eye on it )",
"Any updates on this, would love to see the import feature working!\r\n",
"@arun-avudainayagam there are some difficulties, we're working on it",
"Hi @Haarolean - When is release 0.6 expected?",
"@arun-avudainayagam thursday-ish"
] | [] | "2023-02-16T12:06:24Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | protobuf schema local imports are not resolved | Hi,
I'm using protobuf support but when there is a dependency like an import from one protobuf definition to another, the import statement is ignored. I get this error message:
`[main] o.s.b.w.r.c.AnnotationConfigReactiveWebServerApplicationContext: Exception encountered during context initialization - cancelling refresh attempt: org.springframework.beans.factory.BeanCreationException: Error creating bean with name 'deserializationService' defined in URL [jar:file:/kafka-ui-api.jar!/BOOT-INF/classes!/com/provectus/kafka/ui/service/DeserializationService.class]: Bean instantiation via constructor failed; nested exception is org.springframework.beans.BeanInstantiationException: Failed to instantiate [com.provectus.kafka.ui.service.DeserializationService]: Constructor threw exception; nested exception is java.lang.IllegalStateException: com.google.protobuf.Descriptors$DescriptorValidationException: SomeMessage.id: "StringValue" is not defined.`
here are the two protobufFiles:
**wrappers.proto:**
```
syntax = "proto3";
message StringValue {
string value = 1;
}
message BytesValue {
bytes value = 1;
}
```
**someMessage.proto:**
```
syntax = "proto3";
import "wrappers.proto";
message SomeMessage {
StringValue id = 1;
string message = 2;
string topic = 3;
}
```
**docker-compose.yaml:**
```
environment:
kafka.clusters.0.name: SerdeExampleCluster
kafka.clusters.0.bootstrapServers: kafka0:29092
kafka.clusters.0.defaultKeySerde: Int32
kafka.clusters.0.defaultValueSerde: String
kafka.clusters.0.serde.0.name: ProtobufFile
kafka.clusters.0.serde.0.topicKeysPattern: String
kafka.clusters.0.serde.0.topicValuesPattern: "test"
kafka.clusters.0.serde.0.properties.protobufFiles.0: protofiles/someMessage.proto
kafka.clusters.0.serde.0.properties.protobufFiles.1: protofiles/wrappers.proto
kafka.clusters.0.serde.0.properties.protobufMessageNameByTopic.test: SomeMessage
volumes:
- ../../src/main/protobuf:/protofiles
```
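For reference, the fix merged for this issue (PR #3357, whose patch is included below) deprecates listing individual proto files and adds a `protobufFilesDir` property that is scanned recursively, so imports between files under that directory can be resolved. Below is a sketch of the compose environment above adapted to that property; the property name and recursive-scan behaviour come from the patch, while the paths and topic mapping are the reporter's own, so treat them as assumptions about this particular setup:

```yaml
environment:
  kafka.clusters.0.serde.0.name: ProtobufFile
  kafka.clusters.0.serde.0.topicValuesPattern: "test"
  # Point the serde at the directory instead of enumerating files;
  # `import "wrappers.proto";` is then resolved relative to this root.
  kafka.clusters.0.serde.0.properties.protobufFilesDir: /protofiles/
  kafka.clusters.0.serde.0.properties.protobufMessageNameByTopic.test: SomeMessage
volumes:
  - ../../src/main/protobuf:/protofiles
```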
| [
"documentation/compose/kafka-ui-serdes.yaml",
"documentation/compose/proto/key-types.proto",
"documentation/guides/Protobuf.md",
"documentation/guides/Serialization.md",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java"
] | [
"documentation/compose/kafka-ui-serdes.yaml",
"documentation/compose/proto/key-types.proto",
"documentation/guides/Protobuf.md",
"documentation/guides/Serialization.md",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java",
"kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto",
"kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto",
"kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto",
"kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto"
] | diff --git a/documentation/compose/kafka-ui-serdes.yaml b/documentation/compose/kafka-ui-serdes.yaml
index 143d454bb50..c380a34f6b4 100644
--- a/documentation/compose/kafka-ui-serdes.yaml
+++ b/documentation/compose/kafka-ui-serdes.yaml
@@ -28,8 +28,7 @@ services:
kafka.clusters.0.serde.0.name: ProtobufFile
kafka.clusters.0.serde.0.topicKeysPattern: "topic1"
kafka.clusters.0.serde.0.topicValuesPattern: "topic1"
- kafka.clusters.0.serde.0.properties.protobufFiles.0: /protofiles/key-types.proto
- kafka.clusters.0.serde.0.properties.protobufFiles.1: /protofiles/values.proto
+ kafka.clusters.0.serde.0.properties.protobufFilesDir: /protofiles/
kafka.clusters.0.serde.0.properties.protobufMessageNameForKey: test.MyKey # default type for keys
kafka.clusters.0.serde.0.properties.protobufMessageName: test.MyValue # default type for values
kafka.clusters.0.serde.0.properties.protobufMessageNameForKeyByTopic.topic1: test.MySpecificTopicKey # keys type for topic "topic1"
diff --git a/documentation/compose/proto/key-types.proto b/documentation/compose/proto/key-types.proto
index 908aed5689d..1f5e22a427d 100644
--- a/documentation/compose/proto/key-types.proto
+++ b/documentation/compose/proto/key-types.proto
@@ -1,11 +1,15 @@
syntax = "proto3";
package test;
+import "google/protobuf/wrappers.proto";
+
message MyKey {
string myKeyF1 = 1;
+ google.protobuf.UInt64Value uint_64_wrapper = 2;
}
message MySpecificTopicKey {
string special_field1 = 1;
string special_field2 = 2;
+ google.protobuf.FloatValue float_wrapper = 3;
}
diff --git a/documentation/guides/Protobuf.md b/documentation/guides/Protobuf.md
index 533a6a8f482..12f92448c81 100644
--- a/documentation/guides/Protobuf.md
+++ b/documentation/guides/Protobuf.md
@@ -12,22 +12,26 @@ To configure Kafkaui to deserialize protobuf messages using a supplied protobuf
```yaml
kafka:
clusters:
- - # Cluster configuration omitted.
- # protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
+ - # Cluster configuration omitted...
+ # protobufFilesDir specifies root location for proto files (will be scanned recursively)
+ # NOTE: if 'protobufFilesDir' specified, then 'protobufFile' and 'protobufFiles' settings will be ignored
+ protobufFilesDir: "/path/to/my-protobufs"
+ # (DEPRECATED) protobufFile is the path to the protobuf schema. (deprecated: please use "protobufFiles")
protobufFile: path/to/my.proto
- # protobufFiles is the path to one or more protobuf schemas.
- protobufFiles:
- - /path/to/my.proto
- - /path/to/another.proto
- # protobufMessageName is the default protobuf type that is used to deserilize
- # the message's value if the topic is not found in protobufMessageNameByTopic.
+ # (DEPRECATED) protobufFiles is the location of one or more protobuf schemas
+ protobufFiles:
+ - /path/to/my-protobufs/my.proto
+ - /path/to/my-protobufs/another.proto
+ - /path/to/my-protobufs:test/test.proto
+ # protobufMessageName is the default protobuf type that is used to deserialize
+ # the message's value if the topic is not found in protobufMessageNameByTopic.
protobufMessageName: my.DefaultValType
# protobufMessageNameByTopic is a mapping of topic names to protobuf types.
# This mapping is required and is used to deserialize the Kafka message's value.
protobufMessageNameByTopic:
topic1: my.Type1
topic2: my.Type2
- # protobufMessageNameForKey is the default protobuf type that is used to deserilize
+ # protobufMessageNameForKey is the default protobuf type that is used to deserialize
# the message's key if the topic is not found in protobufMessageNameForKeyByTopic.
protobufMessageNameForKey: my.DefaultKeyType
# protobufMessageNameForKeyByTopic is a mapping of topic names to protobuf types.
diff --git a/documentation/guides/Serialization.md b/documentation/guides/Serialization.md
index 3f827441d2e..b9690f2cba1 100644
--- a/documentation/guides/Serialization.md
+++ b/documentation/guides/Serialization.md
@@ -46,10 +46,8 @@ kafka:
serde:
- name: ProtobufFile
properties:
- # path to the protobuf schema files
- protobufFiles:
- - path/to/my.proto
- - path/to/another.proto
+ # path to the protobuf schema files directory
+ protobufFilesDir: "path/to/protofiles"
# default protobuf type that is used for KEY serialization/deserialization
# optional
protobufMessageNameForKey: my.Type1
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
index 664c75b70d7..8a4c28a320a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java
@@ -1,9 +1,36 @@
package com.provectus.kafka.ui.serdes.builtin;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.protobuf.AnyProto;
+import com.google.protobuf.ApiProto;
+import com.google.protobuf.DescriptorProtos;
+import com.google.protobuf.Descriptors;
import com.google.protobuf.Descriptors.Descriptor;
+import com.google.protobuf.DurationProto;
import com.google.protobuf.DynamicMessage;
+import com.google.protobuf.EmptyProto;
+import com.google.protobuf.FieldMaskProto;
+import com.google.protobuf.SourceContextProto;
+import com.google.protobuf.StructProto;
+import com.google.protobuf.TimestampProto;
+import com.google.protobuf.TypeProto;
+import com.google.protobuf.WrappersProto;
import com.google.protobuf.util.JsonFormat;
+import com.google.type.ColorProto;
+import com.google.type.DateProto;
+import com.google.type.DateTimeProto;
+import com.google.type.DayOfWeekProto;
+import com.google.type.ExprProto;
+import com.google.type.FractionProto;
+import com.google.type.IntervalProto;
+import com.google.type.LatLngProto;
+import com.google.type.MoneyProto;
+import com.google.type.MonthProto;
+import com.google.type.PhoneNumberProto;
+import com.google.type.PostalAddressProto;
+import com.google.type.QuaternionProto;
+import com.google.type.TimeOfDayProto;
import com.provectus.kafka.ui.exception.ValidationException;
import com.provectus.kafka.ui.serde.api.DeserializeResult;
import com.provectus.kafka.ui.serde.api.PropertyResolver;
@@ -11,13 +38,19 @@
import com.provectus.kafka.ui.serde.api.SchemaDescription;
import com.provectus.kafka.ui.serdes.BuiltInSerde;
import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
+import com.squareup.wire.schema.ErrorCollector;
+import com.squareup.wire.schema.Linker;
+import com.squareup.wire.schema.Loader;
+import com.squareup.wire.schema.Location;
+import com.squareup.wire.schema.ProtoFile;
+import com.squareup.wire.schema.internal.parser.ProtoFileElement;
+import com.squareup.wire.schema.internal.parser.ProtoParser;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaUtils;
import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.nio.file.Paths;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@@ -28,7 +61,10 @@
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.jetbrains.annotations.NotNull;
+@Slf4j
public class ProtobufFileSerde implements BuiltInSerde {
public static String name() {
@@ -51,132 +87,35 @@ public static String name() {
@Override
public boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- Optional<String> protobufFile = kafkaClusterProperties.getProperty("protobufFile", String.class);
- Optional<List<String>> protobufFiles = kafkaClusterProperties.getListProperty("protobufFiles", String.class);
- return protobufFile.isPresent() || protobufFiles.filter(files -> !files.isEmpty()).isPresent();
+ return Configuration.canBeAutoConfigured(kafkaClusterProperties);
}
@Override
public void autoConfigure(PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- configure(kafkaClusterProperties);
+ configure(Configuration.create(kafkaClusterProperties));
}
@Override
public void configure(PropertyResolver serdeProperties,
PropertyResolver kafkaClusterProperties,
PropertyResolver globalProperties) {
- configure(serdeProperties);
- }
-
- private void configure(PropertyResolver properties) {
- Map<Path, ProtobufSchema> protobufSchemas = joinPathProperties(properties).stream()
- .map(path -> Map.entry(path, new ProtobufSchema(readFileAsString(path))))
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
-
- // Load all referenced message schemas and store their source proto file with the descriptors
- Map<Descriptor, Path> descriptorPaths = new HashMap<>();
- Optional<String> protobufMessageName = properties.getProperty("protobufMessageName", String.class);
- protobufMessageName.ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
-
- Optional<String> protobufMessageNameForKey =
- properties.getProperty("protobufMessageNameForKey", String.class);
- protobufMessageNameForKey
- .ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
-
- Optional<Map<String, String>> protobufMessageNameByTopic =
- properties.getMapProperty("protobufMessageNameByTopic", String.class, String.class);
- protobufMessageNameByTopic
- .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
-
- Optional<Map<String, String>> protobufMessageNameForKeyByTopic =
- properties.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class);
- protobufMessageNameForKeyByTopic
- .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
-
- // Fill dictionary for descriptor lookup by full message name
- Map<String, Descriptor> descriptorMap = descriptorPaths.keySet().stream()
- .collect(Collectors.toMap(Descriptor::getFullName, Function.identity()));
-
- configure(
- protobufMessageName.map(descriptorMap::get).orElse(null),
- protobufMessageNameForKey.map(descriptorMap::get).orElse(null),
- descriptorPaths,
- protobufMessageNameByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of()),
- protobufMessageNameForKeyByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of())
- );
+ configure(Configuration.create(serdeProperties));
}
@VisibleForTesting
- void configure(
- @Nullable Descriptor defaultMessageDescriptor,
- @Nullable Descriptor defaultKeyMessageDescriptor,
- Map<Descriptor, Path> descriptorPaths,
- Map<String, Descriptor> messageDescriptorMap,
- Map<String, Descriptor> keyMessageDescriptorMap) {
- if (defaultMessageDescriptor == null
- && defaultKeyMessageDescriptor == null
- && messageDescriptorMap.isEmpty()
- && keyMessageDescriptorMap.isEmpty()) {
+ void configure(Configuration configuration) {
+ if (configuration.defaultMessageDescriptor() == null
+ && configuration.defaultKeyMessageDescriptor() == null
+ && configuration.messageDescriptorMap().isEmpty()
+ && configuration.keyMessageDescriptorMap().isEmpty()) {
throw new ValidationException("Neither default, not per-topic descriptors defined for " + name() + " serde");
}
- this.defaultMessageDescriptor = defaultMessageDescriptor;
- this.defaultKeyMessageDescriptor = defaultKeyMessageDescriptor;
- this.descriptorPaths = descriptorPaths;
- this.messageDescriptorMap = messageDescriptorMap;
- this.keyMessageDescriptorMap = keyMessageDescriptorMap;
- }
-
- private static void addProtobufSchema(Map<Descriptor, Path> descriptorPaths,
- Map<Path, ProtobufSchema> protobufSchemas,
- String messageName) {
- var descriptorAndPath = getDescriptorAndPath(protobufSchemas, messageName);
- descriptorPaths.put(descriptorAndPath.getKey(), descriptorAndPath.getValue());
- }
-
- private static void addProtobufSchemas(Map<Descriptor, Path> descriptorPaths,
- Map<Path, ProtobufSchema> protobufSchemas,
- Map<String, String> messageNamesByTopic) {
- messageNamesByTopic.values().stream()
- .map(msgName -> getDescriptorAndPath(protobufSchemas, msgName))
- .forEach(entry -> descriptorPaths.put(entry.getKey(), entry.getValue()));
- }
-
- private static List<Path> joinPathProperties(PropertyResolver propertyResolver) {
- return Stream.concat(
- propertyResolver.getProperty("protobufFile", String.class).map(List::of).stream(),
- propertyResolver.getListProperty("protobufFiles", String.class).stream())
- .flatMap(Collection::stream)
- .distinct()
- .map(Path::of)
- .collect(Collectors.toList());
- }
-
- private static Map.Entry<Descriptor, Path> getDescriptorAndPath(Map<Path, ProtobufSchema> protobufSchemas,
- String msgName) {
- return protobufSchemas.entrySet().stream()
- .filter(schema -> schema.getValue().toDescriptor(msgName) != null)
- .map(schema -> Map.entry(schema.getValue().toDescriptor(msgName), schema.getKey()))
- .findFirst()
- .orElseThrow(() -> new NullPointerException(
- "The given message type not found in protobuf definition: " + msgName));
- }
-
- private static String readFileAsString(Path path) {
- try {
- return Files.readString(path);
- } catch (IOException e) {
- throw new UncheckedIOException(e);
- }
- }
-
- private Map<String, Descriptor> populateDescriptors(Map<String, Descriptor> descriptorMap,
- Map<String, String> messageNameMap) {
- Map<String, Descriptor> descriptors = new HashMap<>();
- for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
- descriptors.put(entry.getKey(), descriptorMap.get(entry.getValue()));
- }
- return descriptors;
+ this.defaultMessageDescriptor = configuration.defaultMessageDescriptor();
+ this.defaultKeyMessageDescriptor = configuration.defaultKeyMessageDescriptor();
+ this.descriptorPaths = configuration.descriptorPaths();
+ this.messageDescriptorMap = configuration.messageDescriptorMap();
+ this.keyMessageDescriptorMap = configuration.keyMessageDescriptorMap();
}
@Override
@@ -249,4 +188,238 @@ private SchemaDescription toSchemaDescription(Descriptor descriptor) {
Map.of("messageName", descriptor.getFullName())
);
}
+
+ @SneakyThrows
+ private static String readFileAsString(Path path) {
+ return Files.readString(path);
+ }
+
+ //----------------------------------------------------------------------------------------------------------------
+
+ @VisibleForTesting
+ record Configuration(@Nullable Descriptor defaultMessageDescriptor,
+ @Nullable Descriptor defaultKeyMessageDescriptor,
+ Map<Descriptor, Path> descriptorPaths,
+ Map<String, Descriptor> messageDescriptorMap,
+ Map<String, Descriptor> keyMessageDescriptorMap) {
+
+ static boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties) {
+ Optional<String> protobufFile = kafkaClusterProperties.getProperty("protobufFile", String.class);
+ Optional<List<String>> protobufFiles = kafkaClusterProperties.getListProperty("protobufFiles", String.class);
+ Optional<String> protobufFilesDir = kafkaClusterProperties.getProperty("protobufFilesDir", String.class);
+ return protobufFilesDir.isPresent()
+ || protobufFile.isPresent()
+ || protobufFiles.filter(files -> !files.isEmpty()).isPresent();
+ }
+
+ static Configuration create(PropertyResolver properties) {
+ var protobufSchemas = loadSchemas(
+ properties.getProperty("protobufFile", String.class),
+ properties.getListProperty("protobufFiles", String.class),
+ properties.getProperty("protobufFilesDir", String.class)
+ );
+
+ // Load all referenced message schemas and store their source proto file with the descriptors
+ Map<Descriptor, Path> descriptorPaths = new HashMap<>();
+ Optional<String> protobufMessageName = properties.getProperty("protobufMessageName", String.class);
+ protobufMessageName.ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
+
+ Optional<String> protobufMessageNameForKey =
+ properties.getProperty("protobufMessageNameForKey", String.class);
+ protobufMessageNameForKey
+ .ifPresent(messageName -> addProtobufSchema(descriptorPaths, protobufSchemas, messageName));
+
+ Optional<Map<String, String>> protobufMessageNameByTopic =
+ properties.getMapProperty("protobufMessageNameByTopic", String.class, String.class);
+ protobufMessageNameByTopic
+ .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
+
+ Optional<Map<String, String>> protobufMessageNameForKeyByTopic =
+ properties.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class);
+ protobufMessageNameForKeyByTopic
+ .ifPresent(messageNamesByTopic -> addProtobufSchemas(descriptorPaths, protobufSchemas, messageNamesByTopic));
+
+ // Fill dictionary for descriptor lookup by full message name
+ Map<String, Descriptor> descriptorMap = descriptorPaths.keySet().stream()
+ .collect(Collectors.toMap(Descriptor::getFullName, Function.identity()));
+
+ return new Configuration(
+ protobufMessageName.map(descriptorMap::get).orElse(null),
+ protobufMessageNameForKey.map(descriptorMap::get).orElse(null),
+ descriptorPaths,
+ protobufMessageNameByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of()),
+ protobufMessageNameForKeyByTopic.map(map -> populateDescriptors(descriptorMap, map)).orElse(Map.of())
+ );
+ }
+
+ private static Map.Entry<Descriptor, Path> getDescriptorAndPath(Map<Path, ProtobufSchema> protobufSchemas,
+ String msgName) {
+ return protobufSchemas.entrySet().stream()
+ .filter(schema -> schema.getValue().toDescriptor(msgName) != null)
+ .map(schema -> Map.entry(schema.getValue().toDescriptor(msgName), schema.getKey()))
+ .findFirst()
+ .orElseThrow(() -> new NullPointerException(
+ "The given message type not found in protobuf definition: " + msgName));
+ }
+
+ private static Map<String, Descriptor> populateDescriptors(Map<String, Descriptor> descriptorMap,
+ Map<String, String> messageNameMap) {
+ Map<String, Descriptor> descriptors = new HashMap<>();
+ for (Map.Entry<String, String> entry : messageNameMap.entrySet()) {
+ descriptors.put(entry.getKey(), descriptorMap.get(entry.getValue()));
+ }
+ return descriptors;
+ }
+
+ @VisibleForTesting
+ static Map<Path, ProtobufSchema> loadSchemas(Optional<String> protobufFile,
+ Optional<List<String>> protobufFiles,
+ Optional<String> protobufFilesDir) {
+ if (protobufFilesDir.isPresent()) {
+ if (protobufFile.isPresent() || protobufFiles.isPresent()) {
+ log.warn("protobufFile and protobufFiles properties will be ignored, since protobufFilesDir provided");
+ }
+ List<ProtoFile> loadedFiles = new ProtoSchemaLoader(protobufFilesDir.get()).load();
+ Map<String, ProtoFileElement> allPaths = loadedFiles.stream()
+ .collect(Collectors.toMap(f -> f.getLocation().getPath(), ProtoFile::toElement));
+ return loadedFiles.stream()
+ .collect(Collectors.toMap(
+ f -> Path.of(f.getLocation().getBase(), f.getLocation().getPath()),
+ f -> new ProtobufSchema(f.toElement(), List.of(), allPaths)));
+ }
+ //Supporting for backward-compatibility. Normally, protobufFilesDir setting should be used
+ return Stream.concat(
+ protobufFile.stream(),
+ protobufFiles.stream().flatMap(Collection::stream)
+ )
+ .distinct()
+ .map(Path::of)
+ .collect(Collectors.toMap(path -> path, path -> new ProtobufSchema(readFileAsString(path))));
+ }
+
+ private static void addProtobufSchema(Map<Descriptor, Path> descriptorPaths,
+ Map<Path, ProtobufSchema> protobufSchemas,
+ String messageName) {
+ var descriptorAndPath = getDescriptorAndPath(protobufSchemas, messageName);
+ descriptorPaths.put(descriptorAndPath.getKey(), descriptorAndPath.getValue());
+ }
+
+ private static void addProtobufSchemas(Map<Descriptor, Path> descriptorPaths,
+ Map<Path, ProtobufSchema> protobufSchemas,
+ Map<String, String> messageNamesByTopic) {
+ messageNamesByTopic.values().stream()
+ .map(msgName -> getDescriptorAndPath(protobufSchemas, msgName))
+ .forEach(entry -> descriptorPaths.put(entry.getKey(), entry.getValue()));
+ }
+ }
+
+ static class ProtoSchemaLoader {
+
+ private final Path baseLocation;
+
+ ProtoSchemaLoader(String baseLocationStr) {
+ this.baseLocation = Path.of(baseLocationStr);
+ if (!Files.isReadable(baseLocation)) {
+ throw new ValidationException("proto files directory not readable");
+ }
+ }
+
+ List<ProtoFile> load() {
+ Map<String, ProtoFile> knownTypes = knownProtoFiles();
+
+ Map<String, ProtoFile> filesByLocations = new HashMap<>();
+ filesByLocations.putAll(knownTypes);
+ filesByLocations.putAll(loadFilesWithLocations());
+
+ Linker linker = new Linker(
+ createFilesLoader(filesByLocations),
+ new ErrorCollector(),
+ true,
+ true
+ );
+ var schema = linker.link(filesByLocations.values());
+ linker.getErrors().throwIfNonEmpty();
+ return schema.getProtoFiles()
+ .stream()
+ .filter(p -> !knownTypes.containsKey(p.getLocation().getPath())) //filtering known types
+ .toList();
+ }
+
+ private Map<String, ProtoFile> knownProtoFiles() {
+ return Stream.of(
+ loadKnownProtoFile("google/type/color.proto", ColorProto.getDescriptor()),
+ loadKnownProtoFile("google/type/date.proto", DateProto.getDescriptor()),
+ loadKnownProtoFile("google/type/datetime.proto", DateTimeProto.getDescriptor()),
+ loadKnownProtoFile("google/type/dayofweek.proto", DayOfWeekProto.getDescriptor()),
+ loadKnownProtoFile("google/type/decimal.proto", com.google.type.DecimalProto.getDescriptor()),
+ loadKnownProtoFile("google/type/expr.proto", ExprProto.getDescriptor()),
+ loadKnownProtoFile("google/type/fraction.proto", FractionProto.getDescriptor()),
+ loadKnownProtoFile("google/type/interval.proto", IntervalProto.getDescriptor()),
+ loadKnownProtoFile("google/type/latlng.proto", LatLngProto.getDescriptor()),
+ loadKnownProtoFile("google/type/money.proto", MoneyProto.getDescriptor()),
+ loadKnownProtoFile("google/type/month.proto", MonthProto.getDescriptor()),
+ loadKnownProtoFile("google/type/phone_number.proto", PhoneNumberProto.getDescriptor()),
+ loadKnownProtoFile("google/type/postal_address.proto", PostalAddressProto.getDescriptor()),
+ loadKnownProtoFile("google/type/quaternion.prot", QuaternionProto.getDescriptor()),
+ loadKnownProtoFile("google/type/timeofday.proto", TimeOfDayProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/any.proto", AnyProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/api.proto", ApiProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/descriptor.proto", DescriptorProtos.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/duration.proto", DurationProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/empty.proto", EmptyProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/field_mask.proto", FieldMaskProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/source_context.proto", SourceContextProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/struct.proto", StructProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/timestamp.proto", TimestampProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/type.proto", TypeProto.getDescriptor()),
+ loadKnownProtoFile("google/protobuf/wrappers.proto", WrappersProto.getDescriptor())
+ ).collect(Collectors.toMap(p -> p.getLocation().getPath(), p -> p));
+ }
+
+ private ProtoFile loadKnownProtoFile(String path, Descriptors.FileDescriptor fileDescriptor) {
+ String protoFileString = null;
+ // know type file contains either message or enum
+ if (!fileDescriptor.getMessageTypes().isEmpty()) {
+ protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().get(0)).canonicalString();
+ } else if (!fileDescriptor.getEnumTypes().isEmpty()) {
+ protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().get(0)).canonicalString();
+ } else {
+ throw new IllegalStateException();
+ }
+ return ProtoFile.Companion.get(ProtoParser.Companion.parse(Location.get(path), protoFileString));
+ }
+
+ private Loader createFilesLoader(Map<String, ProtoFile> files) {
+ return new Loader() {
+ @Override
+ public @NotNull ProtoFile load(@NotNull String path) {
+ return Preconditions.checkNotNull(files.get(path), "ProtoFile not found for import '%s'", path);
+ }
+
+ @Override
+ public @NotNull Loader withErrors(@NotNull ErrorCollector errorCollector) {
+ return this;
+ }
+ };
+ }
+
+ @SneakyThrows
+ private Map<String, ProtoFile> loadFilesWithLocations() {
+ Map<String, ProtoFile> filesByLocations = new HashMap<>();
+ try (var files = Files.walk(baseLocation)) {
+ files.filter(p -> !Files.isDirectory(p) && p.toString().endsWith(".proto"))
+ .forEach(path -> {
+ // relative path will be used as "import" statement
+ String relativePath = baseLocation.relativize(path).toString();
+ var protoFileElement = ProtoParser.Companion.parse(
+ Location.get(baseLocation.toString(), relativePath),
+ readFileAsString(path)
+ );
+ filesByLocations.put(relativePath, ProtoFile.Companion.get(protoFileElement));
+ });
+ }
+ return filesByLocations;
+ }
+ }
+
}
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
index 3de915145f4..ab99df74de6 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java
@@ -10,14 +10,16 @@
import com.google.protobuf.util.JsonFormat;
import com.provectus.kafka.ui.serde.api.PropertyResolver;
import com.provectus.kafka.ui.serde.api.Serde;
+import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde.Configuration;
+import com.squareup.wire.schema.ProtoFile;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
-import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.SneakyThrows;
import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.springframework.util.ResourceUtils;
@@ -29,28 +31,29 @@ class ProtobufFileSerdeTest {
private static final String sampleBookMsgJson = "{\"version\": 1, \"people\": ["
+ "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"[email protected]\", \"phones\":[]}]}";
- private static final String sampleSensorMsgJson = "{ \"name\": \"My Sensor\", "
- + "\"temperature\": 20.5, \"humidity\": 50, \"door\": \"OPEN\" }";
+ private static final String sampleLangDescriptionMsgJson = "{ \"lang\": \"EN\", "
+ + "\"descr\": \"Some description here\" }";
// Sample message of type `test.Person`
private byte[] personMessageBytes;
// Sample message of type `test.AddressBook`
private byte[] addressBookMessageBytes;
- private byte[] sensorMessageBytes;
- private Path addressBookSchemaPath;
- private Path sensorSchemaPath;
-
+ private byte[] langDescriptionMessageBytes;
private Descriptors.Descriptor personDescriptor;
private Descriptors.Descriptor addressBookDescriptor;
- private Descriptors.Descriptor sensorDescriptor;
+ private Descriptors.Descriptor langDescriptionDescriptor;
private Map<Descriptors.Descriptor, Path> descriptorPaths;
@BeforeEach
void setUp() throws Exception {
- addressBookSchemaPath = ResourceUtils.getFile("classpath:address-book.proto").toPath();
- sensorSchemaPath = ResourceUtils.getFile("classpath:sensor.proto").toPath();
+ Map<Path, ProtobufSchema> files = ProtobufFileSerde.Configuration.loadSchemas(
+ Optional.empty(),
+ Optional.empty(),
+ Optional.of(protoFilesDir())
+ );
- ProtobufSchema addressBookSchema = new ProtobufSchema(Files.readString(addressBookSchemaPath));
+ Path addressBookSchemaPath = ResourceUtils.getFile("classpath:protobuf-serde/address-book.proto").toPath();
+ var addressBookSchema = files.get(addressBookSchemaPath);
var builder = addressBookSchema.newMessageBuilder("test.Person");
JsonFormat.parser().merge(samplePersonMsgJson, builder);
personMessageBytes = builder.build().toByteArray();
@@ -61,63 +64,241 @@ void setUp() throws Exception {
personDescriptor = addressBookSchema.toDescriptor("test.Person");
addressBookDescriptor = addressBookSchema.toDescriptor("test.AddressBook");
- ProtobufSchema sensorSchema = new ProtobufSchema(Files.readString(sensorSchemaPath));
- builder = sensorSchema.newMessageBuilder("iot.Sensor");
- JsonFormat.parser().merge(sampleSensorMsgJson, builder);
- sensorMessageBytes = builder.build().toByteArray();
- sensorDescriptor = sensorSchema.toDescriptor("iot.Sensor");
+ Path languageDescriptionPath = ResourceUtils.getFile("classpath:protobuf-serde/lang-description.proto").toPath();
+ var languageDescriptionSchema = files.get(languageDescriptionPath);
+ builder = languageDescriptionSchema.newMessageBuilder("test.LanguageDescription");
+ JsonFormat.parser().merge(sampleLangDescriptionMsgJson, builder);
+ langDescriptionMessageBytes = builder.build().toByteArray();
+ langDescriptionDescriptor = languageDescriptionSchema.toDescriptor("test.LanguageDescription");
descriptorPaths = Map.of(
personDescriptor, addressBookSchemaPath,
- addressBookDescriptor, addressBookSchemaPath,
- sensorDescriptor, sensorSchemaPath
+ addressBookDescriptor, addressBookSchemaPath
);
}
-
@Test
- void testDeserialize() {
- var messageNameMap = Map.of(
- "persons", personDescriptor,
- "books", addressBookDescriptor
- );
- var keyMessageNameMap = Map.of(
- "books", addressBookDescriptor);
-
- var serde = new ProtobufFileSerde();
- serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
- );
+ void loadsAllProtoFiledFromTargetDirectory() throws Exception {
+ var protoDir = ResourceUtils.getFile("classpath:protobuf-serde/").getPath();
+ List<ProtoFile> files = new ProtobufFileSerde.ProtoSchemaLoader(protoDir).load();
+ assertThat(files).hasSize(4);
+ assertThat(files)
+ .map(f -> f.getLocation().getPath())
+ .containsExactlyInAnyOrder(
+ "language/language.proto",
+ "sensor.proto",
+ "address-book.proto",
+ "lang-description.proto"
+ );
+ }
- var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
- .deserialize(null, personMessageBytes);
- assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
+ @SneakyThrows
+ private String protoFilesDir() {
+ return ResourceUtils.getFile("classpath:protobuf-serde/").getPath();
+ }
- var deserializedBook = serde.deserializer("books", Serde.Target.KEY)
- .deserialize(null, addressBookMessageBytes);
- assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
+ @Nested
+ class ConfigurationTests {
+
+ @Test
+ void canBeAutoConfiguredReturnsNoProtoPropertiesProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isFalse();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfNoProtoFileHasBeenProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFile", String.class))
+ .thenReturn(Optional.of("file.proto"));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfProtoFilesHasBeenProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getListProperty("protobufFiles", String.class))
+ .thenReturn(Optional.of(List.of("file.proto")));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void canBeAutoConfiguredReturnsTrueIfProtoFilesDirProvided() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of("/filesDir"));
+ assertThat(Configuration.canBeAutoConfigured(resolver))
+ .isTrue();
+ }
+
+ @Test
+ void unknownSchemaAsDefaultThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.NotExistent"));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsDefaultForKeyThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.NotExistent"));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsTopicSchemaThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void unknownSchemaAsTopicSchemaForKeyThrowsException() {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
+
+ assertThatThrownBy(() -> Configuration.create(resolver))
+ .isInstanceOf(NullPointerException.class)
+ .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
+ }
+
+ @Test
+ void createConfigureFillsDescriptorMappingsWhenProtoFilesListProvided() throws Exception {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFile", String.class))
+ .thenReturn(Optional.of(
+ ResourceUtils.getFile("classpath:protobuf-serde/sensor.proto").getPath()));
+
+ when(resolver.getListProperty("protobufFiles", String.class))
+ .thenReturn(Optional.of(
+ List.of(
+ ResourceUtils.getFile("classpath:protobuf-serde/address-book.proto").getPath())));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.Sensor"));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.AddressBook"));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Sensor",
+ "topic2", "test.AddressBook")));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Person",
+ "topic2", "test.AnotherPerson")));
+
+ var configuration = Configuration.create(resolver);
+
+ assertThat(configuration.defaultMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.Sensor"));
+ assertThat(configuration.defaultKeyMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.AddressBook"));
+
+ assertThat(configuration.messageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Sensor"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AddressBook"));
+
+ assertThat(configuration.keyMessageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Person"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AnotherPerson"));
+ }
+
+ @Test
+ void createConfigureFillsDescriptorMappingsWhenProtoFileDirProvided() throws Exception {
+ PropertyResolver resolver = mock(PropertyResolver.class);
+ when(resolver.getProperty("protobufFilesDir", String.class))
+ .thenReturn(Optional.of(protoFilesDir()));
+
+ when(resolver.getProperty("protobufMessageName", String.class))
+ .thenReturn(Optional.of("test.Sensor"));
+
+ when(resolver.getProperty("protobufMessageNameForKey", String.class))
+ .thenReturn(Optional.of("test.AddressBook"));
+
+ when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Sensor",
+ "topic2", "test.LanguageDescription")));
+
+ when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
+ .thenReturn(Optional.of(
+ Map.of(
+ "topic1", "test.Person",
+ "topic2", "test.AnotherPerson")));
+
+ var configuration = Configuration.create(resolver);
+
+ assertThat(configuration.defaultMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.Sensor"));
+ assertThat(configuration.defaultKeyMessageDescriptor())
+ .matches(d -> d.getFullName().equals("test.AddressBook"));
+
+ assertThat(configuration.messageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Sensor"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.LanguageDescription"));
+
+ assertThat(configuration.keyMessageDescriptorMap())
+ .containsOnlyKeys("topic1", "topic2")
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.Person"))
+ .anySatisfy((topic, descr) -> assertThat(descr.getFullName()).isEqualTo("test.AnotherPerson"));
+ }
}
@Test
- void testDeserializeMultipleProtobuf() {
+ void deserializeUsesTopicsMappingToFindMsgDescriptor() {
var messageNameMap = Map.of(
"persons", personDescriptor,
"books", addressBookDescriptor,
- "sensors", sensorDescriptor
+ "langs", langDescriptionDescriptor
);
var keyMessageNameMap = Map.of(
"books", addressBookDescriptor);
var serde = new ProtobufFileSerde();
serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
+ new Configuration(
+ null,
+ null,
+ descriptorPaths,
+ messageNameMap,
+ keyMessageNameMap
+ )
);
var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
@@ -128,20 +309,22 @@ void testDeserializeMultipleProtobuf() {
.deserialize(null, addressBookMessageBytes);
assertJsonEquals(sampleBookMsgJson, deserializedBook.getResult());
- var deserializedSensor = serde.deserializer("sensors", Serde.Target.VALUE)
- .deserialize(null, sensorMessageBytes);
- assertJsonEquals(sampleSensorMsgJson, deserializedSensor.getResult());
+ var deserializedSensor = serde.deserializer("langs", Serde.Target.VALUE)
+ .deserialize(null, langDescriptionMessageBytes);
+ assertJsonEquals(sampleLangDescriptionMsgJson, deserializedSensor.getResult());
}
@Test
- void testDefaultMessageName() {
+ void deserializeUsesDefaultDescriptorIfTopicMappingNotFound() {
var serde = new ProtobufFileSerde();
serde.configure(
- personDescriptor,
- addressBookDescriptor,
- descriptorPaths,
- Map.of(),
- Map.of()
+ new Configuration(
+ personDescriptor,
+ addressBookDescriptor,
+ descriptorPaths,
+ Map.of(),
+ Map.of()
+ )
);
var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
@@ -154,230 +337,57 @@ void testDefaultMessageName() {
}
@Test
- void testSerialize() {
- var messageNameMap = Map.of(
- "persons", personDescriptor,
- "books", addressBookDescriptor
- );
- var keyMessageNameMap = Map.of(
- "books", addressBookDescriptor);
-
- var serde = new ProtobufFileSerde();
- serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
- );
-
- var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"[email protected]\" }");
- assertThat(personBytes).isEqualTo(personMessageBytes);
-
- var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"[email protected]\" }]}");
- assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
- }
-
- @Test
- void testSerializeMultipleProtobuf() {
+ void serializeUsesTopicsMappingToFindMsgDescriptor() {
var messageNameMap = Map.of(
"persons", personDescriptor,
"books", addressBookDescriptor,
- "sensors", sensorDescriptor
+ "langs", langDescriptionDescriptor
);
var keyMessageNameMap = Map.of(
"books", addressBookDescriptor);
var serde = new ProtobufFileSerde();
serde.configure(
- null,
- null,
- descriptorPaths,
- messageNameMap,
- keyMessageNameMap
+ new Configuration(
+ null,
+ null,
+ descriptorPaths,
+ messageNameMap,
+ keyMessageNameMap
+ )
);
- var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"[email protected]\" }");
- assertThat(personBytes).isEqualTo(personMessageBytes);
+ var personBytes = serde.serializer("langs", Serde.Target.VALUE)
+ .serialize(sampleLangDescriptionMsgJson);
+ assertThat(personBytes).isEqualTo(langDescriptionMessageBytes);
var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"[email protected]\" }]}");
+ .serialize(sampleBookMsgJson);
assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
-
- var sensorBytes = serde.serializer("sensors", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Sensor\", \"temperature\": 20.5, \"humidity\": 50, \"door\": \"OPEN\" }");
- assertThat(sensorBytes).isEqualTo(sensorMessageBytes);
}
@Test
- void testSerializeDefaults() {
+ void serializeUsesDefaultDescriptorIfTopicMappingNotFound() {
var serde = new ProtobufFileSerde();
serde.configure(
- personDescriptor,
- addressBookDescriptor,
- descriptorPaths,
- Map.of(),
- Map.of()
+ new Configuration(
+ personDescriptor,
+ addressBookDescriptor,
+ descriptorPaths,
+ Map.of(),
+ Map.of()
+ )
);
var personBytes = serde.serializer("persons", Serde.Target.VALUE)
- .serialize("{ \"name\": \"My Name\",\"id\": 101, \"email\": \"[email protected]\" }");
+ .serialize(samplePersonMsgJson);
assertThat(personBytes).isEqualTo(personMessageBytes);
var booksBytes = serde.serializer("books", Serde.Target.KEY)
- .serialize("{\"version\": 1, \"people\": ["
- + "{ \"name\": \"My Name\",\"id\": 102, \"email\": \"[email protected]\" }]}");
+ .serialize(sampleBookMsgJson);
assertThat(booksBytes).isEqualTo(addressBookMessageBytes);
}
- @Test
- void canBeAutoConfiguredReturnsFalseIfNoProtoFilesHaveBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isFalse();
- }
-
- @Test
- void canBeAutoConfiguredReturnsFalseIfProtoFilesListIsEmpty() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of()));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isFalse();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfNoProtoFileHasBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class)).thenReturn(Optional.of("file.proto"));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfProtoFilesHasBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of("file.proto")));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void canBeAutoConfiguredReturnsTrueIfProtoFileAndProtoFilesHaveBeenProvided() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class)).thenReturn(Optional.of("file1.proto"));
- when(resolver.getListProperty("protobufFiles", String.class)).thenReturn(Optional.of(List.of("file2.proto")));
-
- var serde = new ProtobufFileSerde();
- boolean startupSuccessful = serde.canBeAutoConfigured(resolver, resolver);
- assertThat(startupSuccessful).isTrue();
- }
-
- @Test
- void listOfProtobufFilesIsJoined() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getProperty("protobufFile", String.class))
- .thenReturn(Optional.of(addressBookSchemaPath.toString()));
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- Map<String, String> protobufMessageNameByTopic = Map.of(
- "persons", "test.Person",
- "books", "test.AddressBook",
- "sensors", "iot.Sensor");
- when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
- .thenReturn(Optional.of(protobufMessageNameByTopic));
-
- var serde = new ProtobufFileSerde();
- serde.configure(resolver, resolver, resolver);
-
- var deserializedPerson = serde.deserializer("persons", Serde.Target.VALUE)
- .deserialize(null, personMessageBytes);
- assertJsonEquals(samplePersonMsgJson, deserializedPerson.getResult());
-
- var deserializedSensor = serde.deserializer("sensors", Serde.Target.VALUE)
- .deserialize(null, sensorMessageBytes);
- assertJsonEquals(sampleSensorMsgJson, deserializedSensor.getResult());
- }
-
- @Test
- void unknownSchemaAsDefaultThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.NotExistent"));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsDefaultForKeyThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
- when(resolver.getProperty("protobufMessageNameForKey", String.class))
- .thenReturn(Optional.of("test.NotExistent"));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsTopicSchemaThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- when(resolver.getMapProperty("protobufMessageNameByTopic", String.class, String.class))
- .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
- @Test
- void unknownSchemaAsTopicSchemaForKeyThrowsException() {
- PropertyResolver resolver = mock(PropertyResolver.class);
- when(resolver.getListProperty("protobufFiles", String.class))
- .thenReturn(Optional.of(List.of(addressBookSchemaPath.toString(), sensorSchemaPath.toString())));
- when(resolver.getProperty("protobufMessageName", String.class))
- .thenReturn(Optional.of("test.AddressBook"));
-
- when(resolver.getMapProperty("protobufMessageNameForKeyByTopic", String.class, String.class))
- .thenReturn(Optional.of(Map.of("persons", "test.NotExistent")));
-
- var serde = new ProtobufFileSerde();
- assertThatThrownBy(() -> serde.configure(resolver, resolver, resolver))
- .isInstanceOf(NullPointerException.class)
- .hasMessage("The given message type not found in protobuf definition: test.NotExistent");
- }
-
@SneakyThrows
private void assertJsonEquals(String expectedJson, String actualJson) {
var mapper = new JsonMapper();
diff --git a/kafka-ui-api/src/test/resources/address-book.proto b/kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
similarity index 81%
rename from kafka-ui-api/src/test/resources/address-book.proto
rename to kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
index 72eab7aab8c..f6c9a5d7880 100644
--- a/kafka-ui-api/src/test/resources/address-book.proto
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/address-book.proto
@@ -1,16 +1,10 @@
-// [START declaration]
syntax = "proto3";
package test;
-// [END declaration]
-
-// [START java_declaration]
option java_multiple_files = true;
option java_package = "com.example.tutorial.protos";
option java_outer_classname = "AddressBookProtos";
-// [END java_declaration]
-// [START messages]
message Person {
string name = 1;
int32 id = 2; // Unique ID number for this person.
@@ -31,9 +25,13 @@ message Person {
}
+message AnotherPerson {
+ string name = 1;
+ string surname = 2;
+}
+
// Our address book file is just one of these.
message AddressBook {
int32 version = 1;
repeated Person people = 2;
}
-// [END messages]
\ No newline at end of file
diff --git a/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto b/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto
new file mode 100644
index 00000000000..8e213d58c41
--- /dev/null
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/lang-description.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+
+package test;
+
+import "language/language.proto";
+import "google/protobuf/wrappers.proto";
+
+message LanguageDescription {
+ test.lang.Language lang = 1;
+ google.protobuf.StringValue descr = 2;
+}
diff --git a/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto b/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto
new file mode 100644
index 00000000000..7ef30eab236
--- /dev/null
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/language/language.proto
@@ -0,0 +1,11 @@
+syntax = "proto3";
+package test.lang;
+
+enum Language {
+ DE = 0;
+ EN = 1;
+ ES = 2;
+ FR = 3;
+ PL = 4;
+ RU = 5;
+}
diff --git a/kafka-ui-api/src/test/resources/sensor.proto b/kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
similarity index 93%
rename from kafka-ui-api/src/test/resources/sensor.proto
rename to kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
index 33b8c387e4c..3bde20a3ae2 100644
--- a/kafka-ui-api/src/test/resources/sensor.proto
+++ b/kafka-ui-api/src/test/resources/protobuf-serde/sensor.proto
@@ -1,5 +1,5 @@
syntax = "proto3";
-package iot;
+package test;
message Sensor {
string name = 1;
| train | val | 2023-03-07T10:36:12 | "2023-01-31T12:52:37Z" | gassmannalexander | train |
provectus/kafka-ui/1452_3376 | provectus/kafka-ui | provectus/kafka-ui/1452 | provectus/kafka-ui/3376 | [
"connected"
] | e584b15d97d0c5e14e4a1aa24a951dcd02ee336b | 741bbc1be1bd8dba4fdfe854ca01f6f60b80c0a0 | [
"Hey, thanks for reaching out.\r\n\r\nThis is for sure to be done via ignoring permission issues. We'll get this done in future releases :)\r\n\r\n",
"#928 #753 #482",
"@MaksimMyshkin Currently kafka-ui needs WRITE and DESCRIBE_CONFIGS permissions on a topic to display it correctly. If the account used to configure kafka-ui has some access to any topic where DESCRIBE_CONFIGS is not allowed no topics is displayed at all. You might want to add DESCRIBE_CONFIGS to all your topics as a workaround. ",
"Thank you for information. Unfortunately our team can't get access to all topics in the cluster (only to our topics). \r\n\r\nAlso I dug a little and found out that errors is appearing on creation of AdminClient. It is even before getting information about topics. Is this code a reason why rights to all topics required? Looks like AdminClient requires wider permissions than it is needed for simpler scenarios (testing by producing and reading topics messages for example).",
"@MaksimMyshkin Sorry, looks like I didn't make myself clear enough. It was not about all topics in the cluster, only about \"your\" topics. If you have any access to a topic then WRITE and DESCRIBE_CONFIGS should be allowed for that topic.",
"@vdanylenko hey! Any updates here? What's the status on this issue? ",
"I afraid permissions that @vdanylenko described is not enough to work with topics from kafka-ui. At the moment I think that problem due to using AdminClient and I don't known how to work around it right. I just trying to find a reason to the problem that I (or someone else) can help to fix it sooner(",
"@MaksimMyshkin hey, wanna hop on our discord? We can try to investigate this. The link is available in README.",
"This issue has been automatically marked as stale because no requested feedback has been provided. It will be closed if no further activity occurs. Thank you for your contributions.",
"Using Kafka-UI in a multi-tenant scenario is not working currently. Main reason is, that there are no clusterwide permissions assigned to users. So referring to this table the following permissions are granted:\r\nhttps://docs.confluent.io/platform/current/kafka/authorization.html\r\n\r\nTopics:\r\nDescribe, DescribeConfigs, Read and if needed Write (single topic oder wildcard) ; note: read implies describe\r\nGroup:\r\nDescribe\r\nTransactional ID:\r\nDescribe\r\n\r\nThe following data is available then (without cluster permissions) in a different tool:\r\n\r\n\r\n",
"@moinmoin thanks for the feedback!",
"Faced with similar issue as @MaksimMyshkin where same proposed solution would be good enough for our use (relatively small tradeoff in situations where the list of topics is anyways known and does not grow with such speed that would cause the manual maintenance on the list to be a problem).\r\n\r\nCode requiring alternative or limited-permissions approach (rather than it just crashing): https://github.com/provectus/kafka-ui/blob/4660bb8759e33a2a229bcdc6343d3be1bdfd2f28/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java#L323-L337\r\n\r\nReached this realization by running the application with `LOGGING_LEVEL_ROOT: debug` where it became clear that from the list of commands executed on startup:\r\n\r\n```txt\r\nAPI_VERSIONS\r\nMETADATA\r\nDESCRIBE_CLUSTER\r\nDESCRIBE_CONFIGS <=== this one fails\r\n```\r\n\r\nUsage context: getTopicsData > loadTopicsConfig > describeConfigs",
"> The following data is available then (without cluster permissions) in a different tool:\r\n\r\n@moinmoin - Don't hold out on us - what tool is this?",
"@TaylorSMarks This is the https://www.conduktor.io/download fat client.",
"I have opened a small PR which fixes the root issue (you lack permissions to run describe cluster), although I accomplished it by just ignoring the exception you get instead of going into a crash loop. I don't think adding the feature to be able to run on a subset of topics would actually fix the root issue, since the Admin Client would still fail to create with an auth exception.\r\n\r\nEverything runs pretty ok with the change.\r\n\r\nHere's the PR:\r\nhttps://github.com/provectus/kafka-ui/pull/3325\r\n\r\nThe directions for opening a PR said I should request reviewers and add myself as the owner for the PR, but I lack permissions to do either of those things. Here's hoping the right people see, review, approve, and merge the PR...\r\n\r\nEdit: My PR failed the E2E testing but it seems unrelated to my change. Every build from the last 6 days has been failing this same check.",
"Can confirm that this solves our issue :) Thank you @Haarolean @TaylorSMarks @iliax!",
"@allanpaiste great to hear that! "
] | [] | "2023-02-20T13:42:35Z" | [
"type/enhancement",
"scope/backend"
] | Support running kafka-ui on subset of topics to get around auth errors | ### Is your proposal related to a problem?
In our company we have a Kafka cluster shared by all teams, but it does not have a convenient UI for our needs. Our team tried to deploy kafka-ui with an account that only has access to our own topics, but it failed with an access error. It would be useful to be able to run kafka-ui on the subset of topics for which the account has the needed rights.
An additional question on the described problem: which rights does kafka-ui need to run correctly? I can't find this on the project page.
### Describe the solution you'd like
The easiest solution (from my perspective) is to provide a config with a list of topics to which the specified account has access.
As an alternative, kafka-ui could simply skip authorization errors on topics.
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index ea56edcf96e..7cdf2ef16d7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -62,9 +62,11 @@
import org.apache.kafka.common.TopicPartitionReplica;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.config.ConfigResource;
+import org.apache.kafka.common.errors.ClusterAuthorizationException;
import org.apache.kafka.common.errors.GroupIdNotFoundException;
import org.apache.kafka.common.errors.GroupNotEmptyException;
import org.apache.kafka.common.errors.InvalidRequestException;
+import org.apache.kafka.common.errors.TopicAuthorizationException;
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
@@ -176,6 +178,7 @@ public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
}
//NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
+ //and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
// we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
@@ -196,7 +199,8 @@ private Mono<Map<String, List<ConfigEntry>>> getTopicsConfigImpl(Collection<Stri
client.describeConfigs(
resources,
new DescribeConfigsOptions().includeSynonyms(true).includeDocumentation(includeDoc)).values(),
- UnknownTopicOrPartitionException.class
+ UnknownTopicOrPartitionException.class,
+ TopicAuthorizationException.class
).map(config -> config.entrySet().stream()
.collect(toMap(
c -> c.getKey().name(),
@@ -208,11 +212,17 @@ private static Mono<Map<Integer, List<ConfigEntry>>> loadBrokersConfig(AdminClie
.map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId)))
.collect(toList());
return toMono(client.describeConfigs(resources).all())
- .doOnError(InvalidRequestException.class,
- th -> log.trace("Error while getting broker {} configs", brokerIds, th))
// some kafka backends (like MSK serverless) do not support broker's configs retrieval,
// in that case InvalidRequestException will be thrown
- .onErrorResume(InvalidRequestException.class, th -> Mono.just(Map.of()))
+ .onErrorResume(InvalidRequestException.class, th -> {
+ log.trace("Error while getting broker {} configs", brokerIds, th);
+ return Mono.just(Map.of());
+ })
+ // there are situations when kafka-ui user has no DESCRIBE_CONFIGS permission on cluster
+ .onErrorResume(ClusterAuthorizationException.class, th -> {
+ log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th);
+ return Mono.just(Map.of());
+ })
.map(config -> config.entrySet().stream()
.collect(toMap(
c -> Integer.valueOf(c.getKey().name()),
@@ -242,13 +252,16 @@ public Mono<Map<String, TopicDescription>> describeTopics(Collection<String> top
private Mono<Map<String, TopicDescription>> describeTopicsImpl(Collection<String> topics) {
return toMonoWithExceptionFilter(
- client.describeTopics(topics).values(),
- UnknownTopicOrPartitionException.class
+ client.describeTopics(topics).topicNameValues(),
+ UnknownTopicOrPartitionException.class,
+ // we only describe topics that we see from listTopics() API, so we should have permission to do it,
+ // but also adding this exception here for rare case when access restricted after we called listTopics()
+ TopicAuthorizationException.class
);
}
/**
- * Returns TopicDescription mono, or Empty Mono if topic not found.
+ * Returns TopicDescription mono, or Empty Mono if topic not visible.
*/
public Mono<TopicDescription> describeTopic(String topic) {
return describeTopics(List.of(topic)).flatMap(m -> Mono.justOrEmpty(m.get(topic)));
@@ -262,10 +275,11 @@ public Mono<TopicDescription> describeTopic(String topic) {
* such topics in resulting map.
* <p/>
* This method converts input map into Mono[Map] ignoring keys for which KafkaFutures
- * finished with <code>clazz</code> exception and empty Monos.
+ * finished with <code>classes</code> exceptions and empty Monos.
*/
+ @SafeVarargs
static <K, V> Mono<Map<K, V>> toMonoWithExceptionFilter(Map<K, KafkaFuture<V>> values,
- Class<? extends KafkaException> clazz) {
+ Class<? extends KafkaException>... classes) {
if (values.isEmpty()) {
return Mono.just(Map.of());
}
@@ -277,7 +291,7 @@ static <K, V> Mono<Map<K, V>> toMonoWithExceptionFilter(Map<K, KafkaFuture<V>> v
.defaultIfEmpty(Tuples.of(e.getKey(), Optional.empty())) //tracking empty Monos
.onErrorResume(
// tracking Monos with suppressible error
- th -> th.getClass().isAssignableFrom(clazz),
+ th -> Stream.of(classes).anyMatch(clazz -> th.getClass().isAssignableFrom(clazz)),
th -> Mono.just(Tuples.of(e.getKey(), Optional.empty()))))
.toList();
@@ -300,6 +314,7 @@ public Mono<Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>>> descr
Collection<Integer> brokerIds) {
return toMono(client.describeLogDirs(brokerIds).all())
.onErrorResume(UnsupportedVersionException.class, th -> Mono.just(Map.of()))
+ .onErrorResume(ClusterAuthorizationException.class, th -> Mono.just(Map.of()))
.onErrorResume(th -> true, th -> {
log.warn("Error while calling describeLogDirs", th);
return Mono.just(Map.of());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
index ab22cecaafc..b9038677267 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
@@ -162,9 +162,14 @@ public Mono<InternalTopic> getTopicDetails(KafkaCluster cluster, String topicNam
}
public Mono<List<ConfigEntry>> getTopicConfigs(KafkaCluster cluster, String topicName) {
+ // there 2 case that we cover here:
+ // 1. topic not found/visible - describeTopic() will be empty and we will throw TopicNotFoundException
+ // 2. topic is visible, but we don't have DESCRIBE_CONFIG permission - we should return empty list
return adminClientService.get(cluster)
- .flatMap(ac -> ac.getTopicsConfig(List.of(topicName), true))
- .map(m -> m.values().stream().findFirst().orElseThrow(TopicNotFoundException::new));
+ .flatMap(ac -> ac.describeTopic(topicName)
+ .switchIfEmpty(Mono.error(new TopicNotFoundException()))
+ .then(ac.getTopicsConfig(List.of(topicName), true))
+ .map(m -> m.values().stream().findFirst().orElse(List.of())));
}
private Mono<InternalTopic> createTopic(KafkaCluster c, ReactiveAdminClient adminClient,
| null | val | val | 2023-02-21T14:40:05 | "2022-01-22T09:18:26Z" | MaksimMyshkin | train |
provectus/kafka-ui/3288_3377 | provectus/kafka-ui | provectus/kafka-ui/3288 | provectus/kafka-ui/3377 | [
"connected"
] | ea348102c22a4eb4fdaa24a635ca7f4184970442 | ad5b0d44f0dd1a3455d03dfd18ea69758e450104 | [
"Hello there sdahlbac! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reporting, we'll take a look.\r\n\r\nif you wanna take a look yourself, I'd start with this class: `com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java:24`",
"@sdahlbac, thank you for the issue. Can you please confirm that it was fixed in `master`-tagged image (will be available in an hour)?"
] | [] | "2023-02-20T19:55:52Z" | [
"type/bug",
"scope/backend",
"status/accepted"
] | ksql: schema vs data mismatch when structs |
**Describe the bug** (Actual behavior)
When the stream schema contains structs, the column headers descend into the struct's fields while the actual data in the table does not. The data looks correct, but the headers look bogus.
**Expected behavior**
Headers should match the data
**Set up**
App version: v0.5.0 ([027d9b4](https://github.com/provectus/kafka-ui/commit/027d9b4))
**Steps to Reproduce**
```ksql
CREATE OR REPLACE STREAM `raw_alfabet_applications` (
`RefStr` VARCHAR KEY,
`ClassName` VARCHAR,
`Values` STRUCT<`Name` VARCHAR, `Short Name` VARCHAR, `Version` VARCHAR, `Description` VARCHAR, `Start Date` VARCHAR, `End Date` VARCHAR, `Object State` VARCHAR, `Status` VARCHAR, `Architecture Type` VARCHAR, `Development Type` VARCHAR, `Authentication` VARCHAR, `Alias` VARCHAR, `Recommendation` VARCHAR, `Strategic` VARCHAR, `PACE Governance` VARCHAR, `Cloud Migration Strategy` VARCHAR, `Compliance Regulation` VARCHAR, `Successor` VARCHAR, `Number of Users` VARCHAR, `Confidentiality` VARCHAR, `Integrity` VARCHAR, `Availability` VARCHAR, `Plan Start Date` VARCHAR, `Pilot Start Date` VARCHAR, `Production Start Date` VARCHAR, `Sunset Start Date` VARCHAR, `Retired Start Date` VARCHAR, `Lifecycle End Date` VARCHAR, `1 SW License Cost` VARCHAR, `2 SW Maintenance Cost` VARCHAR, `3 Infrastructure Cost` VARCHAR, `4 Service Cost` VARCHAR, `5 OpEx Other` VARCHAR, `Application Manager [Person]` VARCHAR, `Business Owner [Person]` VARCHAR, `IT Owner [Person]` VARCHAR, `Architect [Person]` VARCHAR, `Stakeholder [Person]` VARCHAR, `Business Owner [Organization]` VARCHAR, `IT Owner [Organization]` VARCHAR, `Operations [Organization]` VARCHAR, `Stakeholder [Organization]` VARCHAR>
) WITH (
KAFKA_TOPIC='raw_alfabet_applications',
KEY_FORMAT='KAFKA',
VALUE_FORMAT='json'
);
```
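A note on why structs break the header row: the schema string returned for a query over this stream contains commas both between top-level columns and inside the `STRUCT<...>` definition, so splitting it on every comma yields far more "columns" than the rows actually have. Below is a sketch of nesting-aware splitting, written in TypeScript purely for illustration — the real parsing lives in the Java backend (the response parser touched by the linked PR), and the function name here is mine, not the project's API.

```typescript
// Splits a ksqlDB SELECT header such as
//   "`RefStr` STRING KEY, `Values` STRUCT<`Name` STRING, `Short Name` STRING>"
// into one entry per top-level column, ignoring commas that appear inside
// STRUCT<...> definitions or inside backtick-quoted names.
function splitHeaderColumns(header: string): string[] {
  const columns: string[] = [];
  let current = '';
  let structDepth = 0;
  let inBackticks = false;

  for (const ch of header) {
    if (ch === '`') {
      inBackticks = !inBackticks;
    } else if (!inBackticks && ch === '<') {
      structDepth += 1;
    } else if (!inBackticks && ch === '>') {
      structDepth -= 1;
    } else if (ch === ',' && structDepth === 0 && !inBackticks) {
      // Top-level comma: close the current column and start the next one.
      columns.push(current.trim());
      current = '';
      continue;
    }
    current += ch;
  }
  if (current.trim() !== '') {
    columns.push(current.trim());
  }
  return columns;
}
```

For the stream above this yields one entry per declared column (`RefStr`, `ClassName`, `Values`) rather than one per struct field.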
**Screenshots**
<img width="1629" alt="image" src="https://user-images.githubusercontent.com/659363/215692850-82210e3f-5d85-43a0-86f2-6883e94d5664.png">
**Additional context**
If I was to look into this myself, where would I start looking? | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/response/ResponseParserTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
index 4781d159e7b..647e23a78ec 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/response/ResponseParser.java
@@ -3,14 +3,13 @@
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.json.JsonMapper;
import com.fasterxml.jackson.databind.node.TextNode;
+import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.provectus.kafka.ui.exception.KsqlApiException;
import com.provectus.kafka.ui.service.ksql.KsqlApiClient;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.List;
import java.util.Optional;
-import java.util.stream.Collectors;
import org.springframework.web.reactive.function.client.WebClientResponseException;
public class ResponseParser {
@@ -24,11 +23,7 @@ public static Optional<KsqlApiClient.KsqlResponseTable> parseSelectResponse(Json
return Optional.of(
KsqlApiClient.KsqlResponseTable.builder()
.header("Schema")
- .columnNames(
- Arrays.stream(jsonNode.get("header").get("schema").asText().split(","))
- .map(String::trim)
- .collect(Collectors.toList())
- )
+ .columnNames(parseSelectHeadersString(jsonNode.get("header").get("schema").asText()))
.build());
}
if (arrayFieldNonEmpty(jsonNode, "row")) {
@@ -46,6 +41,34 @@ public static Optional<KsqlApiClient.KsqlResponseTable> parseSelectResponse(Json
return Optional.empty();
}
+ @VisibleForTesting
+ static List<String> parseSelectHeadersString(String str) {
+ List<String> headers = new ArrayList<>();
+ int structNesting = 0;
+ boolean quotes = false;
+ var headerBuilder = new StringBuilder();
+ for (char ch : str.toCharArray()) {
+ if (ch == '<') {
+ structNesting++;
+ } else if (ch == '>') {
+ structNesting--;
+ } else if (ch == '`') {
+ quotes = !quotes;
+ } else if (ch == ' ' && headerBuilder.isEmpty()) {
+ continue; //skipping leading & training whitespaces
+ } else if (ch == ',' && structNesting == 0 && !quotes) {
+ headers.add(headerBuilder.toString());
+ headerBuilder = new StringBuilder();
+ continue;
+ }
+ headerBuilder.append(ch);
+ }
+ if (!headerBuilder.isEmpty()) {
+ headers.add(headerBuilder.toString());
+ }
+ return headers;
+ }
+
public static KsqlApiClient.KsqlResponseTable errorTableWithTextMsg(String errorText) {
return KsqlApiClient.KsqlResponseTable.builder()
.header("Execution error")
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/response/ResponseParserTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/response/ResponseParserTest.java
new file mode 100644
index 00000000000..02552449336
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/response/ResponseParserTest.java
@@ -0,0 +1,25 @@
+package com.provectus.kafka.ui.service.ksql.response;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.junit.jupiter.api.Test;
+
+class ResponseParserTest {
+
+ @Test
+ void parsesSelectHeaderIntoColumnNames() {
+ assertThat(ResponseParser.parseSelectHeadersString("`inQuotes` INT, notInQuotes INT"))
+ .containsExactly("`inQuotes` INT", "notInQuotes INT");
+
+ assertThat(ResponseParser.parseSelectHeadersString("`name with comma,` INT, name2 STRING"))
+ .containsExactly("`name with comma,` INT", "name2 STRING");
+
+ assertThat(ResponseParser.parseSelectHeadersString(
+ "`topLvl` INT, `struct` STRUCT<`nested1` STRING, anotherName STRUCT<nested2 INT>>"))
+ .containsExactly(
+ "`topLvl` INT",
+ "`struct` STRUCT<`nested1` STRING, anotherName STRUCT<nested2 INT>>"
+ );
+ }
+
+}
| val | test | 2023-02-23T12:47:59 | "2023-01-31T07:27:58Z" | sdahlbac | train |
provectus/kafka-ui/2329_3383 | provectus/kafka-ui | provectus/kafka-ui/2329 | provectus/kafka-ui/3383 | [
"connected"
] | c153d6f6348f5fd81ecda6df646fc4cdd9840074 | 4a7893ff1b6a13e7509312742ce1870110fb29fa | [
"Hi @Haarolean , I can try and fix the padding. \r\n\r\n- Is there any specific amount of padding we're looking at?\r\n- Any other detail?\r\n\r\n(participate in the spirit of Hacktoberfest)",
"@prateekvarma thank you.\r\nPlease use [this](https://www.figma.com/file/ZkvysdRUmt9p2PDNU3a9WU/Kafka-UI?node-id=117%3A2644) as a reference",
"@prateekvarma any luck?",
"@David-DB88 https://www.figma.com/file/ZkvysdRUmt9p2PDNU3a9WU/Kafka-UI?node-id=117%3A2644"
] | [
"it is not a good practice to check the text value of a prop , if we want to do something with the size , we should for example pass a prop for `size` and it can several values lets say `large` `small` `medium` with normally typed in the prop `type: large | small | medium` and choose the value of the text accordingly.\r\n\r\nwe should always make `typescript` help us rather than the opposite.",
"ok",
"you can do the check and put the values here directly.\r\n\r\n we don't have to put some values in the `theme` because of a some condition. it makes the `theme` larger and over complicates some styling logic\r\n\r\n`size == 'small'? '16px' : '24px';`",
"`title` props are not being used , we should remove them.",
"i removed table title styles from theme ",
"@Mgrdich removed title props ",
"```suggestion\r\n <TableHeaderCell colSpan={2} title=\"Topic\"/>\r\n <TableHeaderCell title=\"Messages behind\" />\r\n```",
"remove all this stuff",
"Also you need to play with ToggleButton & TableKeyLink components in ListItem.",
"Also Important to update all the tables where we use same approach",
"done",
"done",
"it is not very user perspective to get it by the `data-testid` cause when a person reads the test they should be able to intuitively understand what should be tested. because it ships this attribute to production as well.\r\n\r\n[article](https://betterprogramming.pub/why-you-should-avoid-testing-react-components-with-test-ids-ee50d20d37d2)",
"removed testID"
] | "2023-02-21T17:33:44Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | Consumer overview: topic label padding looks weird | <img width="653" alt="image" src="https://user-images.githubusercontent.com/1494347/180576596-28801e82-bde0-474e-b9c0-5f8ff1339327.png">
| [
"kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx",
"kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts"
] | [
"kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx",
"kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx",
"kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
index 098a487081a..e2226f59813 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx
@@ -14,7 +14,6 @@ import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
import groupBy from 'lodash/groupBy';
import { Table } from 'components/common/table/Table/Table.styled';
-import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
import { useAppDispatch, useAppSelector } from 'lib/hooks/redux';
import {
deleteConsumerGroup,
@@ -27,6 +26,7 @@ import { Dropdown } from 'components/common/Dropdown';
import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel.styled';
import { Action, ResourceType } from 'generated-sources';
import { ActionDropdownItem } from 'components/common/ActionComponent';
+import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
import ListItem from './ListItem';
@@ -137,7 +137,6 @@ const Details: React.FC = () => {
<Table isFullwidth>
<thead>
<tr>
- <TableHeaderCell> </TableHeaderCell>
<TableHeaderCell title="Topic" />
<TableHeaderCell title="Messages behind" />
</tr>
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts
index 11c852b3632..358a45e0a6f 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.styled.ts
@@ -1,6 +1,7 @@
import styled from 'styled-components';
-export const ToggleButton = styled.td`
- padding: 8px 8px 8px 16px !important;
- width: 30px;
+export const FlexWrapper = styled.div`
+ display: flex;
+ align-items: center;
+ gap: 8px;
`;
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx
index 313ad234098..7b1a61ae4eb 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ListItem.tsx
@@ -8,7 +8,7 @@ import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
import { TableKeyLink } from 'components/common/table/Table/TableKeyLink.styled';
import TopicContents from './TopicContents/TopicContents';
-import { ToggleButton } from './ListItem.styled';
+import { FlexWrapper } from './ListItem.styled';
interface Props {
clusterName: ClusterName;
@@ -30,14 +30,16 @@ const ListItem: React.FC<Props> = ({ clusterName, name, consumers }) => {
return (
<>
<tr>
- <ToggleButton>
- <IconButtonWrapper onClick={() => setIsOpen(!isOpen)} aria-hidden>
- <MessageToggleIcon isOpen={isOpen} />
- </IconButtonWrapper>
- </ToggleButton>
- <TableKeyLink>
- <Link to={clusterTopicPath(clusterName, name)}>{name}</Link>
- </TableKeyLink>
+ <td>
+ <FlexWrapper>
+ <IconButtonWrapper onClick={() => setIsOpen(!isOpen)} aria-hidden>
+ <MessageToggleIcon isOpen={isOpen} />
+ </IconButtonWrapper>
+ <TableKeyLink>
+ <Link to={clusterTopicPath(clusterName, name)}>{name}</Link>
+ </TableKeyLink>
+ </FlexWrapper>
+ </td>
<td>{getTotalMessagesBehind()}</td>
</tr>
{isOpen && <TopicContents consumers={consumers} />}
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx
index 0ed5504e793..9e010a414f2 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx
@@ -64,7 +64,7 @@ describe('Details component', () => {
expect(screen.getByText(groupId)).toBeInTheDocument();
expect(screen.getByRole('table')).toBeInTheDocument();
- expect(screen.getAllByRole('columnheader').length).toEqual(3);
+ expect(screen.getAllByRole('columnheader').length).toEqual(2);
expect(screen.queryByRole('dialog')).not.toBeInTheDocument();
});
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx
index c4906e9209d..9cf79d02ded 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/ListItem.spec.tsx
@@ -40,7 +40,9 @@ describe('ListItem', () => {
});
it('should renders list item with topic content open', async () => {
- await userEvent.click(screen.getAllByRole('cell')[0].children[0]);
+ await userEvent.click(
+ screen.getByRole('cell', { name: 'cluster1' }).children[0].children[0]
+ );
expect(screen.getByText('Consumer ID')).toBeInTheDocument();
});
});
diff --git a/kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts b/kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts
index 77301021557..a2b0ece7242 100644
--- a/kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts
+++ b/kafka-ui-react-app/src/components/common/table/Table/TableKeyLink.styled.ts
@@ -18,6 +18,6 @@ const tableLinkMixin = css(
`
);
-export const TableKeyLink = styled.td`
+export const TableKeyLink = styled.div`
${tableLinkMixin}
`;
| null | train | test | 2023-03-10T14:45:37 | "2022-07-22T22:29:21Z" | Haarolean | train |
provectus/kafka-ui/2819_3394 | provectus/kafka-ui | provectus/kafka-ui/2819 | provectus/kafka-ui/3394 | [
"connected"
] | f193e5fed7f2e5e33ef3b5d72bf0db33e5e9d6dc | be151b4d82b4029e14d2a37d54efbff939fe7bf4 | [
"Hello there judasn! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Ah, I see. There's no need to open up a duplicate tho.\r\n\r\n",
"@David-DB88 \r\n\r\nThis is a regression. PTAL why did this happen again and make sure (tests?) this doesn't happen again.\r\nRelated:\r\n#2365\r\n#1544\r\n#1545",
"An issue is persistent if there's no SSL enabled and the app is accessed with http"
] | [
"```suggestion\r\n 'Copying to clipboard is unavailable due to unsecured (non-HTTPS) connection',\r\n```",
"I would suggest to use `warning` here",
"ok",
"ok",
"```suggestion\r\n title: 'Warning',\r\n```"
] | "2023-02-23T10:40:31Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed",
"type/regression"
] | "Copy to clipboard" doesn't work | ## Thanks App ^O^
## Environment
- kafka-ui: v0.4.0
- Microsoft Edge: 106.0.1370.52 - x64
## Problem description
- The button doesn't work: after clicking it, nothing happens and nothing is copied.

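As the maintainers point out in the comments on this record, the problem shows up when the app is served over plain HTTP: `navigator.clipboard` is only exposed in secure contexts (HTTPS or localhost), so the copy button has nothing to call. Below is a rough sketch of a guard that makes the failure visible instead of silent — it is an illustration, not the app's actual code, and `onWarning` is a placeholder callback invented for the example.

```typescript
// Copy text when the Clipboard API is available; otherwise explain why not.
const copyToClipboard = async (
  text: string,
  onWarning: (message: string) => void
): Promise<void> => {
  if (window.isSecureContext && navigator.clipboard) {
    await navigator.clipboard.writeText(text);
  } else {
    onWarning(
      'Copying to clipboard is unavailable over an unsecured (non-HTTPS) connection'
    );
  }
};
```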
| [
"kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts",
"kafka-ui-react-app/src/components/common/Alert/Alert.tsx",
"kafka-ui-react-app/src/lib/errorHandling.tsx",
"kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx",
"kafka-ui-react-app/src/lib/hooks/useDataSaver.ts"
] | [
"kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts",
"kafka-ui-react-app/src/components/common/Alert/Alert.tsx",
"kafka-ui-react-app/src/lib/errorHandling.tsx",
"kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx",
"kafka-ui-react-app/src/lib/hooks/useDataSaver.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts b/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
index 58ae2ed09df..eefd3bd3be0 100644
--- a/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Alert/Alert.styled.ts
@@ -1,7 +1,7 @@
-import { ToastType } from 'react-hot-toast';
import styled from 'styled-components';
+import { ToastTypes } from 'lib/errorHandling';
-export const Alert = styled.div<{ $type: ToastType }>`
+export const Alert = styled.div<{ $type: ToastTypes }>`
background-color: ${({ $type, theme }) => theme.alert.color[$type]};
width: 500px;
min-height: 64px;
diff --git a/kafka-ui-react-app/src/components/common/Alert/Alert.tsx b/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
index 8f9d167d1e8..5b58a573d4a 100644
--- a/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
+++ b/kafka-ui-react-app/src/components/common/Alert/Alert.tsx
@@ -1,13 +1,13 @@
import React from 'react';
import CloseIcon from 'components/common/Icons/CloseIcon';
import IconButtonWrapper from 'components/common/Icons/IconButtonWrapper';
-import { ToastType } from 'react-hot-toast';
+import { ToastTypes } from 'lib/errorHandling';
import * as S from './Alert.styled';
export interface AlertProps {
title: string;
- type: ToastType;
+ type: ToastTypes;
message: React.ReactNode;
onDissmiss(): void;
}
diff --git a/kafka-ui-react-app/src/lib/errorHandling.tsx b/kafka-ui-react-app/src/lib/errorHandling.tsx
index f07f4cf52ac..58b2a8bb704 100644
--- a/kafka-ui-react-app/src/lib/errorHandling.tsx
+++ b/kafka-ui-react-app/src/lib/errorHandling.tsx
@@ -9,6 +9,7 @@ interface ServerResponse {
url?: string;
message?: ErrorResponse['message'];
}
+export type ToastTypes = ToastType | 'warning';
export const getResponse = async (
response: Response
@@ -34,7 +35,7 @@ interface AlertOptions {
}
export const showAlert = (
- type: ToastType,
+ type: ToastTypes,
{ title, message, id }: AlertOptions
) => {
toast.custom(
diff --git a/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx b/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
index f0631850e5c..9b125575d97 100644
--- a/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
+++ b/kafka-ui-react-app/src/lib/hooks/__tests__/useDataSaver.spec.tsx
@@ -1,7 +1,12 @@
import React, { useEffect } from 'react';
import useDataSaver from 'lib/hooks/useDataSaver';
import { render } from '@testing-library/react';
+import { showAlert } from 'lib/errorHandling';
+jest.mock('lib/errorHandling', () => ({
+ ...jest.requireActual('lib/errorHandling'),
+ showAlert: jest.fn(),
+}));
describe('useDataSaver hook', () => {
const content = {
title: 'title',
@@ -38,7 +43,6 @@ describe('useDataSaver hook', () => {
mockCreate.mockRestore();
});
});
-
describe('copies the data to the clipboard', () => {
Object.assign(navigator, {
clipboard: {
@@ -74,4 +78,29 @@ describe('useDataSaver hook', () => {
);
});
});
+ describe('navigator clipboard is undefined', () => {
+ it('calls showAlert with the correct parameters when clipboard API is unavailable', () => {
+ Object.assign(navigator, {
+ clipboard: undefined,
+ });
+
+ const HookWrapper: React.FC = () => {
+ const { copyToClipboard } = useDataSaver('topic', content);
+ useEffect(() => {
+ copyToClipboard();
+ }, [copyToClipboard]);
+ return null;
+ };
+
+ render(<HookWrapper />);
+
+ expect(showAlert).toHaveBeenCalledTimes(1);
+ expect(showAlert).toHaveBeenCalledWith('warning', {
+ id: 'topic',
+ title: 'Warning',
+ message:
+ 'Copying to clipboard is unavailable due to unsecured (non-HTTPS) connection',
+ });
+ });
+ });
});
diff --git a/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts b/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
index d1ff61386dc..9bcc1036794 100644
--- a/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
+++ b/kafka-ui-react-app/src/lib/hooks/useDataSaver.ts
@@ -1,4 +1,4 @@
-import { showSuccessAlert } from 'lib/errorHandling';
+import { showAlert, showSuccessAlert } from 'lib/errorHandling';
const useDataSaver = (
subject: string,
@@ -14,6 +14,13 @@ const useDataSaver = (
title: '',
message: 'Copied successfully!',
});
+ } else {
+ showAlert('warning', {
+ id: subject,
+ title: 'Warning',
+ message:
+ 'Copying to clipboard is unavailable due to unsecured (non-HTTPS) connection',
+ });
}
};
const saveFile = () => {
| null | train | test | 2023-03-06T09:59:48 | "2022-10-25T09:04:37Z" | judasn | train |
provectus/kafka-ui/3319_3400 | provectus/kafka-ui | provectus/kafka-ui/3319 | provectus/kafka-ui/3400 | [
"connected"
] | 59837394fbd06602e0d04827d2edbc8673197d88 | 18c046af5b8e6d639af921dacb664b30d0b0f44b | [
"I guess that not only `.`, but also `,` should not be allowed and not only for partitions, but for in sync replicas, time to retain data and maximum message size. They all should not accept decimal values."
] | [
"I would ask you to create helper function and cover it with unit tests",
"i think this function can be located outside of the component , and transformed into a pure function since it returns a boolean it does not have any side effects.\r\n\r\nThat way this function will not be created every-time `Input` component re-renders.",
"Thanks for your suggestion. I forgot to take this into consideration, and I've fixed this in the new commit."
] | "2023-02-24T02:19:03Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Topics: Number of partitions field validation should not allow `.` and `,` |
**Describe the bug** (Actual behavior)
It's possible to enter "," in the "Number of partitions" field
**Expected behavior**
Let's not allow "," to be entered, since only integers from 1 are valid here
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
1. Navigate to Topics
2. Add a Topic
3. Fill "Number of Partitions" by copy-pasting ",77" or typing "1,5"
**Screenshots**
https://user-images.githubusercontent.com/104780608/217461981-bc94e309-7bd1-4386-b8ba-0b5a413ab9e4.mov
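For reference, a minimal sketch of the kind of key guard that enforces "whole numbers only" on a number input is below. It is illustrative only: the handler names are mine, and a real fix (like the one in the patch attached to this record) also has to sanitize pasted text, since a keydown guard alone does not cover paste.

```typescript
import React from 'react';

// Blocks characters that would turn the value into a non-integer or a signed
// number: '.', ',', exponent markers and signs. Meant for fields such as
// "Number of partitions" where only whole numbers >= 1 make sense.
const blockNonIntegerKeys = (event: React.KeyboardEvent<HTMLInputElement>) => {
  if (['.', ',', 'e', 'E', '+', '-'].includes(event.key)) {
    event.preventDefault();
  }
};

// Paste bypasses keydown, so pasted text needs its own cleanup.
const sanitizePastedInteger = (text: string): string => text.replace(/\D/g, '');

// Usage sketch: <input type="number" min={1} onKeyDown={blockNonIntegerKeys} />
```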
| [
"kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx",
"kafka-ui-react-app/src/components/common/Input/Input.tsx",
"kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx",
"kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx",
"kafka-ui-react-app/src/components/common/Input/Input.tsx",
"kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx",
"kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
index ae9d429236a..e8d7f1b844a 100644
--- a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
+++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx
@@ -116,6 +116,8 @@ const TopicForm: React.FC<Props> = ({
placeholder="Number of partitions"
min="1"
name="partitions"
+ positiveOnly
+ integerOnly
/>
<FormError>
<ErrorMessage errors={errors} name="partitions" />
@@ -161,6 +163,8 @@ const TopicForm: React.FC<Props> = ({
placeholder="Min In Sync Replicas"
min="1"
name="minInSyncReplicas"
+ positiveOnly
+ integerOnly
/>
<FormError>
<ErrorMessage errors={errors} name="minInSyncReplicas" />
@@ -177,6 +181,8 @@ const TopicForm: React.FC<Props> = ({
placeholder="Replication Factor"
min="1"
name="replicationFactor"
+ positiveOnly
+ integerOnly
/>
<FormError>
<ErrorMessage errors={errors} name="replicationFactor" />
@@ -227,6 +233,8 @@ const TopicForm: React.FC<Props> = ({
placeholder="Maximum message size"
min="1"
name="maxMessageBytes"
+ positiveOnly
+ integerOnly
/>
<FormError>
<ErrorMessage errors={errors} name="maxMessageBytes" />
diff --git a/kafka-ui-react-app/src/components/common/Input/Input.tsx b/kafka-ui-react-app/src/components/common/Input/Input.tsx
index 932f954aa5c..f5049e69618 100644
--- a/kafka-ui-react-app/src/components/common/Input/Input.tsx
+++ b/kafka-ui-react-app/src/components/common/Input/Input.tsx
@@ -11,6 +11,87 @@ export interface InputProps
hookFormOptions?: RegisterOptions;
search?: boolean;
positiveOnly?: boolean;
+
+ // Some may only accept integer, like `Number of Partitions`
+ // some may accept decimal
+ integerOnly?: boolean;
+}
+
+function inputNumberCheck(
+ key: string,
+ positiveOnly: boolean,
+ integerOnly: boolean,
+ getValues: (name: string) => string,
+ componentName: string
+) {
+ let isValid = true;
+ if (!((key >= '0' && key <= '9') || key === '-' || key === '.')) {
+ // If not a valid digit char.
+ isValid = false;
+ } else {
+ // If there is any restriction.
+ if (positiveOnly) {
+ isValid = !(key === '-');
+ }
+ if (isValid && integerOnly) {
+ isValid = !(key === '.');
+ }
+
+ // Check invalid format
+ const value = getValues(componentName);
+
+ if (isValid && (key === '-' || key === '.')) {
+ if (!positiveOnly) {
+ if (key === '-') {
+ if (value !== '') {
+ // '-' should not appear anywhere except the start of the string
+ isValid = false;
+ }
+ }
+ }
+ if (!integerOnly) {
+ if (key === '.') {
+ if (value === '' || value.indexOf('.') !== -1) {
+ // '.' should not appear at the start of the string or appear twice
+ isValid = false;
+ }
+ }
+ }
+ }
+ }
+ return isValid;
+}
+
+function pasteNumberCheck(
+ text: string,
+ positiveOnly: boolean,
+ integerOnly: boolean
+) {
+ let value: string;
+ value = text;
+ let sign = '';
+ if (!positiveOnly) {
+ if (value.charAt(0) === '-') {
+ sign = '-';
+ }
+ }
+ if (integerOnly) {
+ value = value.replace(/\D/g, '');
+ } else {
+ value = value.replace(/[^\d.]/g, '');
+ if (value.indexOf('.') !== value.lastIndexOf('.')) {
+ const strs = value.split('.');
+ value = '';
+ for (let i = 0; i < strs.length; i += 1) {
+ value += strs[i];
+ if (i === 0) {
+ value += '.';
+ }
+ }
+ }
+ }
+ value = sign + value;
+ return value;
}
const Input: React.FC<InputProps> = ({
@@ -20,17 +101,27 @@ const Input: React.FC<InputProps> = ({
inputSize = 'L',
type,
positiveOnly,
+ integerOnly,
...rest
}) => {
const methods = useFormContext();
+
const keyPressEventHandler = (
event: React.KeyboardEvent<HTMLInputElement>
) => {
- const { key, code } = event;
+ const { key } = event;
if (type === 'number') {
- // Manualy prevent input of 'e' character for all number inputs
+ // Manually prevent input of non-digit and non-minus for all number inputs
// and prevent input of negative numbers for positiveOnly inputs
- if (key === 'e' || (positiveOnly && (key === '-' || code === 'Minus'))) {
+ if (
+ !inputNumberCheck(
+ key,
+ typeof positiveOnly === 'boolean' ? positiveOnly : false,
+ typeof integerOnly === 'boolean' ? integerOnly : false,
+ methods.getValues,
+ typeof name === 'string' ? name : ''
+ )
+ ) {
event.preventDefault();
}
}
@@ -38,17 +129,14 @@ const Input: React.FC<InputProps> = ({
const pasteEventHandler = (event: React.ClipboardEvent<HTMLInputElement>) => {
if (type === 'number') {
const { clipboardData } = event;
- const text = clipboardData.getData('Text');
- // replace all non-digit characters with empty string
- let value = text.replace(/[^\d.]/g, '');
- if (positiveOnly) {
- // check if value is negative
- const parsedData = parseFloat(value);
- if (parsedData < 0) {
- // remove minus sign
- value = String(Math.abs(parsedData));
- }
- }
+ // The 'clipboardData' does not have key 'Text', but has key 'text' instead.
+ const text = clipboardData.getData('text');
+ // Check the format of pasted text.
+ const value = pasteNumberCheck(
+ text,
+ typeof positiveOnly === 'boolean' ? positiveOnly : false,
+ typeof integerOnly === 'boolean' ? integerOnly : false
+ );
// if paste value contains non-numeric characters or
// negative for positiveOnly fields then prevent paste
if (value !== text) {
diff --git a/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx b/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
index 3082f6f610a..0254196965d 100644
--- a/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/Input/__tests__/Input.spec.tsx
@@ -4,12 +4,23 @@ import { screen } from '@testing-library/react';
import { render } from 'lib/testHelpers';
import userEvent from '@testing-library/user-event';
+// Mock useFormContext
+let component: HTMLInputElement;
+
const setupWrapper = (props?: Partial<InputProps>) => (
<Input name="test" {...props} />
);
jest.mock('react-hook-form', () => ({
useFormContext: () => ({
register: jest.fn(),
+
+ // Mock methods.getValues and methods.setValue
+ getValues: jest.fn(() => {
+ return component.value;
+ }),
+ setValue: jest.fn((key, val) => {
+ component.value = val;
+ }),
}),
}));
@@ -23,20 +34,146 @@ describe('Custom Input', () => {
});
});
describe('number', () => {
- const getInput = () => screen.getByRole('spinbutton');
+ const getInput = () => screen.getByRole<HTMLInputElement>('spinbutton');
+
+ describe('input', () => {
+ it('allows user to type numbers only', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, 'abc131');
+ expect(input).toHaveValue(131);
+ });
+
+ it('allows user to type negative values', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '-2');
+ expect(input).toHaveValue(-2);
+ });
+
+ it('allows user to type positive values only', async () => {
+ render(setupWrapper({ type: 'number', positiveOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '-2');
+ expect(input).toHaveValue(2);
+ });
+
+ it('allows user to type decimal', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2.3');
+ expect(input).toHaveValue(2.3);
+ });
+
+ it('allows user to type integer only', async () => {
+ render(setupWrapper({ type: 'number', integerOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2.3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '-' appear at any position of the string except the start", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '2-3');
+ expect(input).toHaveValue(23);
+ });
- it('allows user to type only numbers', async () => {
- render(setupWrapper({ type: 'number' }));
- const input = getInput();
- await userEvent.type(input, 'abc131');
- expect(input).toHaveValue(131);
+ it("not allow '.' appear at the start of the string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '.33');
+ expect(input).toHaveValue(33);
+ });
+
+ it("not allow '.' appear twice in the string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.type(input, '3.3.3');
+ expect(input).toHaveValue(3.33);
+ });
});
- it('allows negative values', async () => {
- render(setupWrapper({ type: 'number' }));
- const input = getInput();
- await userEvent.type(input, '-2');
- expect(input).toHaveValue(-2);
+ describe('paste', () => {
+ it('allows user to paste numbers only', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('abc131');
+ expect(input).toHaveValue(131);
+ });
+
+ it('allows user to paste negative values', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('-2');
+ expect(input).toHaveValue(-2);
+ });
+
+ it('allows user to paste positive values only', async () => {
+ render(setupWrapper({ type: 'number', positiveOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('-2');
+ expect(input).toHaveValue(2);
+ });
+
+ it('allows user to paste decimal', async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2.3');
+ expect(input).toHaveValue(2.3);
+ });
+
+ it('allows user to paste integer only', async () => {
+ render(setupWrapper({ type: 'number', integerOnly: true }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2.3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '-' appear at any position of the pasted string except the start", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('2-3');
+ expect(input).toHaveValue(23);
+ });
+
+ it("not allow '.' appear at the start of the pasted string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('.33');
+ expect(input).toHaveValue(0.33);
+ });
+
+ it("not allow '.' appear twice in the pasted string", async () => {
+ render(setupWrapper({ type: 'number' }));
+ const input = getInput();
+ component = input;
+ await userEvent.click(input);
+ await userEvent.paste('3.3.3');
+ expect(input).toHaveValue(3.33);
+ });
});
});
});
diff --git a/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx b/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
index 43dd7b8dbf7..c60cccdabea 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/__test__/Table.spec.tsx
@@ -20,6 +20,17 @@ jest.mock('react-router-dom', () => ({
useNavigate: () => mockedUsedNavigate,
}));
+// This is needed by ESLint.
+jest.mock('react-hook-form', () => ({
+ useFormContext: () => ({
+ register: jest.fn(),
+
+ // Mock methods.getValues and methods.setValue
+ getValues: jest.fn(),
+ setValue: jest.fn(),
+ }),
+}));
+
type Datum = typeof data[0];
const data = [
| null | train | test | 2023-03-01T13:13:29 | "2023-02-08T07:25:33Z" | armenuikafka | train |
provectus/kafka-ui/2680_3408 | provectus/kafka-ui | provectus/kafka-ui/2680 | provectus/kafka-ui/3408 | [
"connected"
] | ca225440d84744add5ce291ba792eccda2e2e627 | a3daa45ccb473ba77b7b5888bf0e38c32f7f65d4 | [] | [
"can we put `null` here instead of empty string, cause here React will try to render the empty string. \r\n\r\nor we can do it with short circuit checks such as \r\n```\r\n{!!filterApplyErrors && (\r\n <S.Metric title=\"Errors\">\r\n <span>{filterApplyErrors} errors</span>\r\n </S.Metric>\r\n )}\r\n```\r\n",
"shouldn't this be an `enum` ?"
] | "2023-02-24T15:33:49Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/accepted"
] | UI: Display smart filters filtered stats | When polling messages from a topic (and filtering is enabled) we should show the number of errors that appeared during the filtering process -> this number is placed in `TopicMessageConsuming.filterApplyErrors`
UI:
1. during the polling process - show the value of `TopicMessageConsuming.filterApplyErrors` if it is > 0
2. when polling is finished the BE will send a `TopicMessageEvent` with `type = DONE` containing the final stats of the polling process - these stats should be rendered (this event should also be treated by the UI as the end of polling); see the sketch below
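A rough sketch of the event handling described above, assuming the polling stream is consumed on the frontend. The local `TopicMessageConsuming` and `PollingEvent` types are simplified stand-ins for the generated API models, and `updateMeta` / `stopPolling` are placeholder callbacks — this illustrates the DONE handling, it is not the project's exact code.

```typescript
type TopicMessageConsuming = {
  bytesConsumed?: number;
  messagesConsumed?: number;
  elapsedMs?: number;
  filterApplyErrors?: number;
};

type PollingEvent = {
  type: 'PHASE' | 'MESSAGE' | 'CONSUMING' | 'DONE';
  consuming?: TopicMessageConsuming;
};

// Called for every event arriving from the message polling stream.
const handlePollingEvent = (
  event: PollingEvent,
  updateMeta: (meta: TopicMessageConsuming) => void,
  stopPolling: () => void
): void => {
  switch (event.type) {
    case 'CONSUMING':
      // Intermediate stats: filterApplyErrors is rendered as soon as it is > 0.
      if (event.consuming) updateMeta(event.consuming);
      break;
    case 'DONE':
      // Final stats of the session; DONE also ends the polling from the UI's point of view.
      if (event.consuming) updateMeta(event.consuming);
      stopPolling();
      break;
    default:
      break;
  }
};
```

On the rendering side the counter would only be shown when it is non-zero, e.g. `filterApplyErrors > 0 && <span>{filterApplyErrors} errors</span>`.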
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx",
"kafka-ui-react-app/src/redux/interfaces/topic.ts",
"kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts",
"kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx",
"kafka-ui-react-app/src/redux/interfaces/topic.ts",
"kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts",
"kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
index f9fa3401fc0..2941536f8bf 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
@@ -53,11 +53,13 @@ export interface FiltersProps {
phaseMessage?: string;
meta: TopicMessageConsuming;
isFetching: boolean;
+ messageEventType?: string;
addMessage(content: { message: TopicMessage; prepend: boolean }): void;
resetMessages(): void;
updatePhase(phase: string): void;
updateMeta(meta: TopicMessageConsuming): void;
setIsFetching(status: boolean): void;
+ setMessageType(messageType: string): void;
}
export interface MessageFilters {
@@ -80,13 +82,15 @@ export const SeekTypeOptions = [
const Filters: React.FC<FiltersProps> = ({
phaseMessage,
- meta: { elapsedMs, bytesConsumed, messagesConsumed },
+ meta: { elapsedMs, bytesConsumed, messagesConsumed, filterApplyErrors },
isFetching,
addMessage,
resetMessages,
updatePhase,
updateMeta,
setIsFetching,
+ setMessageType,
+ messageEventType,
}) => {
const { clusterName, topicName } = useAppParams<RouteParamsClusterTopic>();
const location = useLocation();
@@ -355,6 +359,12 @@ const Filters: React.FC<FiltersProps> = ({
case TopicMessageEventTypeEnum.CONSUMING:
if (consuming) updateMeta(consuming);
break;
+ case TopicMessageEventTypeEnum.DONE:
+ if (consuming && type) {
+ setMessageType(type);
+ updateMeta(consuming);
+ }
+ break;
default:
}
};
@@ -551,6 +561,7 @@ const Filters: React.FC<FiltersProps> = ({
{seekDirection !== SeekDirection.TAILING &&
isFetching &&
phaseMessage}
+ {!isFetching && messageEventType}
</S.Message>
<S.MessageLoading isLive={isTailing}>
<S.MessageLoadingSpinner isFetching={isFetching} />
@@ -582,6 +593,11 @@ const Filters: React.FC<FiltersProps> = ({
</S.MetricsIcon>
<span>{messagesConsumed} messages consumed</span>
</S.Metric>
+ {!!filterApplyErrors && (
+ <S.Metric title="Errors">
+ <span>{filterApplyErrors} errors</span>
+ </S.Metric>
+ )}
</S.FiltersMetrics>
</S.FiltersWrapper>
);
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts
index 144672ee603..19dca0184bd 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/FiltersContainer.ts
@@ -6,11 +6,13 @@ import {
updateTopicMessagesMeta,
updateTopicMessagesPhase,
setTopicMessagesFetchingStatus,
+ setMessageEventType,
} from 'redux/reducers/topicMessages/topicMessagesSlice';
import {
getTopicMessgesMeta,
getTopicMessgesPhase,
getIsTopicMessagesFetching,
+ getIsTopicMessagesType,
} from 'redux/reducers/topicMessages/selectors';
import Filters from './Filters';
@@ -19,6 +21,7 @@ const mapStateToProps = (state: RootState) => ({
phaseMessage: getTopicMessgesPhase(state),
meta: getTopicMessgesMeta(state),
isFetching: getIsTopicMessagesFetching(state),
+ messageEventType: getIsTopicMessagesType(state),
});
const mapDispatchToProps = {
@@ -27,6 +30,7 @@ const mapDispatchToProps = {
updatePhase: updateTopicMessagesPhase,
updateMeta: updateTopicMessagesMeta,
setIsFetching: setTopicMessagesFetchingStatus,
+ setMessageType: setMessageEventType,
};
export default connect(mapStateToProps, mapDispatchToProps)(Filters);
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx
index e248fdeaeb8..3e75f117874 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/__tests__/Filters.spec.tsx
@@ -44,13 +44,17 @@ const renderComponent = (
<WithRoute path={clusterTopicPath()}>
<TopicMessagesContext.Provider value={ctx}>
<Filters
- meta={{}}
+ meta={{
+ filterApplyErrors: 10,
+ }}
isFetching={false}
addMessage={jest.fn()}
resetMessages={jest.fn()}
updatePhase={jest.fn()}
updateMeta={jest.fn()}
setIsFetching={jest.fn()}
+ setMessageType={jest.fn}
+ messageEventType="Done"
{...props}
/>
</TopicMessagesContext.Provider>
@@ -228,4 +232,17 @@ describe('Filters component', () => {
expect(anotherSmartFilterElement).not.toBeInTheDocument();
});
});
+
+ describe('show errors when get an filterApplyErrors and message event type', () => {
+ it('show errors', () => {
+ renderComponent();
+ const errors = screen.getByText('10 errors');
+ expect(errors).toBeInTheDocument();
+ });
+ it('message event type when fetching is false ', () => {
+ renderComponent();
+ const messageType = screen.getByText('Done');
+ expect(messageType).toBeInTheDocument();
+ });
+ });
});
diff --git a/kafka-ui-react-app/src/redux/interfaces/topic.ts b/kafka-ui-react-app/src/redux/interfaces/topic.ts
index 9f667e135e8..153002240a1 100644
--- a/kafka-ui-react-app/src/redux/interfaces/topic.ts
+++ b/kafka-ui-react-app/src/redux/interfaces/topic.ts
@@ -56,5 +56,6 @@ export interface TopicMessagesState {
messages: TopicMessage[];
phase?: string;
meta: TopicMessageConsuming;
+ messageEventType?: string;
isFetching: boolean;
}
diff --git a/kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts b/kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts
index 03adca8e424..b2636cdf2ad 100644
--- a/kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts
+++ b/kafka-ui-react-app/src/redux/reducers/topicMessages/selectors.ts
@@ -23,3 +23,8 @@ export const getIsTopicMessagesFetching = createSelector(
topicMessagesState,
({ isFetching }) => isFetching
);
+
+export const getIsTopicMessagesType = createSelector(
+ topicMessagesState,
+ ({ messageEventType }) => messageEventType
+);
diff --git a/kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts b/kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts
index 846cbd0c978..530a3781140 100644
--- a/kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts
+++ b/kafka-ui-react-app/src/redux/reducers/topicMessages/topicMessagesSlice.ts
@@ -10,6 +10,7 @@ export const initialState: TopicMessagesState = {
messagesConsumed: 0,
isCancelled: false,
},
+ messageEventType: '',
isFetching: false,
};
@@ -37,6 +38,10 @@ const topicMessagesSlice = createSlice({
setTopicMessagesFetchingStatus: (state, action) => {
state.isFetching = action.payload;
},
+
+ setMessageEventType: (state, action) => {
+ state.messageEventType = action.payload;
+ },
},
});
@@ -46,6 +51,7 @@ export const {
updateTopicMessagesPhase,
updateTopicMessagesMeta,
setTopicMessagesFetchingStatus,
+ setMessageEventType,
} = topicMessagesSlice.actions;
export default topicMessagesSlice.reducer;
| null | train | test | 2023-04-06T16:21:24 | "2022-09-30T09:33:59Z" | Haarolean | train |
provectus/kafka-ui/3384_3412 | provectus/kafka-ui | provectus/kafka-ui/3384 | provectus/kafka-ui/3412 | [
"keyword_pr_to_issue",
"connected"
] | d5a5f66528ee6dbb73d182a864e74a9a04c2e384 | f51da4bb61ceac755ed0ec6f439b52b0233e9538 | [
"Hi, @Haarolean I would like to work on this issue. Can you please assign it to me?",
"@vict-o-ria hey Victoria, sure thing :)"
] | [] | "2023-02-27T08:45:04Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | "Saved filters" invalid font | <img width="576" alt="image" src="https://user-images.githubusercontent.com/1494347/220509484-46e27477-5fd2-4a27-a69c-998436eac7f9.png">
https://www.figma.com/file/ZkvysdRUmt9p2PDNU3a9WU/Kafka-UI?node-id=2536%3A10370&t=WBEoXbSrlD54Pnqw-4 | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
index d1230febf94..6e1d50b3bf6 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.styled.ts
@@ -184,6 +184,7 @@ export const CreatedFilter = styled.p`
margin: 25px 0 10px;
font-size: 14px;
line-height: 20px;
+ color: ${({ theme }) => theme.list.meta.color};
`;
export const SavedFiltersContainer = styled.div`
| null | train | test | 2023-03-09T19:10:11 | "2023-02-22T02:55:46Z" | Haarolean | train |
provectus/kafka-ui/3395_3413 | provectus/kafka-ui | provectus/kafka-ui/3395 | provectus/kafka-ui/3413 | [
"connected"
] | c8619268cd1e791b5d509371981cb0d9b0a80e99 | ffa49ebb3d4cada57d415d7b5ee054987ac32977 | [] | [
"1. lets's remove signs from locators -> use 'contains'\n2. seems that buttons could be shared in several page classes, suggest to store them in BasePage.class from the start",
"1. we need to add some wait until message list loaded after opening tab or navigation between pages, because getAllMessages().size() will not be 100 at the time you open the tab Messages (inspect the UI behavior)\r\n2. also here we need to add assertion that page is really changed, current implementation will not face linked issue",
"Fixed",
"Fixed",
"get(messagesPerPage) -> get(messagesPerPage - 1)",
"assertNotEquals -> assertEquals",
"signs still there",
"Fixed.",
"Removed.",
"Fixed.",
"fix the link pls",
"Fixed."
] | "2023-02-27T09:48:19Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]Checking Messages count per page within Topic | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-267
Pre-conditions:
- Create a Topic
- Add more than 100 messages to the Topic (a seeding sketch follows right below)
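A minimal sketch of how this pre-condition could be seeded, assuming the e2e module's `apiService` helper that the test patch of this record also uses; the topic name, key and content values are illustrative placeholders, not the ones from the merged test:

```java
// Sketch only: seeding step for the pre-condition, meant for a @BeforeClass method of an
// e2e test class. Topic, apiService.createTopic() and apiService.sendMessage() come from the
// e2e module (see the test patch below); the concrete values are placeholders.
Topic topic = new Topic()
    .setName("topic-for-check-messages-count")
    .setMessageKey("key")
    .setMessageContent("content");
apiService.createTopic(topic.getName());
// more than 100 messages, so the message list spills onto a second page
java.util.stream.IntStream.range(1, 110).forEach(i -> apiService.sendMessage(topic));
```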
Steps:
- Login to Kafka-ui
- Navigate to Topics
- Select the previously added Topic
- Switch to the Messages tab
- Press Next to go to the second page
Expected results:
- First 100 messages should be displayed within first page with active Next button and disabled Back button
- Messages (from 101) should be displayed within second page | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index e51942a39ce..c131bd8e0d1 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -23,6 +23,8 @@ public abstract class BasePage extends WebUtils {
protected SelenideElement confirmationMdl = $x("//div[text()= 'Confirm the action']/..");
protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]");
protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]");
+ protected SelenideElement backBtn = $x("//button[contains(text(),'Back')]");
+ protected SelenideElement nextBtn = $x("//button[contains(text(),'Next')]");
protected ElementsCollection ddlOptions = $$x("//li[@value]");
protected ElementsCollection gridItems = $$x("//tr[@class]");
protected String summaryCellLocator = "//div[contains(text(),'%s')]";
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
index 980a66b791d..a99afd903b6 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
@@ -170,6 +170,13 @@ public TopicDetails clickMessagesAddFiltersBtn() {
return this;
}
+ @Step
+ public TopicDetails clickNextButton() {
+ nextBtn.shouldBe(Condition.enabled).click();
+ waitUntilSpinnerDisappear();
+ return this;
+ }
+
@Step
public TopicDetails openSavedFiltersListMdl() {
savedFiltersLink.shouldBe(Condition.enabled).click();
@@ -240,6 +247,16 @@ public boolean isAddFilterBtnAddFilterMdlEnabled() {
return isEnabled(addFilterBtnAddFilterMdl);
}
+ @Step
+ public boolean isBackButtonEnabled() {
+ return isEnabled(backBtn);
+ }
+
+ @Step
+ public boolean isNextButtonEnabled() {
+ return isEnabled(nextBtn);
+ }
+
@Step
public boolean isActiveFilterVisible(String activeFilterName) {
return isVisible($x(String.format(activeFilterNameLocator, activeFilterName)));
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
index 2dee8588d79..5e5af5c0448 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -9,6 +9,7 @@
import io.qameta.allure.Issue;
import io.qameta.allure.Step;
import io.qase.api.annotation.CaseId;
+import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
@@ -48,16 +49,21 @@ public class MessagesTest extends BaseTest {
.setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
.setMessageContent(randomAlphabetic(10));
+ private static final Topic TOPIC_FOR_CHECK_MESSAGES_COUNT = new Topic()
+ .setName("topic-for-check-messages-count" + randomAlphabetic(5))
+ .setMessageKey(randomAlphabetic(5))
+ .setMessageContent(randomAlphabetic(10));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
public void beforeClass() {
TOPIC_LIST.addAll(List.of(TOPIC_FOR_MESSAGES, TOPIC_FOR_CHECKING_FILTERS, TOPIC_TO_CLEAR_AND_PURGE_MESSAGES,
- TOPIC_TO_RECREATE));
+ TOPIC_TO_RECREATE, TOPIC_FOR_CHECK_MESSAGES_COUNT));
TOPIC_LIST.forEach(topic -> apiService.createTopic(topic.getName()));
IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
waitUntilNewMinuteStarted();
IntStream.range(1, 3).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECKING_FILTERS));
+ IntStream.range(1, 110).forEach(i -> apiService.sendMessage(TOPIC_FOR_CHECK_MESSAGES_COUNT));
}
@Suite(suiteId = SUITE_ID, title = SUITE_TITLE)
@@ -272,6 +278,31 @@ public void checkRecreateTopic() {
softly.assertAll();
}
+ @Ignore
+ @Issue("https://github.com/provectus/kafka-ui/issues/3129")
+ @QaseId(267)
+ @Test(priority = 10)
+ public void CheckMessagesCountPerPageWithinTopic() {
+ navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_MESSAGES_COUNT.getName());
+ topicDetails
+ .openDetailsTab(MESSAGES);
+ int messagesPerPage = topicDetails.getAllMessages().size();
+ SoftAssert softly = new SoftAssert();
+ softly.assertEquals(messagesPerPage, 100, "getAllMessages()");
+ softly.assertFalse(topicDetails.isBackButtonEnabled(), "isBackButtonEnabled()");
+ softly.assertTrue(topicDetails.isNextButtonEnabled(), "isNextButtonEnabled()");
+ softly.assertAll();
+ int lastOffsetOnPage = topicDetails.getAllMessages()
+ .get(messagesPerPage -1).getOffset();
+ topicDetails
+ .clickNextButton();
+ softly.assertEquals(topicDetails.getAllMessages().stream().findFirst().orElseThrow().getOffset(),
+ lastOffsetOnPage + 1, "findFirst().getOffset()");
+ softly.assertTrue(topicDetails.isBackButtonEnabled(), "isBackButtonEnabled()");
+ softly.assertFalse(topicDetails.isNextButtonEnabled(), "isNextButtonEnabled()");
+ softly.assertAll();
+ }
+
@Step
protected void produceMessage(Topic topic) {
topicDetails
| train | test | 2023-02-27T11:44:28 | "2023-02-23T10:42:02Z" | ArthurNiedial | train |
provectus/kafka-ui/3171_3433 | provectus/kafka-ui | provectus/kafka-ui/3171 | provectus/kafka-ui/3433 | [
"connected"
] | c148f112a404815d6645fa97209199eced054728 | 0f5a9d7a630129c5b03a3993c24132eeb484c1f2 | [
"Hey @Haarolean! I've been using kafka-ui at my job for a few months and would like to contribute. Would you assign the issue to me?",
"@michal-cesek hey, sure! \r\nYou can refer to [contributing](https://provectus.gitbook.io/kafka-ui/development/contributing) guide as a start"
] | [] | "2023-03-03T11:18:37Z" | [
"type/enhancement",
"good first issue",
"scope/backend",
"scope/frontend",
"status/accepted"
] | KSQL: Implement sorting | **Describe the bug**
1. Neither Tables nor Streams tab have any options to Search, default sorting or user defined sorting by clicking the column names.
2. Not possible to see the Table or Stream details by clicking the row in the table ([task #790](https://github.com/provectus/kafka-ui/issues/790)).
**Expected behavior**
1. Add search by the tab columns.
2. Add the Default sorting for the tab (by the Name?).
3. Add custom sorting by the column names.
4. Table and Stream detailed info is handled in a separate task #790
**Set up**
[f4e6afe](https://github.com/provectus/kafka-ui/commit/f4e6afe)
**Steps to Reproduce**
1. Log in to Kafka UI and navigate to the KSQL DB.
**Screenshots**

**Additional context**
@Haarolean requested to create a separate issue from the #2651 | [
"kafka-ui-react-app/src/components/KsqlDb/TableView.tsx",
"kafka-ui-react-app/src/components/common/NewTable/Table.tsx"
] | [
"kafka-ui-react-app/src/components/KsqlDb/TableView.tsx",
"kafka-ui-react-app/src/components/common/NewTable/Table.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx b/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx
index d27e4968b7f..538345954da 100644
--- a/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx
+++ b/kafka-ui-react-app/src/components/KsqlDb/TableView.tsx
@@ -31,7 +31,7 @@ const TableView: React.FC<TableViewProps> = ({ fetching, rows }) => {
data={rows || []}
columns={columns}
emptyMessage={fetching ? 'Loading...' : 'No rows found'}
- enableSorting={false}
+ enableSorting
/>
);
};
diff --git a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
index 55652df082d..da1f2c090a4 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
@@ -14,7 +14,7 @@ import type {
PaginationState,
ColumnDef,
} from '@tanstack/react-table';
-import { useSearchParams } from 'react-router-dom';
+import { useSearchParams, useLocation } from 'react-router-dom';
import { PER_PAGE } from 'lib/constants';
import { Button } from 'components/common/Button/Button';
import Input from 'components/common/Input/Input';
@@ -129,6 +129,7 @@ const Table: React.FC<TableProps<any>> = ({
onRowClick,
}) => {
const [searchParams, setSearchParams] = useSearchParams();
+ const location = useLocation();
const [rowSelection, setRowSelection] = React.useState({});
const onSortingChange = React.useCallback(
(updater: UpdaterFn<SortingState>) => {
@@ -136,7 +137,7 @@ const Table: React.FC<TableProps<any>> = ({
setSearchParams(searchParams);
return newState;
},
- [searchParams]
+ [searchParams, location]
);
const onPaginationChange = React.useCallback(
(updater: UpdaterFn<PaginationState>) => {
@@ -145,7 +146,7 @@ const Table: React.FC<TableProps<any>> = ({
setRowSelection({});
return newState;
},
- [searchParams]
+ [searchParams, location]
);
const table = useReactTable({
| null | train | test | 2023-04-13T09:57:40 | "2022-12-28T14:56:57Z" | BulatKha | train |
provectus/kafka-ui/3432_3450 | provectus/kafka-ui | provectus/kafka-ui/3432 | provectus/kafka-ui/3450 | [
"connected"
] | e261143bb466998302e7b91ccd6ad07622bc548a | b3f74cbfea7b43e50a817331e0ed549e35d566d9 | [] | [] | "2023-03-06T17:42:24Z" | [
"scope/QA",
"scope/AQA"
] | [e2e]Checking the URL redirections for kafka-ui pages | Autotest implementation for:
https://app.qase.io/case/KAFKAUI-45
Description
The purpose of this case is to make sure that all the redirections are correct.
Steps:
- Login to kafka-ui
- Press Brokers
- Press Topics
- Press Consumers
- Press Schema Registry
- Press Kafka Connect
- Press KSQL DB
Expected results:
- Should redirect to https://www.kafka-ui.provectus.io/
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/brokers
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/all-topics?perPage=25
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/consumer-groups
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/schemas
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/connectors
- Should redirect to https://www.kafka-ui.provectus.io/ui/clusters/local/ksqldb/tables
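A minimal sketch of how one of these redirects could be asserted with Selenide and TestNG, mirroring the approach used in the test patch of this record; the `localhost` host below is an assumption for a local run (the merged test derives the host from the browser setting):

```java
import com.codeborne.selenide.WebDriverRunner;
import org.testng.Assert;

class UrlAssertSketch {

  // Sketch only: after opening Brokers via the side menu, compare the current URL
  // with the expected one; the hard-coded host is a placeholder for a local run.
  static void assertBrokersUrl() {
    String expectedUrl = "http://localhost:8080/ui/clusters/local/brokers";
    Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(), expectedUrl, "getCurrentUrl()");
  }
}
```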
| [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
index b3a3be42a21..6d0c1d48f73 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/consumers/ConsumersList.java
@@ -1,11 +1,20 @@
package com.provectus.kafka.ui.pages.consumers;
+import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;
import com.provectus.kafka.ui.pages.BasePage;
+import io.qameta.allure.Step;
import static com.codeborne.selenide.Selenide.$x;
public class ConsumersList extends BasePage {
protected SelenideElement consumerListHeader = $x("//h1[text()='Consumers']");
+
+ @Step
+ public ConsumersList waitUntilScreenReady() {
+ waitUntilSpinnerDisappear();
+ consumerListHeader.shouldHave(Condition.visible);
+ return this;
+ }
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java
new file mode 100644
index 00000000000..f612d743a5c
--- /dev/null
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Url.java
@@ -0,0 +1,11 @@
+package com.provectus.kafka.ui.variables;
+
+public interface Url {
+
+ String BROKERS_LIST_URL = "http://%s:8080/ui/clusters/local/brokers";
+ String TOPICS_LIST_URL = "http://%s:8080/ui/clusters/local/all-topics?perPage=25";
+ String CONSUMERS_LIST_URL = "http://%s:8080/ui/clusters/local/consumer-groups";
+ String SCHEMA_REGISTRY_LIST_URL = "http://%s:8080/ui/clusters/local/schemas";
+ String KAFKA_CONNECT_LIST_URL = "http://%s:8080/ui/clusters/local/connectors";
+ String KSQL_DB_LIST_URL = "http://%s:8080/ui/clusters/local/ksqldb/tables";
+}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
index 399f81e1aa2..b2e7e007e58 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/BaseTest.java
@@ -22,7 +22,7 @@
import java.time.Duration;
import java.util.List;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.TOPICS;
+import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.*;
import static com.provectus.kafka.ui.settings.BaseSource.*;
import static com.provectus.kafka.ui.settings.drivers.LocalWebDriver.*;
import static com.provectus.kafka.ui.utilities.qaseUtils.QaseSetup.qaseIntegrationSetup;
@@ -108,6 +108,14 @@ public void beforeMethod() {
public void afterMethod() {
browserClear();
}
+
+ @Step
+ protected void navigateToBrokers() {
+ naviSideBar
+ .openSideMenu(BROKERS);
+ brokersList
+ .waitUntilScreenReady();
+ }
@Step
protected void navigateToTopics() {
@@ -127,7 +135,57 @@ protected void navigateToTopicsAndOpenDetails(String topicName) {
topicDetails
.waitUntilScreenReady();
}
-
+
+ @Step
+ protected void navigateToConsumers() {
+ naviSideBar
+ .openSideMenu(CONSUMERS);
+ consumersList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToSchemaRegistry() {
+ naviSideBar
+ .openSideMenu(SCHEMA_REGISTRY);
+ schemaRegistryList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
+ navigateToSchemaRegistry();
+ schemaRegistryList
+ .openSchema(schemaName);
+ schemaDetails
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToConnectors() {
+ naviSideBar
+ .openSideMenu(KAFKA_CONNECT);
+ kafkaConnectList
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToConnectorsAndOpenDetails(String connectorName) {
+ navigateToConnectors();
+ kafkaConnectList
+ .openConnector(connectorName);
+ connectorDetails
+ .waitUntilScreenReady();
+ }
+
+ @Step
+ protected void navigateToKsqlDb() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady();
+ }
+
@Step
protected void verifyElementsCondition(List<SelenideElement> elementList, Condition expectedCondition) {
SoftAssert softly = new SoftAssert();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
index d768f939ce0..35188e3f45e 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/suite/TopicsTest.java
@@ -21,75 +21,81 @@ public void testCaseA() {
public void testCaseB() {
}
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(21)
+ @Test()
+ public void testCaseC() {
+ }
+
@Automation(state = NOT_AUTOMATED)
@QaseId(22)
@Test
- public void testCaseC() {
+ public void testCaseD() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(46)
@Test
- public void testCaseD() {
+ public void testCaseE() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(47)
@Test
- public void testCaseE() {
+ public void testCaseF() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(48)
@Test
- public void testCaseF() {
+ public void testCaseG() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(49)
@Test
- public void testCaseG() {
+ public void testCaseH() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(50)
@Test
- public void testCaseH() {
+ public void testCaseI() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(57)
@Test
- public void testCaseI() {
+ public void testCaseJ() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(58)
@Test
- public void testCaseJ() {
+ public void testCaseK() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(269)
@Test
- public void testCaseK() {
+ public void testCaseL() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(270)
@Test
- public void testCaseL() {
+ public void testCaseM() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(271)
@Test
- public void testCaseM() {
+ public void testCaseN() {
}
@Automation(state = NOT_AUTOMATED)
@QaseId(272)
@Test
- public void testCaseN() {
+ public void testCaseO() {
}
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
index a0c4f05da23..0ea5d2eb827 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/SmokeTest.java
@@ -1,13 +1,20 @@
package com.provectus.kafka.ui.smokeSuite;
import com.codeborne.selenide.Condition;
+import com.codeborne.selenide.WebDriverRunner;
import com.provectus.kafka.ui.BaseTest;
+import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
+import org.testng.Assert;
import org.testng.annotations.Test;
import java.util.stream.Collectors;
import java.util.stream.Stream;
+import static com.provectus.kafka.ui.settings.BaseSource.BROWSER;
+import static com.provectus.kafka.ui.variables.Browser.LOCAL;
+import static com.provectus.kafka.ui.variables.Url.*;
+
public class SmokeTest extends BaseTest {
@QaseId(198)
@@ -20,4 +27,28 @@ public void checkBasePageElements() {
Stream.concat(topPanel.getAllEnabledElements().stream(), naviSideBar.getAllMenuButtons().stream())
.collect(Collectors.toList()), Condition.enabled);
}
+
+ @QaseId(45)
+ @Test
+ public void checkUrlWhileNavigating() {
+ navigateToBrokers();
+ verifyCurrentUrl(BROKERS_LIST_URL);
+ navigateToTopics();
+ verifyCurrentUrl(TOPICS_LIST_URL);
+ navigateToConsumers();
+ verifyCurrentUrl(CONSUMERS_LIST_URL);
+ navigateToSchemaRegistry();
+ verifyCurrentUrl(SCHEMA_REGISTRY_LIST_URL);
+ navigateToConnectors();
+ verifyCurrentUrl(KAFKA_CONNECT_LIST_URL);
+ navigateToKsqlDb();
+ verifyCurrentUrl(KSQL_DB_LIST_URL);
+ }
+
+ @Step
+ private void verifyCurrentUrl(String expectedUrl) {
+ String host = BROWSER.equals(LOCAL) ? "localhost" : "host.testcontainers.internal";
+ Assert.assertEquals(WebDriverRunner.getWebDriver().getCurrentUrl(),
+ String.format(expectedUrl, host), "getCurrentUrl()");
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
index 7b11aa6556c..c9029e30ae5 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/brokers/BrokersTest.java
@@ -2,12 +2,10 @@
import com.codeborne.selenide.Condition;
import com.provectus.kafka.ui.BaseTest;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.Test;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.BROKERS;
import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS;
public class BrokersTest extends BaseTest {
@@ -40,12 +38,4 @@ public void checkExistingBrokersInCluster() {
verifyElementsCondition(brokersConfigTab.getEditButtons(), Condition.enabled);
Assert.assertTrue(brokersConfigTab.isSearchByKeyVisible(), "isSearchByKeyVisible()");
}
-
- @Step
- private void navigateToBrokers() {
- naviSideBar
- .openSideMenu(BROKERS);
- brokersList
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
index 5291648014e..c54138d8c5a 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/connectors/ConnectorsTest.java
@@ -3,7 +3,6 @@
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Connector;
import com.provectus.kafka.ui.models.Topic;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -14,7 +13,6 @@
import java.util.List;
import static com.provectus.kafka.ui.pages.BasePage.AlertHeader.SUCCESS;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KAFKA_CONNECT;
import static com.provectus.kafka.ui.utilities.FileUtils.getResourceAsString;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -107,21 +105,4 @@ public void afterClass() {
apiService.deleteConnector(CONNECT_NAME, connector.getName()));
TOPIC_LIST.forEach(topic -> apiService.deleteTopic(topic.getName()));
}
-
- @Step
- private void navigateToConnectors() {
- naviSideBar
- .openSideMenu(KAFKA_CONNECT);
- kafkaConnectList
- .waitUntilScreenReady();
- }
-
- @Step
- private void navigateToConnectorsAndOpenDetails(String connectorName) {
- navigateToConnectors();
- kafkaConnectList
- .openConnector(connectorName);
- connectorDetails
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
index 3627f321df6..ab1705922a1 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/ksqlDb/KsqlDbTest.java
@@ -8,7 +8,6 @@
import org.testng.annotations.Test;
import org.testng.asserts.SoftAssert;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.KSQL_DB;
import static com.provectus.kafka.ui.pages.ksqlDb.enums.KsqlQueryConfig.SHOW_TABLES;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -34,10 +33,8 @@ public void beforeClass() {
@QaseId(41)
@Test(priority = 1)
public void checkShowTablesRequestExecution() {
- naviSideBar
- .openSideMenu(KSQL_DB);
+ navigateToKsqlDb();
ksqlDbList
- .waitUntilScreenReady()
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
@@ -53,10 +50,8 @@ public void checkShowTablesRequestExecution() {
@QaseId(86)
@Test(priority = 2)
public void clearResultsForExecutedRequest() {
- naviSideBar
- .openSideMenu(KSQL_DB);
+ navigateToKsqlDb();
ksqlDbList
- .waitUntilScreenReady()
.clickExecuteKsqlRequestBtn();
ksqlQueryForm
.waitUntilScreenReady()
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
index 56ff2c0ff04..bc9519ee500 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/schemas/SchemasTest.java
@@ -4,7 +4,6 @@
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.api.model.CompatibilityLevel;
import com.provectus.kafka.ui.models.Schema;
-import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
@@ -15,7 +14,6 @@
import java.util.ArrayList;
import java.util.List;
-import static com.provectus.kafka.ui.pages.NaviSideBar.SideMenuOption.SCHEMA_REGISTRY;
import static com.provectus.kafka.ui.utilities.FileUtils.fileToString;
public class SchemasTest extends BaseTest {
@@ -188,21 +186,4 @@ public void deleteSchemaProtobuf() {
public void afterClass() {
SCHEMA_LIST.forEach(schema -> apiService.deleteSchema(schema.getName()));
}
-
- @Step
- private void navigateToSchemaRegistry() {
- naviSideBar
- .openSideMenu(SCHEMA_REGISTRY);
- schemaRegistryList
- .waitUntilScreenReady();
- }
-
- @Step
- private void navigateToSchemaRegistryAndOpenDetails(String schemaName) {
- navigateToSchemaRegistry();
- schemaRegistryList
- .openSchema(schemaName);
- schemaDetails
- .waitUntilScreenReady();
- }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
index 9f8d18b6855..dc7fc403c82 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/MessagesTest.java
@@ -147,21 +147,6 @@ public void checkPurgeMessagePossibility() {
softly.assertAll();
}
- @Ignore
- @Issue("https://github.com/provectus/kafka-ui/issues/2819")
- @QaseId(21)
- @Test(priority = 5)
- public void copyMessageFromTopicProfile() {
- navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES)
- .getRandomMessage()
- .openDotMenu()
- .clickCopyToClipBoard();
- Assert.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Copied successfully!"),
- "isAlertWithMessageVisible()");
- }
-
@Ignore
@Issue("https://github.com/provectus/kafka-ui/issues/2394")
@QaseId(15)
| test | test | 2023-03-07T07:39:15 | "2023-03-03T10:51:04Z" | ArthurNiedial | train |
provectus/kafka-ui/3437_3451 | provectus/kafka-ui | provectus/kafka-ui/3437 | provectus/kafka-ui/3451 | [
"keyword_pr_to_issue"
] | 4d20cb695804d4247731dffc62ebdb5d685647be | 334ba3df99dfc84385faace167f6410c8ce0be91 | [
"Hello there yardenshoham! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"I want to claim this issue"
] | [] | "2023-03-07T06:41:26Z" | [
"status/accepted",
"scope/k8s"
] | Request for a `global.imageRegistry` parameter | **Describe the bug**
This allows anyone that uses Bitnami helm charts to have this chart as a subchart without explicitly specifying `image.registry`.
**Set up**
I have a helm chart with 5 subcharts:
- service A (respects `global.imageRegistry`)
- service B (respects `global.imageRegistry`)
- service C (respects `global.imageRegistry`)
- kafka from Bitnami (respects `global.imageRegistry`)
- kafka-ui (I have to manually set `kafka-ui.image.registry` to match `global.imageRegistry`)
**Steps to Reproduce**
Set up a Helm chart with kafka-ui as a subchart.
**Expected behavior**
In my ideal setup my `values.yaml` looks like:
```yaml
global:
  imageRegistry: container-registry.my-domain.com
```
But because of the way things are now, I have to write it like this:
```yaml
global:
  imageRegistry: container-registry.my-domain.com
kafka-ui:
  image:
    registry: container-registry.my-domain.com
```
I'm willing to contribute the code to make this happen if the idea is accepted. | [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/_helpers.tpl"
] | [
"charts/kafka-ui/Chart.yaml",
"charts/kafka-ui/templates/_helpers.tpl"
] | [] | diff --git a/charts/kafka-ui/Chart.yaml b/charts/kafka-ui/Chart.yaml
index 6e5f0ee2d5b..4f36aa7f0b1 100644
--- a/charts/kafka-ui/Chart.yaml
+++ b/charts/kafka-ui/Chart.yaml
@@ -2,6 +2,6 @@ apiVersion: v2
name: kafka-ui
description: A Helm chart for kafka-UI
type: application
-version: 0.5.3
+version: 0.5.4
appVersion: v0.5.0
icon: https://github.com/provectus/kafka-ui/raw/master/documentation/images/kafka-ui-logo.png
diff --git a/charts/kafka-ui/templates/_helpers.tpl b/charts/kafka-ui/templates/_helpers.tpl
index 510452d4cf3..7155681a44b 100644
--- a/charts/kafka-ui/templates/_helpers.tpl
+++ b/charts/kafka-ui/templates/_helpers.tpl
@@ -68,6 +68,11 @@ This allows us to check if the registry of the image is specified or not.
*/}}
{{- define "kafka-ui.imageName" -}}
{{- $registryName := .Values.image.registry -}}
+{{- if .Values.global }}
+ {{- if .Values.global.imageRegistry }}
+ {{- $registryName = .Values.global.imageRegistry -}}
+ {{- end -}}
+{{- end -}}
{{- $repository := .Values.image.repository -}}
{{- $tag := .Values.image.tag | default .Chart.AppVersion -}}
{{- if $registryName }}
| null | test | test | 2023-03-07T13:24:19 | "2023-03-05T12:14:16Z" | yardenshoham | train |
provectus/kafka-ui/3423_3479 | provectus/kafka-ui | provectus/kafka-ui/3423 | provectus/kafka-ui/3479 | [
"connected"
] | c5d6896ae1f61dea2ace3fdda6e78817eaca6c4b | 4c2d37dd525b38c75135917b96afcb8e8fe72e46 | [] | [] | "2023-03-10T10:38:19Z" | [
"scope/backend",
"type/refactoring",
"status/accepted"
] | Get rid of UtilityClass annotation usage | <img width="789" alt="image" src="https://user-images.githubusercontent.com/1494347/229412156-9aa71568-30af-4585-b181-04f149b2188a.png">
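The issue body is only a screenshot, so as context here is a hedged sketch of the pattern the linked patch applies: Lombok's `@UtilityClass` annotation is replaced by a plain final class with a private constructor and explicitly `static` members (the class and method names below are illustrative, not taken from the codebase):

```java
// Illustrative only. Before the change, @lombok.experimental.UtilityClass supplied the final
// modifier, the private constructor and the implicit "static" on members; after the change
// the same contract is written out by hand, as in the patch below.
final class StringUtil {

  private StringUtil() {
    // no instances
  }

  static String trimmed(String s) {
    return s.trim();
  }
}
```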
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java
index d228843b21b..00b29b3b8b4 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/Oddrn.java
@@ -4,36 +4,34 @@
import java.net.URI;
import java.util.stream.Collectors;
import java.util.stream.Stream;
-import lombok.experimental.UtilityClass;
-import org.opendatadiscovery.oddrn.Generator;
import org.opendatadiscovery.oddrn.model.AwsS3Path;
import org.opendatadiscovery.oddrn.model.KafkaConnectorPath;
import org.opendatadiscovery.oddrn.model.KafkaPath;
-@UtilityClass
-public class Oddrn {
+public final class Oddrn {
- private static final Generator GENERATOR = new Generator();
+ private Oddrn() {
+ }
- String clusterOddrn(KafkaCluster cluster) {
+ static String clusterOddrn(KafkaCluster cluster) {
return KafkaPath.builder()
.cluster(bootstrapServersForOddrn(cluster.getBootstrapServers()))
.build()
.oddrn();
}
- KafkaPath topicOddrnPath(KafkaCluster cluster, String topic) {
+ static KafkaPath topicOddrnPath(KafkaCluster cluster, String topic) {
return KafkaPath.builder()
.cluster(bootstrapServersForOddrn(cluster.getBootstrapServers()))
.topic(topic)
.build();
}
- String topicOddrn(KafkaCluster cluster, String topic) {
+ static String topicOddrn(KafkaCluster cluster, String topic) {
return topicOddrnPath(cluster, topic).oddrn();
}
- String awsS3Oddrn(String bucket, String key) {
+ static String awsS3Oddrn(String bucket, String key) {
return AwsS3Path.builder()
.bucket(bucket)
.key(key)
@@ -41,14 +39,14 @@ String awsS3Oddrn(String bucket, String key) {
.oddrn();
}
- String connectDataSourceOddrn(String connectUrl) {
+ static String connectDataSourceOddrn(String connectUrl) {
return KafkaConnectorPath.builder()
.host(normalizedConnectHosts(connectUrl))
.build()
.oddrn();
}
- private String normalizedConnectHosts(String connectUrlStr) {
+ private static String normalizedConnectHosts(String connectUrlStr) {
return Stream.of(connectUrlStr.split(","))
.map(String::trim)
.sorted()
@@ -61,7 +59,7 @@ private String normalizedConnectHosts(String connectUrlStr) {
.collect(Collectors.joining(","));
}
- String connectorOddrn(String connectUrl, String connectorName) {
+ static String connectorOddrn(String connectUrl, String connectorName) {
return KafkaConnectorPath.builder()
.host(normalizedConnectHosts(connectUrl))
.connector(connectorName)
@@ -69,7 +67,7 @@ String connectorOddrn(String connectUrl, String connectorName) {
.oddrn();
}
- private String bootstrapServersForOddrn(String bootstrapServers) {
+ private static String bootstrapServersForOddrn(String bootstrapServers) {
return Stream.of(bootstrapServers.split(","))
.map(String::trim)
.sorted()
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
index 538bbde1a8a..cc799a9e109 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/AvroExtractor.java
@@ -1,18 +1,18 @@
package com.provectus.kafka.ui.service.integration.odd.schema;
import com.google.common.collect.ImmutableSet;
-import com.provectus.kafka.ui.service.integration.odd.Oddrn;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import java.util.ArrayList;
import java.util.List;
-import lombok.experimental.UtilityClass;
import org.apache.avro.Schema;
import org.opendatadiscovery.client.model.DataSetField;
import org.opendatadiscovery.client.model.DataSetFieldType;
import org.opendatadiscovery.oddrn.model.KafkaPath;
-@UtilityClass
-class AvroExtractor {
+final class AvroExtractor {
+
+ private AvroExtractor() {
+ }
static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
var schema = new Schema.Parser().parse(subject.getSchema());
@@ -31,14 +31,14 @@ static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, b
return result;
}
- private void extract(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink
+ private static void extract(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink
) {
switch (schema.getType()) {
case RECORD -> extractRecord(schema, parentOddr, oddrn, name, doc, nullable, registeredRecords, sink);
@@ -49,12 +49,12 @@ private void extract(Schema schema,
}
}
- private DataSetField createDataSetField(String name,
- String doc,
- String parentOddrn,
- String oddrn,
- Schema schema,
- Boolean nullable) {
+ private static DataSetField createDataSetField(String name,
+ String doc,
+ String parentOddrn,
+ String oddrn,
+ Schema schema,
+ Boolean nullable) {
return new DataSetField()
.name(name)
.description(doc)
@@ -63,14 +63,14 @@ private DataSetField createDataSetField(String name,
.type(mapSchema(schema, nullable));
}
- private void extractRecord(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractRecord(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
if (!isRoot) {
sink.add(createDataSetField(name, doc, parentOddr, oddrn, schema, nullable));
@@ -99,13 +99,13 @@ private void extractRecord(Schema schema,
));
}
- private void extractUnion(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractUnion(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
boolean containsNull = schema.getTypes().stream().map(Schema::getType).anyMatch(t -> t == Schema.Type.NULL);
// if it is not root and there is only 2 values for union (null and smth else)
@@ -149,14 +149,14 @@ private void extractUnion(Schema schema,
}
}
- private void extractArray(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractArray(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
oddrn = isRoot ? parentOddr + "/array" : oddrn;
if (isRoot) {
@@ -176,14 +176,14 @@ private void extractArray(Schema schema,
);
}
- private void extractMap(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractMap(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
oddrn = isRoot ? parentOddr + "/map" : oddrn;
if (isRoot) {
@@ -214,13 +214,13 @@ private void extractMap(Schema schema,
}
- private void extractPrimitive(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- String doc,
- Boolean nullable,
- List<DataSetField> sink) {
+ private static void extractPrimitive(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ String doc,
+ Boolean nullable,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
String primOddrn = isRoot ? (parentOddr + "/" + schema.getType()) : oddrn;
if (isRoot) {
@@ -231,7 +231,7 @@ private void extractPrimitive(Schema schema,
}
}
- private DataSetFieldType.TypeEnum mapType(Schema.Type type) {
+ private static DataSetFieldType.TypeEnum mapType(Schema.Type type) {
return switch (type) {
case INT, LONG -> DataSetFieldType.TypeEnum.INTEGER;
case FLOAT, DOUBLE, FIXED -> DataSetFieldType.TypeEnum.NUMBER;
@@ -246,14 +246,14 @@ private DataSetFieldType.TypeEnum mapType(Schema.Type type) {
};
}
- private DataSetFieldType mapSchema(Schema schema, Boolean nullable) {
+ private static DataSetFieldType mapSchema(Schema schema, Boolean nullable) {
return new DataSetFieldType()
.logicalType(logicalType(schema))
.isNullable(nullable)
.type(mapType(schema.getType()));
}
- private String logicalType(Schema schema) {
+ private static String logicalType(Schema schema) {
return schema.getType() == Schema.Type.RECORD
? schema.getFullName()
: schema.getType().toString().toLowerCase();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
index 746f172b57f..e357db30793 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/DataSetFieldsExtractors.java
@@ -1,19 +1,16 @@
package com.provectus.kafka.ui.service.integration.odd.schema;
-import com.provectus.kafka.ui.service.integration.odd.Oddrn;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import com.provectus.kafka.ui.sr.model.SchemaType;
import java.util.List;
import java.util.Optional;
-import lombok.experimental.UtilityClass;
import org.opendatadiscovery.client.model.DataSetField;
import org.opendatadiscovery.client.model.DataSetFieldType;
import org.opendatadiscovery.oddrn.model.KafkaPath;
-@UtilityClass
-public class DataSetFieldsExtractors {
+public final class DataSetFieldsExtractors {
- public List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ public static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
SchemaType schemaType = Optional.ofNullable(subject.getSchemaType()).orElse(SchemaType.AVRO);
return switch (schemaType) {
case AVRO -> AvroExtractor.extract(subject, topicOddrn, isKey);
@@ -23,7 +20,7 @@ public List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, b
}
- DataSetField rootField(KafkaPath topicOddrn, boolean isKey) {
+ static DataSetField rootField(KafkaPath topicOddrn, boolean isKey) {
var rootOddrn = topicOddrn.oddrn() + "/columns/" + (isKey ? "key" : "value");
return new DataSetField()
.name(isKey ? "key" : "value")
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
index f92e1fc876b..06201b1ce7f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/JsonSchemaExtractor.java
@@ -1,7 +1,6 @@
package com.provectus.kafka.ui.service.integration.odd.schema;
import com.google.common.collect.ImmutableSet;
-import com.provectus.kafka.ui.service.integration.odd.Oddrn;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.json.JsonSchema;
import java.net.URI;
@@ -10,7 +9,6 @@
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nullable;
-import lombok.experimental.UtilityClass;
import org.everit.json.schema.ArraySchema;
import org.everit.json.schema.BooleanSchema;
import org.everit.json.schema.CombinedSchema;
@@ -27,8 +25,10 @@
import org.opendatadiscovery.client.model.MetadataExtension;
import org.opendatadiscovery.oddrn.model.KafkaPath;
-@UtilityClass
-class JsonSchemaExtractor {
+final class JsonSchemaExtractor {
+
+ private JsonSchemaExtractor() {
+ }
static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
Schema schema = new JsonSchema(subject.getSchema()).rawSchema();
@@ -46,13 +46,13 @@ static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, b
return result;
}
- private void extract(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extract(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
if (schema instanceof ReferenceSchema s) {
Optional.ofNullable(s.getReferredSchema())
.ifPresent(refSchema -> extract(refSchema, parentOddr, oddrn, name, nullable, registeredRecords, sink));
@@ -73,12 +73,12 @@ private void extract(Schema schema,
}
}
- private void extractPrimitive(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- List<DataSetField> sink) {
+ private static void extractPrimitive(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
sink.add(
createDataSetField(
@@ -93,12 +93,12 @@ private void extractPrimitive(Schema schema,
);
}
- private void extractUnknown(Schema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- List<DataSetField> sink) {
+ private static void extractUnknown(Schema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
sink.add(
createDataSetField(
@@ -113,13 +113,13 @@ private void extractUnknown(Schema schema,
);
}
- private void extractObject(ObjectSchema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractObject(ObjectSchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
// schemaLocation can be null for empty object schemas (like if it used in anyOf)
@Nullable var schemaLocation = schema.getSchemaLocation();
@@ -162,13 +162,13 @@ private void extractObject(ObjectSchema schema,
});
}
- private void extractArray(ArraySchema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractArray(ArraySchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
boolean isRoot = oddrn == null;
oddrn = isRoot ? parentOddr + "/array" : oddrn;
if (isRoot) {
@@ -208,13 +208,13 @@ private void extractArray(ArraySchema schema,
}
}
- private void extractCombined(CombinedSchema schema,
- String parentOddr,
- String oddrn, //null for root
- String name,
- Boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractCombined(CombinedSchema schema,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ Boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
String combineType = "unknown";
if (schema.getCriterion() == CombinedSchema.ALL_CRITERION) {
combineType = "allOf";
@@ -255,24 +255,24 @@ private void extractCombined(CombinedSchema schema,
}
}
- private String getDescription(Schema schema) {
+ private static String getDescription(Schema schema) {
return Optional.ofNullable(schema.getTitle())
.orElse(schema.getDescription());
}
- private String logicalTypeName(Schema schema) {
+ private static String logicalTypeName(Schema schema) {
return schema.getClass()
.getSimpleName()
.replace("Schema", "");
}
- private DataSetField createDataSetField(Schema schema,
- String name,
- String parentOddrn,
- String oddrn,
- DataSetFieldType.TypeEnum type,
- String logicalType,
- Boolean nullable) {
+ private static DataSetField createDataSetField(Schema schema,
+ String name,
+ String parentOddrn,
+ String oddrn,
+ DataSetFieldType.TypeEnum type,
+ String logicalType,
+ Boolean nullable) {
return new DataSetField()
.name(name)
.parentFieldOddrn(parentOddrn)
@@ -286,7 +286,7 @@ private DataSetField createDataSetField(Schema schema,
);
}
- private DataSetFieldType.TypeEnum mapType(Schema type) {
+ private static DataSetFieldType.TypeEnum mapType(Schema type) {
if (type instanceof NumberSchema) {
return DataSetFieldType.TypeEnum.NUMBER;
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
index b4a53782391..c1316172f30 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/schema/ProtoExtractor.java
@@ -15,20 +15,17 @@
import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
-import com.provectus.kafka.ui.service.integration.odd.Oddrn;
import com.provectus.kafka.ui.sr.model.SchemaSubject;
import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchema;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
-import lombok.experimental.UtilityClass;
import org.opendatadiscovery.client.model.DataSetField;
import org.opendatadiscovery.client.model.DataSetFieldType;
import org.opendatadiscovery.client.model.DataSetFieldType.TypeEnum;
import org.opendatadiscovery.oddrn.model.KafkaPath;
-@UtilityClass
-class ProtoExtractor {
+final class ProtoExtractor {
private static final Set<String> PRIMITIVES_WRAPPER_TYPE_NAMES = Set.of(
BoolValue.getDescriptor().getFullName(),
@@ -42,7 +39,10 @@ class ProtoExtractor {
DoubleValue.getDescriptor().getFullName()
);
- List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
+ private ProtoExtractor() {
+ }
+
+ static List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean isKey) {
Descriptor schema = new ProtobufSchema(subject.getSchema()).toDescriptor();
List<DataSetField> result = new ArrayList<>();
result.add(DataSetFieldsExtractors.rootField(topicOddrn, isKey));
@@ -60,14 +60,14 @@ List<DataSetField> extract(SchemaSubject subject, KafkaPath topicOddrn, boolean
return result;
}
- private void extract(Descriptors.FieldDescriptor field,
- String parentOddr,
- String oddrn, //null for root
- String name,
- boolean nullable,
- boolean repeated,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extract(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ boolean repeated,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
if (repeated) {
extractRepeated(field, parentOddr, oddrn, name, nullable, registeredRecords, sink);
} else if (field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE) {
@@ -79,12 +79,12 @@ private void extract(Descriptors.FieldDescriptor field,
// converts some(!) Protobuf Well-known type (from google.protobuf.* packages)
// see JsonFormat::buildWellKnownTypePrinters for impl details
- private boolean extractProtoWellKnownType(Descriptors.FieldDescriptor field,
- String parentOddr,
- String oddrn, //null for root
- String name,
- boolean nullable,
- List<DataSetField> sink) {
+ private static boolean extractProtoWellKnownType(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ List<DataSetField> sink) {
// all well-known types are messages
if (field.getType() != Descriptors.FieldDescriptor.Type.MESSAGE) {
return false;
@@ -111,13 +111,13 @@ private boolean extractProtoWellKnownType(Descriptors.FieldDescriptor field,
return false;
}
- private void extractRepeated(Descriptors.FieldDescriptor field,
- String parentOddr,
- String oddrn, //null for root
- String name,
- boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractRepeated(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
sink.add(createDataSetField(name, parentOddr, oddrn, TypeEnum.LIST, "repeated", nullable));
String itemName = field.getType() == Descriptors.FieldDescriptor.Type.MESSAGE
@@ -136,13 +136,13 @@ private void extractRepeated(Descriptors.FieldDescriptor field,
);
}
- private void extractMessage(Descriptors.FieldDescriptor field,
- String parentOddr,
- String oddrn, //null for root
- String name,
- boolean nullable,
- ImmutableSet<String> registeredRecords,
- List<DataSetField> sink) {
+ private static void extractMessage(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn, //null for root
+ String name,
+ boolean nullable,
+ ImmutableSet<String> registeredRecords,
+ List<DataSetField> sink) {
if (extractProtoWellKnownType(field, parentOddr, oddrn, name, nullable, sink)) {
return;
}
@@ -173,12 +173,12 @@ private void extractMessage(Descriptors.FieldDescriptor field,
});
}
- private void extractPrimitive(Descriptors.FieldDescriptor field,
- String parentOddr,
- String oddrn,
- String name,
- boolean nullable,
- List<DataSetField> sink) {
+ private static void extractPrimitive(Descriptors.FieldDescriptor field,
+ String parentOddr,
+ String oddrn,
+ String name,
+ boolean nullable,
+ List<DataSetField> sink) {
sink.add(
createDataSetField(
name,
@@ -191,18 +191,18 @@ private void extractPrimitive(Descriptors.FieldDescriptor field,
);
}
- private String getLogicalTypeName(Descriptors.FieldDescriptor f) {
+ private static String getLogicalTypeName(Descriptors.FieldDescriptor f) {
return f.getType() == Descriptors.FieldDescriptor.Type.MESSAGE
? f.getMessageType().getFullName()
: f.getType().name().toLowerCase();
}
- private DataSetField createDataSetField(String name,
- String parentOddrn,
- String oddrn,
- TypeEnum type,
- String logicalType,
- Boolean nullable) {
+ private static DataSetField createDataSetField(String name,
+ String parentOddrn,
+ String oddrn,
+ TypeEnum type,
+ String logicalType,
+ Boolean nullable) {
return new DataSetField()
.name(name)
.parentFieldOddrn(parentOddrn)
@@ -216,7 +216,7 @@ private DataSetField createDataSetField(String name,
}
- private TypeEnum mapType(Descriptors.FieldDescriptor.Type type) {
+ private static TypeEnum mapType(Descriptors.FieldDescriptor.Type type) {
return switch (type) {
case INT32, INT64, SINT32, SFIXED32, SINT64, UINT32, UINT64, FIXED32, FIXED64, SFIXED64 -> TypeEnum.INTEGER;
case FLOAT, DOUBLE -> TypeEnum.NUMBER;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
index 7f5b8c45f2a..31d90244ebe 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.util;
-import com.provectus.kafka.ui.config.ClustersProperties;
+import static com.provectus.kafka.ui.config.ClustersProperties.TruststoreConfig;
+
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
@@ -13,38 +14,36 @@
import java.util.Properties;
import java.util.function.Supplier;
import javax.annotation.Nullable;
-import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.TrustManagerFactory;
-import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.springframework.util.ResourceUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.util.function.Tuple2;
-import reactor.util.function.Tuples;
@Slf4j
-@UtilityClass
-public class KafkaServicesValidation {
+public final class KafkaServicesValidation {
+
+ private KafkaServicesValidation() {
+ }
- private Mono<ApplicationPropertyValidationDTO> valid() {
+ private static Mono<ApplicationPropertyValidationDTO> valid() {
return Mono.just(new ApplicationPropertyValidationDTO().error(false));
}
- private Mono<ApplicationPropertyValidationDTO> invalid(String errorMsg) {
+ private static Mono<ApplicationPropertyValidationDTO> invalid(String errorMsg) {
return Mono.just(new ApplicationPropertyValidationDTO().error(true).errorMessage(errorMsg));
}
- private Mono<ApplicationPropertyValidationDTO> invalid(Throwable th) {
+ private static Mono<ApplicationPropertyValidationDTO> invalid(Throwable th) {
return Mono.just(new ApplicationPropertyValidationDTO().error(true).errorMessage(th.getMessage()));
}
/**
* Returns error msg, if any.
*/
- public Optional<String> validateTruststore(ClustersProperties.TruststoreConfig truststoreConfig) {
+ public static Optional<String> validateTruststore(TruststoreConfig truststoreConfig) {
if (truststoreConfig.getTruststoreLocation() != null && truststoreConfig.getTruststorePassword() != null) {
try {
KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
@@ -63,10 +62,10 @@ public Optional<String> validateTruststore(ClustersProperties.TruststoreConfig t
return Optional.empty();
}
- public Mono<ApplicationPropertyValidationDTO> validateClusterConnection(String bootstrapServers,
- Properties clusterProps,
- @Nullable
- ClustersProperties.TruststoreConfig ssl) {
+ public static Mono<ApplicationPropertyValidationDTO> validateClusterConnection(String bootstrapServers,
+ Properties clusterProps,
+ @Nullable
+ TruststoreConfig ssl) {
Properties properties = new Properties();
SslPropertiesUtil.addKafkaSslProperties(ssl, properties);
properties.putAll(clusterProps);
@@ -93,7 +92,7 @@ public Mono<ApplicationPropertyValidationDTO> validateClusterConnection(String b
});
}
- public Mono<ApplicationPropertyValidationDTO> validateSchemaRegistry(
+ public static Mono<ApplicationPropertyValidationDTO> validateSchemaRegistry(
Supplier<ReactiveFailover<KafkaSrClientApi>> clientSupplier) {
ReactiveFailover<KafkaSrClientApi> client;
try {
@@ -108,7 +107,7 @@ public Mono<ApplicationPropertyValidationDTO> validateSchemaRegistry(
.onErrorResume(KafkaServicesValidation::invalid);
}
- public Mono<ApplicationPropertyValidationDTO> validateConnect(
+ public static Mono<ApplicationPropertyValidationDTO> validateConnect(
Supplier<ReactiveFailover<KafkaConnectClientApi>> clientSupplier) {
ReactiveFailover<KafkaConnectClientApi> client;
try {
@@ -123,7 +122,8 @@ public Mono<ApplicationPropertyValidationDTO> validateConnect(
.onErrorResume(KafkaServicesValidation::invalid);
}
- public Mono<ApplicationPropertyValidationDTO> validateKsql(Supplier<ReactiveFailover<KsqlApiClient>> clientSupplier) {
+ public static Mono<ApplicationPropertyValidationDTO> validateKsql(
+ Supplier<ReactiveFailover<KsqlApiClient>> clientSupplier) {
ReactiveFailover<KsqlApiClient> client;
try {
client = clientSupplier.get();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java
index 48ff7ff1214..5ed21c6a6e2 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaVersion.java
@@ -1,11 +1,12 @@
package com.provectus.kafka.ui.util;
-import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
-@UtilityClass
@Slf4j
-public class KafkaVersion {
+public final class KafkaVersion {
+
+ private KafkaVersion() {
+ }
public static float parse(String version) throws NumberFormatException {
log.trace("Parsing cluster version [{}]", version);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java
index ea120076373..4d157fbcb5f 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/SslPropertiesUtil.java
@@ -1,27 +1,17 @@
package com.provectus.kafka.ui.util;
import com.provectus.kafka.ui.config.ClustersProperties;
-import io.netty.handler.ssl.SslContext;
-import io.netty.handler.ssl.SslContextBuilder;
-import java.io.FileInputStream;
-import java.security.KeyStore;
import java.util.Properties;
import javax.annotation.Nullable;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.TrustManagerFactory;
-import lombok.SneakyThrows;
-import lombok.experimental.UtilityClass;
import org.apache.kafka.common.config.SslConfigs;
-import org.springframework.http.client.reactive.ReactorClientHttpConnector;
-import org.springframework.util.ResourceUtils;
-import reactor.netty.http.client.HttpClient;
-@UtilityClass
-public class SslPropertiesUtil {
+public final class SslPropertiesUtil {
- public void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig,
- Properties sink) {
+ private SslPropertiesUtil() {
+ }
+
+ public static void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig,
+ Properties sink) {
if (truststoreConfig != null && truststoreConfig.getTruststoreLocation() != null) {
sink.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreConfig.getTruststoreLocation());
if (truststoreConfig.getTruststorePassword() != null) {
| null | train | test | 2023-03-09T20:30:58 | "2023-03-01T08:01:39Z" | Haarolean | train |
provectus/kafka-ui/3430_3481 | provectus/kafka-ui | provectus/kafka-ui/3430 | provectus/kafka-ui/3481 | [
"connected"
] | de21721e00b5109d6792630707db6cff568df824 | e3ee4c7fa7dae75d92b715ff7efa96e22455913c | [] | [
"```suggestion\r\n if (appInfo.hasDynamicConfig && clusters.isSuccess && clusters.data.length === 0) {\r\n```",
"why non exact?",
"understood",
"done ",
"we use custom `render` method. Pls check other components",
"@David-DB88 why we use `getNonExactPath` here?",
"removed it ",
"replaced with custom render ",
"```suggestion\r\nconst mockedNavigate = jest.fn();\r\n```",
"done"
] | "2023-03-10T13:15:04Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted",
"area/wizard"
] | Redirect the user to the wizard page if no clusters present upon app startup | If:
1. there are no clusters configured
2. dynamic config is ON
then redirect the user to the wizard page initially | [
"kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx",
"kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx"
] | [
"kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx",
"kafka-ui-react-app/src/components/Dashboard/__test__/Dashboard.spec.tsx",
"kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx"
] | [
"kafka-ui-react-app/src/lib/testHelpers.tsx"
] | diff --git a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
index 46c35d50794..7eab4c1d2ff 100644
--- a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
+++ b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
@@ -1,4 +1,4 @@
-import React from 'react';
+import React, { useEffect } from 'react';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
@@ -11,6 +11,7 @@ import useBoolean from 'lib/hooks/useBoolean';
import { Button } from 'components/common/Button/Button';
import { clusterNewConfigPath } from 'lib/paths';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
+import { useNavigate } from 'react-router-dom';
import * as S from './Dashboard.styled';
import ClusterName from './ClusterName';
@@ -20,7 +21,7 @@ const Dashboard: React.FC = () => {
const clusters = useClusters();
const { value: showOfflineOnly, toggle } = useBoolean(false);
const appInfo = React.useContext(GlobalSettingsContext);
-
+ const navigate = useNavigate();
const config = React.useMemo(() => {
const clusterList = clusters.data || [];
const offlineClusters = clusterList.filter(
@@ -55,6 +56,12 @@ const Dashboard: React.FC = () => {
return initialColumns;
}, []);
+ useEffect(() => {
+ if (appInfo.hasDynamicConfig && !clusters.data) {
+ navigate(clusterNewConfigPath);
+ }
+ }, [clusters, appInfo.hasDynamicConfig]);
+
return (
<>
<PageHeading text="Dashboard" />
diff --git a/kafka-ui-react-app/src/components/Dashboard/__test__/Dashboard.spec.tsx b/kafka-ui-react-app/src/components/Dashboard/__test__/Dashboard.spec.tsx
new file mode 100644
index 00000000000..e9141e980e4
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Dashboard/__test__/Dashboard.spec.tsx
@@ -0,0 +1,45 @@
+import React from 'react';
+import { useClusters } from 'lib/hooks/api/clusters';
+import Dashboard from 'components/Dashboard/Dashboard';
+import { Cluster, ServerStatus } from 'generated-sources';
+import { render } from 'lib/testHelpers';
+
+interface DataType {
+ data: Cluster[] | undefined;
+}
+jest.mock('lib/hooks/api/clusters');
+const mockedNavigate = jest.fn();
+jest.mock('react-router-dom', () => ({
+ ...jest.requireActual('react-router-dom'),
+ useNavigate: () => mockedNavigate,
+}));
+describe('Dashboard component', () => {
+ const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
+ const useClustersMock = useClusters as jest.Mock;
+ useClustersMock.mockReturnValue(data);
+ render(<Dashboard />, {
+ globalSettings: { hasDynamicConfig },
+ });
+ };
+ it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
+ await renderComponent(true, { data: undefined });
+
+ expect(mockedNavigate).toHaveBeenCalled();
+ });
+
+ it('should not navigate to new cluster config page when there are clusters', async () => {
+ await renderComponent(true, {
+ data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
+ });
+
+ expect(mockedNavigate).not.toHaveBeenCalled();
+ });
+
+ it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
+ await renderComponent(false, {
+ data: [],
+ });
+
+ expect(mockedNavigate).not.toHaveBeenCalled();
+ });
+});
diff --git a/kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx b/kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx
index 4de05307b11..563fb175f32 100644
--- a/kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx
+++ b/kafka-ui-react-app/src/components/contexts/GlobalSettingsContext.tsx
@@ -2,7 +2,7 @@ import { useAppInfo } from 'lib/hooks/api/appConfig';
import React from 'react';
import { ApplicationInfoEnabledFeaturesEnum } from 'generated-sources';
-interface GlobalSettingsContextProps {
+export interface GlobalSettingsContextProps {
hasDynamicConfig: boolean;
}
| diff --git a/kafka-ui-react-app/src/lib/testHelpers.tsx b/kafka-ui-react-app/src/lib/testHelpers.tsx
index 508904d1466..42539a0aac9 100644
--- a/kafka-ui-react-app/src/lib/testHelpers.tsx
+++ b/kafka-ui-react-app/src/lib/testHelpers.tsx
@@ -26,7 +26,10 @@ import {
} from '@tanstack/react-query';
import { ConfirmContextProvider } from 'components/contexts/ConfirmContext';
import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationModal';
-import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
+import {
+ GlobalSettingsContext,
+ GlobalSettingsContextProps,
+} from 'components/contexts/GlobalSettingsContext';
import { UserInfoRolesAccessContext } from 'components/contexts/UserInfoRolesAccessContext';
import { RolesType, modifyRolesData } from './permissions';
@@ -35,6 +38,7 @@ interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
preloadedState?: Partial<RootState>;
store?: Store<Partial<RootState>, AnyAction>;
initialEntries?: MemoryRouterProps['initialEntries'];
+ globalSettings?: GlobalSettingsContextProps;
userInfo?: {
roles?: RolesType;
rbacFlag: boolean;
@@ -110,6 +114,7 @@ const customRender = (
preloadedState,
}),
initialEntries,
+ globalSettings = { hasDynamicConfig: false },
userInfo,
...renderOptions
}: CustomRenderOptions = {}
@@ -119,7 +124,7 @@ const customRender = (
children,
}) => (
<TestQueryClientProvider>
- <GlobalSettingsContext.Provider value={{ hasDynamicConfig: false }}>
+ <GlobalSettingsContext.Provider value={globalSettings}>
<ThemeProvider theme={theme}>
<TestUserInfoProvider data={userInfo}>
<ConfirmContextProvider>
| train | test | 2023-04-03T07:10:14 | "2023-03-02T08:52:28Z" | Haarolean | train |
provectus/kafka-ui/3476_3500 | provectus/kafka-ui | provectus/kafka-ui/3476 | provectus/kafka-ui/3500 | [
"connected"
] | 1117b296a7843272a8827099b9220aee4e5d9ab5 | 4d03802a5dc77f6b327485484eb63106a87152b5 | [
"@akamensky \r\nThank you for the issue!\r\n\r\n In 0.6 SchemaRegistry serde is **not** trying to find schema using subject based on topic name (unless `schemaRegistryCheckSchemaExistenceForDeserialize` property is set). It only relies on schema ID from message (as you expected).\r\n\r\nCouple of questions for investigation:\r\n1. Is `SchemaRegistry` visible in serdes list in UI ? \r\n2. Do you have truststore / keystore properties set for SchemaRegistry?\r\n3. Do you use auth for SchemaRegistry ? \r\n\r\nThanks in advance!\r\n\r\n\r\n",
"@iliax \n\n- Yes\n- No\n- No\n\nI can see ScheaRegistry in the list of decoders. We don't use auth or keystore. ",
"@akamensky \r\nok, can you please verify that UI is passing valid serde name to backend? \r\nTo do it pls see payload that UI sends to `GET /messages` endpoint in browser ? \r\nshould be look like \r\n<img width=\"386\" alt=\"Screenshot 2023-03-10 at 11 46 18\" src=\"https://user-images.githubusercontent.com/702205/224255117-5fd0a306-ec53-44b8-b29d-0d2f3b1632ad.png\">\r\n",
"@iliax I'll check on Monday and update here. Thanks. ",
"The call to `GET /messages` looks fine, it passed `keySerde: SchemaRegistry` and `valueSerde: SchemaRegistry` as query parameters.\r\n\r\nHowever I noticed that the request to `/serdes?use=DESERIALIZE` (and the same call with `serialize`) would return empty `schema` field for `SchemaRegistry`. The value was `null`, while `preferred: true`. However there is no problem connecting to SR from the host and there is no log entries for that whatsoever. After I restarted KafkaUI multiple times (namely take the docker stack down completely and create it anew), it seems to have disappeared. And the same call now has correct Avro schema value in `schema` field.\r\n\r\nNo idea what's happening there, but looks like it either does not fetch schemas from SR, or does not even try.",
"@akamensky thank you for info, we will try to reproduce that. For now, it looks like there was an issue connecting with SR and UI is not working property in such situation"
] | [] | "2023-03-15T09:33:27Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Serde 'null' not found when trying to decode message encoded using Avro schema stored in SchemaRegistry | **Describe the bug** (Actual behavior)
Messages in UI are not decoded. Log shows following:
```
javax.validation.ValidationException: Serde 'null' not found
at com.provectus.kafka.ui.service.DeserializationService.lambda$getSerdeForDeserialize$1(DeserializationService.java:70)
```
**Expected behavior**
Messages are correctly decoded
**Set up**
1. Kafka UI upgraded from 0.4.0 to 0.6.0 (nothing else changed in setup)
2. Schema registry contains correct schema
3. Message is encoded with correct schema ID
4. Manual decoding and consumers work as expected
5. **Kafka UI tries to lookup schema using subject based on topic name (such as Topic-key/Topic-value) completely ignoring that schema ID is already encoded in the message...**
**Steps to Reproduce**
See above
**Screenshots**
None
**Additional context**
When SchemaRegistry is used there is a standard way to encode schema ID in the messages, doing something other than that is not the best perhaps?
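For reference, the "standard way" mentioned here is the Confluent wire format: a single magic byte (0x0), a four-byte big-endian schema id, and then the serialized payload. A minimal Java sketch of reading that id straight from the record bytes — class and method names are illustrative, not taken from the kafka-ui codebase — looks like this:
```java
import java.nio.ByteBuffer;

// Sketch only: reads the schema id that SchemaRegistry-aware serializers
// prepend to every record, so no topic-name-based subject lookup is needed.
final class WireFormat {

    private static final byte MAGIC_BYTE = 0x0;

    private WireFormat() {
    }

    static int schemaId(byte[] recordValue) {
        ByteBuffer buffer = ByteBuffer.wrap(recordValue);
        if (buffer.get() != MAGIC_BYTE) {
            throw new IllegalArgumentException("Not a SchemaRegistry-encoded payload");
        }
        return buffer.getInt(); // next four bytes, big-endian by default
    }
}
```
A deserializer that resolves schemas by this embedded id (rather than by a `<topic>-key` / `<topic>-value` subject) decodes such messages regardless of the subject naming strategy.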
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
index 3abe336f044..c3871432e70 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
@@ -125,10 +125,10 @@ const Filters: React.FC<FiltersProps> = ({
getTimestampFromSeekToParam(searchParams)
);
const [keySerde, setKeySerde] = React.useState<string>(
- searchParams.get('keySerde') as string
+ searchParams.get('keySerde') || ''
);
const [valueSerde, setValueSerde] = React.useState<string>(
- searchParams.get('valueSerde') as string
+ searchParams.get('valueSerde') || ''
);
const [savedFilters, setSavedFilters] = React.useState<MessageFilters[]>(
@@ -206,8 +206,8 @@ const Filters: React.FC<FiltersProps> = ({
limit: PER_PAGE,
page: page || 0,
seekDirection,
- keySerde: keySerde || (searchParams.get('keySerde') as string),
- valueSerde: valueSerde || (searchParams.get('valueSerde') as string),
+ keySerde: keySerde || searchParams.get('keySerde') || '',
+ valueSerde: valueSerde || searchParams.get('valueSerde') || '',
};
if (isSeekTypeControlVisible) {
| null | train | test | 2023-03-14T19:03:05 | "2023-03-10T06:38:58Z" | akamensky | train |
provectus/kafka-ui/3135_3505 | provectus/kafka-ui | provectus/kafka-ui/3135 | provectus/kafka-ui/3505 | [
"keyword_pr_to_issue"
] | 4d03802a5dc77f6b327485484eb63106a87152b5 | d42e911379a53bcb13ee07e352138fe3083fad98 | [
"Hello there joschi! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey Jochen, thanks for raising the issue.\r\n\r\nWe've recently changed this behavior (#2755). If you suggest that it should be implied that internal topics start with *double* underscore, shouldn't we count \"_schemas\" (single underscore) as internal topic for example?\r\nI'm not sure about correct behavior here. Also there's an \"internal\" flag in kafka via AC, which we use as well (you can see tha pr for #2755).",
"> If you suggest that it should be implied that internal topics start with _double_ underscore, shouldn't we count \"_schemas\" (single underscore) as internal topic for example?\r\n\r\nA single underscore as an indicator is fine (topics starting with two underscores also start with 1 underscore π).\r\n\r\nWe're using Amazon MSK and MSK Connect which is using the convention with two underscore characters as an indicator for internal topics.\r\n\r\nConfluent also seems to follow the convention with a single underscore character:\r\nhttps://docs.confluent.io/platform/current/control-center/topics/overview.html#topics-page\r\n\r\n> Internal topics names start with an underscore (_) and should not be individually modified. Modifying an internal topic could adversely impact your Confluent Platform installation and result in unexpected behavior.\r\n\r\nThe Javadoc of [``](https://kafka.apache.org/33/javadoc/org/apache/kafka/clients/admin/TopicDescription.html#isInternal--) also hints to a similar convention:\r\n\r\n> Whether the topic is internal to Kafka. An example of an internal topic is the offsets and group management topic: `__consumer_offsets`.\r\n\r\nAnd here's another indicator:\r\nhttps://github.com/apache/kafka/blob/3.3.1/clients/src/main/java/org/apache/kafka/common/internals/Topic.java#L29-L36",
"This issue has been automatically marked as stale because no requested feedback has been provided. It will be closed if no further activity occurs. Thank you for your contributions.",
"The internality flag is being defaulted to a single underscore character now. Can be overridden with \"kafka_clusters_internalTopicPrefix\" property."
] | [
"add @Nullable annotation, or do defaulting on higher level (in service)",
"Done, the first way"
] | "2023-03-15T17:38:10Z" | [
"type/bug",
"scope/backend",
"status/accepted"
] | "Show Internal Topics" not working | TODO: make the "internal" flag prefix configurable, default it to a single underscore character
///
**Describe the bug**
After updating to Kafka UI 0.5.0 (using the Docker image [provectuslabs/kafka-ui:v0.5.0](https://hub.docker.com/layers/provectuslabs/kafka-ui/v0.5.0/images/sha256-cd0fdd18d7d18dd2ba86c237507d7979778405de3e74377fed7582e6a60297d1?context=explore)), the "Show Internal Topics" toggle in the "Topics" view isn't working anymore.
Internal topics are always shown, no matter the state of the toggle.
**Set up**
Docker image [provectuslabs/kafka-ui:v0.5.0](https://hub.docker.com/layers/provectuslabs/kafka-ui/v0.5.0/images/sha256-cd0fdd18d7d18dd2ba86c237507d7979778405de3e74377fed7582e6a60297d1?context=explore) deployed on Kubernetes (no Helm), using the configuration from Kafka UI 0.4.0.
> check commit hash in the top left corner in UI
There is no commit hash shown in the web UI anymore.
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Open Kafka UI 0.5.0.
2. Click on the configured Kafka cluster.
3. Go to "Topics" page.
4. Click on "Show Internal Topics" toggle.
**Expected behavior**
Internal topics (starting with two underscores: `__`) should be hidden when the "Show Internal Topics" toggle is disabled.
**Screenshots**
<img width="1074" alt="Show Internal Topics disabled" src="https://user-images.githubusercontent.com/43951/209367014-c916520e-0207-4275-acd6-9c88517d2d0e.png">
<img width="1079" alt="Show Internal Topics enabled" src="https://user-images.githubusercontent.com/43951/209367073-d5518e1f-cbef-48fc-8580-325c325c02f4.png"> | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index 2cd5e0e69cd..7b6b494ad12 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -25,6 +25,8 @@ public class ClustersProperties {
List<Cluster> clusters = new ArrayList<>();
+ String internalTopicPrefix;
+
@Data
public static class Cluster {
String name;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java
index efabd15f098..43a6012d215 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalTopic.java
@@ -1,9 +1,11 @@
package com.provectus.kafka.ui.model;
+import com.provectus.kafka.ui.config.ClustersProperties;
import java.math.BigDecimal;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
+import javax.annotation.Nullable;
import lombok.Builder;
import lombok.Data;
import org.apache.kafka.clients.admin.ConfigEntry;
@@ -14,6 +16,8 @@
@Builder(toBuilder = true)
public class InternalTopic {
+ ClustersProperties clustersProperties;
+
// from TopicDescription
private final String name;
private final boolean internal;
@@ -40,9 +44,17 @@ public static InternalTopic from(TopicDescription topicDescription,
List<ConfigEntry> configs,
InternalPartitionsOffsets partitionsOffsets,
Metrics metrics,
- InternalLogDirStats logDirInfo) {
+ InternalLogDirStats logDirInfo,
+ @Nullable String internalTopicPrefix) {
var topic = InternalTopic.builder();
- topic.internal(topicDescription.isInternal());
+
+ internalTopicPrefix = internalTopicPrefix == null || internalTopicPrefix.isEmpty()
+ ? "_"
+ : internalTopicPrefix;
+
+ topic.internal(
+ topicDescription.isInternal() || topicDescription.name().startsWith(internalTopicPrefix)
+ );
topic.name(topicDescription.name());
List<InternalPartition> partitions = topicDescription.partitions().stream()
@@ -56,10 +68,10 @@ public static InternalTopic from(TopicDescription topicDescription,
List<InternalReplica> replicas = partition.replicas().stream()
.map(r ->
InternalReplica.builder()
- .broker(r.id())
- .inSync(partition.isr().contains(r))
- .leader(partition.leader() != null && partition.leader().id() == r.id())
- .build())
+ .broker(r.id())
+ .inSync(partition.isr().contains(r))
+ .leader(partition.leader() != null && partition.leader().id() == r.id())
+ .build())
.collect(Collectors.toList());
partitionDto.replicas(replicas);
@@ -79,7 +91,7 @@ public static InternalTopic from(TopicDescription topicDescription,
return partitionDto.build();
})
- .collect(Collectors.toList());
+ .toList();
topic.partitions(partitions.stream().collect(
Collectors.toMap(InternalPartition::getPartition, t -> t)));
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
index b172e2b4c61..9aaff3e9ef0 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/TopicsService.java
@@ -3,6 +3,7 @@
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.exception.TopicMetadataException;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
import com.provectus.kafka.ui.exception.TopicRecreationException;
@@ -52,6 +53,7 @@ public class TopicsService {
private final AdminClientService adminClientService;
private final StatisticsCache statisticsCache;
+ private final ClustersProperties clustersProperties;
@Value("${topic.recreate.maxRetries:15}")
private int recreateMaxRetries;
@Value("${topic.recreate.delay.seconds:1}")
@@ -127,7 +129,8 @@ private List<InternalTopic> createList(List<String> orderedNames,
configs.getOrDefault(t, List.of()),
partitionsOffsets,
metrics,
- logDirInfo
+ logDirInfo,
+ clustersProperties.getInternalTopicPrefix()
))
.collect(toList());
}
@@ -459,7 +462,9 @@ public Mono<List<InternalTopic>> getTopicsForPagination(KafkaCluster cluster) {
stats.getTopicConfigs().getOrDefault(topicName, List.of()),
InternalPartitionsOffsets.empty(),
stats.getMetrics(),
- stats.getLogDirInfo()))
+ stats.getLogDirInfo(),
+ clustersProperties.getInternalTopicPrefix()
+ ))
.collect(toList())
);
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
index 499d5fe965a..35f4c9b1b41 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
@@ -6,7 +6,6 @@
import com.provectus.kafka.ui.pages.BasePage;
import io.qameta.allure.Step;
-import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
@@ -175,6 +174,12 @@ public TopicGridItem getTopicItem(String name) {
.findFirst().orElseThrow();
}
+ @Step
+ public TopicGridItem getAnyNonInternalTopic() {
+ return getNonInternalTopics().stream()
+ .findAny().orElseThrow();
+ }
+
@Step
public List<TopicGridItem> getNonInternalTopics() {
return initGridItems().stream()
@@ -207,8 +212,7 @@ public TopicsList selectItem(boolean select) {
public boolean isInternal() {
boolean internal = false;
try {
- element.$x("./td[2]/a/span").shouldBe(visible, Duration.ofMillis(500));
- internal = true;
+ internal = element.$x("./td[2]/a/span").isDisplayed();
} catch (Throwable ignored) {
}
return internal;
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
index 44b771f1542..8867400fee3 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
@@ -69,7 +69,7 @@ public void shouldListFirst25Topics() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, false, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -95,7 +95,7 @@ public void shouldListFirst25TopicsSortedByNameDescendingOrder() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, false, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()));
init(internalTopics);
@@ -122,7 +122,7 @@ public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, false, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -141,7 +141,7 @@ public void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, false, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -160,7 +160,7 @@ public void shouldListBotInternalAndNonInternalTopics() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, Integer.parseInt(name) % 10 == 0, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -181,7 +181,7 @@ public void shouldListOnlyNonInternalTopics() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, Integer.parseInt(name) % 5 == 0, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -202,7 +202,7 @@ public void shouldListOnlyTopicsContainingOne() {
.map(Objects::toString)
.map(name -> new TopicDescription(name, false, List.of()))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), null,
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()))
);
@@ -224,7 +224,7 @@ public void shouldListTopicsOrderedByPartitionsCount() {
new TopicPartitionInfo(p, null, List.of(), List.of()))
.collect(Collectors.toList())))
.map(topicDescription -> InternalTopic.from(topicDescription, List.of(), InternalPartitionsOffsets.empty(),
- Metrics.empty(), InternalLogDirStats.empty()))
+ Metrics.empty(), InternalLogDirStats.empty(), "_"))
.collect(Collectors.toMap(InternalTopic::getName, Function.identity()));
init(internalTopics);
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index 3f1347cb391..92e580ec32c 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -52,7 +52,8 @@ public class TopicsTest extends BaseTest {
.setMaxSizeOnDisk(NOT_SET);
private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
.setName("topic-for-check-filters-" + randomAlphabetic(5));
- private static final Topic TOPIC_FOR_DELETE = new Topic().setName("topic-to-delete-" + randomAlphabetic(5));
+ private static final Topic TOPIC_FOR_DELETE = new Topic()
+ .setName("topic-to-delete-" + randomAlphabetic(5));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
@@ -89,11 +90,11 @@ public void createTopic() {
void checkAvailableOperations() {
navigateToTopics();
topicsList
- .getTopicItem("my_ksql_1ksql_processing_log")
+ .getTopicItem(TOPIC_TO_UPDATE_AND_DELETE.getName())
.selectItem(true);
verifyElementsCondition(topicsList.getActionButtons(), Condition.enabled);
topicsList
- .getTopicItem("_confluent-ksql-my_ksql_1_command_topic")
+ .getTopicItem(TOPIC_FOR_CHECK_FILTERS.getName())
.selectItem(true);
Assert.assertFalse(topicsList.isCopySelectedTopicBtnEnabled(), "isCopySelectedTopicBtnEnabled()");
}
@@ -456,7 +457,7 @@ public void checkCopyTopicPossibility() {
.setNumberOfPartitions(1);
navigateToTopics();
topicsList
- .getTopicItem("_schemas")
+ .getAnyNonInternalTopic()
.selectItem(true)
.clickCopySelectedTopicBtn();
topicCreateEditForm
| val | test | 2023-03-15T14:53:57 | "2022-12-23T16:33:18Z" | joschi | train |
provectus/kafka-ui/3503_3523 | provectus/kafka-ui | provectus/kafka-ui/3503 | provectus/kafka-ui/3523 | [
"connected"
] | 96a577a98c6069376c5d22ed49cffd3739f1bbdc | 40c198f0fc504923449260af2d9a5c78bb825160 | [] | [] | "2023-03-20T12:49:51Z" | [
"scope/backend",
"status/accepted",
"type/chore",
"area/wizard"
] | Config wizard: Implement remaining cluster properties | - polling/throttling rates
- custom properties | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/WebclientProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index 15436c1cd8b..24b60b57116 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -27,6 +27,8 @@ public class ClustersProperties {
String internalTopicPrefix;
+ Integer adminClientTimeout;
+
PollingProperties polling = new PollingProperties();
@Data
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java
index 37495b50291..2ad0538c0ec 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java
@@ -5,7 +5,6 @@
import lombok.AllArgsConstructor;
import org.openapitools.jackson.nullable.JsonNullableModule;
import org.springframework.beans.factory.ObjectProvider;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.web.ServerProperties;
import org.springframework.boot.autoconfigure.web.reactive.WebFluxProperties;
import org.springframework.context.ApplicationContext;
@@ -15,8 +14,6 @@
import org.springframework.http.server.reactive.HttpHandler;
import org.springframework.jmx.export.MBeanExporter;
import org.springframework.util.StringUtils;
-import org.springframework.util.unit.DataSize;
-import org.springframework.web.reactive.function.client.WebClient;
import org.springframework.web.server.adapter.WebHttpHandlerBuilder;
@Configuration
@@ -52,14 +49,7 @@ public MBeanExporter exporter() {
}
@Bean
- public WebClient webClient(
- @Value("${webclient.max-in-memory-buffer-size:20MB}") DataSize maxBuffSize) {
- return WebClient.builder()
- .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
- .build();
- }
-
- @Bean
+ // will be used by webflux json mapping
public JsonNullableModule jsonNullableModule() {
return new JsonNullableModule();
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/WebclientProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/WebclientProperties.java
new file mode 100644
index 00000000000..ad7732612d4
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/WebclientProperties.java
@@ -0,0 +1,33 @@
+package com.provectus.kafka.ui.config;
+
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.beans.Transient;
+import javax.annotation.PostConstruct;
+import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.util.unit.DataSize;
+
+@Configuration
+@ConfigurationProperties("webclient")
+@Data
+public class WebclientProperties {
+
+ String maxInMemoryBufferSize;
+
+ @PostConstruct
+ public void validate() {
+ validateAndSetDefaultBufferSize();
+ }
+
+ private void validateAndSetDefaultBufferSize() {
+ if (maxInMemoryBufferSize != null) {
+ try {
+ DataSize.parse(maxInMemoryBufferSize);
+ } catch (Exception e) {
+ throw new ValidationException("Invalid format for webclient.maxInMemoryBufferSize");
+ }
+ }
+ }
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
index 886b67b9282..1bd4d7e33e8 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/AdminClientServiceImpl.java
@@ -1,33 +1,36 @@
package com.provectus.kafka.ui.service;
+import com.provectus.kafka.ui.config.ClustersProperties;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
import java.io.Closeable;
import java.time.Instant;
import java.util.Map;
+import java.util.Optional;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
-import lombok.RequiredArgsConstructor;
-import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Mono;
@Service
-@RequiredArgsConstructor
@Slf4j
public class AdminClientServiceImpl implements AdminClientService, Closeable {
+ private static final int DEFAULT_CLIENT_TIMEOUT_MS = 30_000;
+
private static final AtomicLong CLIENT_ID_SEQ = new AtomicLong();
private final Map<String, ReactiveAdminClient> adminClientCache = new ConcurrentHashMap<>();
- @Setter // used in tests
- @Value("${kafka.admin-client-timeout:30000}")
- private int clientTimeout;
+ private final int clientTimeout;
+
+ public AdminClientServiceImpl(ClustersProperties clustersProperties) {
+ this.clientTimeout = Optional.ofNullable(clustersProperties.getAdminClientTimeout())
+ .orElse(DEFAULT_CLIENT_TIMEOUT_MS);
+ }
@Override
public Mono<ReactiveAdminClient> get(KafkaCluster cluster) {
@@ -42,7 +45,7 @@ private Mono<ReactiveAdminClient> createAdminClient(KafkaCluster cluster) {
SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties);
properties.putAll(cluster.getProperties());
properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers());
- properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
+ properties.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout);
properties.putIfAbsent(
AdminClientConfig.CLIENT_ID_CONFIG,
"kafka-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet()
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
index 357a548a637..964b25473d3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaClusterFactory.java
@@ -2,6 +2,7 @@
import com.provectus.kafka.ui.client.RetryingKafkaConnectClient;
import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.config.WebclientProperties;
import com.provectus.kafka.ui.connect.api.KafkaConnectClientApi;
import com.provectus.kafka.ui.emitter.PollingSettings;
import com.provectus.kafka.ui.model.ApplicationPropertyValidationDTO;
@@ -22,9 +23,7 @@
import java.util.Properties;
import java.util.stream.Stream;
import javax.annotation.Nullable;
-import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.util.unit.DataSize;
import org.springframework.web.reactive.function.client.WebClient;
@@ -34,12 +33,18 @@
import reactor.util.function.Tuples;
@Service
-@RequiredArgsConstructor
@Slf4j
public class KafkaClusterFactory {
- @Value("${webclient.max-in-memory-buffer-size:20MB}")
- private DataSize maxBuffSize;
+ private static final DataSize DEFAULT_WEBCLIENT_BUFFER = DataSize.parse("20MB");
+
+ private final DataSize webClientMaxBuffSize;
+
+ public KafkaClusterFactory(WebclientProperties webclientProperties) {
+ this.webClientMaxBuffSize = Optional.ofNullable(webclientProperties.getMaxInMemoryBufferSize())
+ .map(DataSize::parse)
+ .orElse(DEFAULT_WEBCLIENT_BUFFER);
+ }
public KafkaCluster create(ClustersProperties properties,
ClustersProperties.Cluster clusterProperties) {
@@ -140,7 +145,7 @@ private ReactiveFailover<KafkaConnectClientApi> connectClient(ClustersProperties
url -> new RetryingKafkaConnectClient(
connectCluster.toBuilder().address(url).build(),
cluster.getSsl(),
- maxBuffSize
+ webClientMaxBuffSize
),
ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
"No alive connect instances available",
@@ -158,7 +163,7 @@ private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperti
WebClient webClient = new WebClientConfigurator()
.configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
.configureBasicAuth(auth.getUsername(), auth.getPassword())
- .configureBufferSize(maxBuffSize)
+ .configureBufferSize(webClientMaxBuffSize)
.build();
return ReactiveFailover.create(
parseUrlList(clusterProperties.getSchemaRegistry()),
@@ -181,7 +186,7 @@ private ReactiveFailover<KsqlApiClient> ksqlClient(ClustersProperties.Cluster cl
clusterProperties.getKsqldbServerAuth(),
clusterProperties.getSsl(),
clusterProperties.getKsqldbServerSsl(),
- maxBuffSize
+ webClientMaxBuffSize
),
ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
"No live ksqldb instances available",
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
index 2e1b32d3f1b..75c6d25f959 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
@@ -2,6 +2,7 @@
import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.config.WebclientProperties;
import com.provectus.kafka.ui.config.auth.OAuthProperties;
import com.provectus.kafka.ui.config.auth.RoleBasedAccessControlProperties;
import com.provectus.kafka.ui.exception.FileUploadException;
@@ -97,6 +98,7 @@ public PropertiesStructure getCurrentProperties() {
.type(ctx.getEnvironment().getProperty("auth.type"))
.oauth2(getNullableBean(OAuthProperties.class))
.build())
+ .webclient(getNullableBean(WebclientProperties.class))
.build();
}
@@ -204,6 +206,7 @@ public static class PropertiesStructure {
private ClustersProperties kafka;
private RoleBasedAccessControlProperties rbac;
private Auth auth;
+ private WebclientProperties webclient;
@Data
@Builder
@@ -222,6 +225,9 @@ public void initAndValidate() {
Optional.ofNullable(auth)
.flatMap(a -> Optional.ofNullable(a.oauth2))
.ifPresent(OAuthProperties::validate);
+
+ Optional.ofNullable(webclient)
+ .ifPresent(WebclientProperties::validate);
}
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 7b6fd3c1131..aef72444662 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -3467,6 +3467,12 @@ components:
type: array
items:
$ref: '#/components/schemas/Action'
+ webclient:
+ type: object
+ properties:
+ maxInMemoryBufferSize:
+ type: string
+ description: "examples: 20, 12KB, 5MB"
kafka:
type: object
properties:
@@ -3479,6 +3485,10 @@ components:
type: integer
noDataEmptyPolls:
type: integer
+ adminClientTimeout:
+ type: integer
+ internalTopicPrefix:
+ type: string
clusters:
type: array
items:
| null | train | test | 2023-04-14T11:46:49 | "2023-03-15T11:47:00Z" | Haarolean | train |
provectus/kafka-ui/3398_3526 | provectus/kafka-ui | provectus/kafka-ui/3398 | provectus/kafka-ui/3526 | [
"keyword_pr_to_issue"
] | acfe7a4afcce8da9d111d8922a6cdbb311e53450 | deb3dba29e0f1740d40b4ebc9e390c63911596dc | [] | [
"This covers both Truststore and Keystore",
"with `undefined` as the first argument, `trigger` validates all fields provided in `useForm` config",
"get rid of required validation for password field"
] | "2023-03-21T09:00:44Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed",
"severity/medium",
"area/wizard"
] | Wizard: Cluster config validator doesn't switch focus to first field failing validation. | **Describe the bug** (Actual behavior)
On cluster creation or change, 'Validate' button doesn't switch focus to the field with an error.
Since the wizard is quite long, the validation errors might be outside visible page area.
**Expected behavior**
Focus should be set to the first field with a failed validation on 'Validate' button click.
**Set up**
<!--
WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
-->
[dac023e](https://github.com/provectus/kafka-ui/commit/dac023e)
the 'wizard' branch
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
1. Set up the Kafka UI with a setting
`DYNAMIC_CONFIG_ENABLED: true`
2. Open dashboard and click 'Configure' for an existing cluster or 'Configure new cluster'.
3. Leave out some fields empty, e.g. Truststore or Keystore
4. Click 'Validate' button.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->

**Additional context**
<!--
Add any other context about the problem here. E.g.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successfull or same issue occured? Please provide steps as well.
5. Related issues (if there are any).
6. Logs (if available)
7. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
-->
| [
"kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx",
"kafka-ui-react-app/src/components/common/Select/Select.tsx",
"kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx"
] | [
"kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx",
"kafka-ui-react-app/src/components/common/Select/Select.tsx",
"kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx b/kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx
index b3e15aacdf2..1ea90c356ae 100644
--- a/kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx
+++ b/kafka-ui-react-app/src/components/common/Select/ControlledSelect.tsx
@@ -45,6 +45,7 @@ const ControlledSelect: React.FC<ControlledSelectProps> = ({
options={options}
placeholder={placeholder}
disabled={disabled}
+ ref={field.ref}
/>
);
}}
diff --git a/kafka-ui-react-app/src/components/common/Select/Select.tsx b/kafka-ui-react-app/src/components/common/Select/Select.tsx
index b56dbc4959d..a72660d2cb7 100644
--- a/kafka-ui-react-app/src/components/common/Select/Select.tsx
+++ b/kafka-ui-react-app/src/components/common/Select/Select.tsx
@@ -27,90 +27,99 @@ export interface SelectOption {
isLive?: boolean;
}
-const Select: React.FC<SelectProps> = ({
- options = [],
- value,
- defaultValue,
- selectSize = 'L',
- placeholder = '',
- isLive,
- disabled = false,
- onChange,
- isThemeMode,
- ...props
-}) => {
- const [selectedOption, setSelectedOption] = useState(value);
- const [showOptions, setShowOptions] = useState(false);
+const Select = React.forwardRef<HTMLUListElement, SelectProps>(
+ (
+ {
+ options = [],
+ value,
+ defaultValue,
+ selectSize = 'L',
+ placeholder = '',
+ isLive,
+ disabled = false,
+ onChange,
+ isThemeMode,
+ ...props
+ },
+ ref
+ ) => {
+ const [selectedOption, setSelectedOption] = useState(value);
+ const [showOptions, setShowOptions] = useState(false);
- const showOptionsHandler = () => {
- if (!disabled) setShowOptions(!showOptions);
- };
+ const showOptionsHandler = () => {
+ if (!disabled) setShowOptions(!showOptions);
+ };
- const selectContainerRef = useRef(null);
- const clickOutsideHandler = () => setShowOptions(false);
- useClickOutside(selectContainerRef, clickOutsideHandler);
+ const selectContainerRef = useRef(null);
+ const clickOutsideHandler = () => setShowOptions(false);
+ useClickOutside(selectContainerRef, clickOutsideHandler);
- const updateSelectedOption = (option: SelectOption) => {
- if (!option.disabled) {
- setSelectedOption(option.value);
+ const updateSelectedOption = (option: SelectOption) => {
+ if (!option.disabled) {
+ setSelectedOption(option.value);
- if (onChange) {
- onChange(option.value);
+ if (onChange) {
+ onChange(option.value);
+ }
+
+ setShowOptions(false);
}
+ };
- setShowOptions(false);
- }
- };
+ React.useEffect(() => {
+ setSelectedOption(value);
+ }, [isLive, value]);
- React.useEffect(() => {
- setSelectedOption(value);
- }, [isLive, value]);
+ return (
+ <div ref={selectContainerRef}>
+ <S.Select
+ role="listbox"
+ selectSize={selectSize}
+ isLive={isLive}
+ disabled={disabled}
+ onClick={showOptionsHandler}
+ onKeyDown={showOptionsHandler}
+ isThemeMode={isThemeMode}
+ ref={ref}
+ tabIndex={0}
+ {...props}
+ >
+ <S.SelectedOptionWrapper>
+ {isLive && <LiveIcon />}
+ <S.SelectedOption
+ role="option"
+ tabIndex={0}
+ isThemeMode={isThemeMode}
+ >
+ {options.find(
+ (option) => option.value === (defaultValue || selectedOption)
+ )?.label || placeholder}
+ </S.SelectedOption>
+ </S.SelectedOptionWrapper>
+ {showOptions && (
+ <S.OptionList>
+ {options?.map((option) => (
+ <S.Option
+ value={option.value}
+ key={option.value}
+ disabled={option.disabled}
+ onClick={() => updateSelectedOption(option)}
+ tabIndex={0}
+ role="option"
+ >
+ {option.isLive && <LiveIcon />}
+ {option.label}
+ </S.Option>
+ ))}
+ </S.OptionList>
+ )}
+ <DropdownArrowIcon isOpen={showOptions} />
+ </S.Select>
+ </div>
+ );
+ }
+);
- return (
- <div ref={selectContainerRef}>
- <S.Select
- role="listbox"
- selectSize={selectSize}
- isLive={isLive}
- disabled={disabled}
- onClick={showOptionsHandler}
- onKeyDown={showOptionsHandler}
- isThemeMode={isThemeMode}
- {...props}
- >
- <S.SelectedOptionWrapper>
- {isLive && <LiveIcon />}
- <S.SelectedOption
- role="option"
- tabIndex={0}
- isThemeMode={isThemeMode}
- >
- {options.find(
- (option) => option.value === (defaultValue || selectedOption)
- )?.label || placeholder}
- </S.SelectedOption>
- </S.SelectedOptionWrapper>
- {showOptions && (
- <S.OptionList>
- {options?.map((option) => (
- <S.Option
- value={option.value}
- key={option.value}
- disabled={option.disabled}
- onClick={() => updateSelectedOption(option)}
- tabIndex={0}
- role="option"
- >
- {option.isLive && <LiveIcon />}
- {option.label}
- </S.Option>
- ))}
- </S.OptionList>
- )}
- <DropdownArrowIcon isOpen={showOptions} />
- </S.Select>
- </div>
- );
-};
+Select.displayName = 'Select';
export default Select;
diff --git a/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx b/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
index 8bd0c3f1257..2c636c29a27 100644
--- a/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
+++ b/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
@@ -75,7 +75,7 @@ const ClusterConfigForm: React.FC<ClusterConfigFormProps> = ({
const onReset = () => methods.reset();
const onValidate = async () => {
- await trigger();
+ await trigger(undefined, { shouldFocus: true });
if (!methods.formState.isValid) return;
disableForm();
const data = methods.getValues();
| null | train | test | 2023-03-24T11:05:59 | "2023-02-23T16:36:36Z" | BulatKha | train |
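The patch above works by letting react-hook-form reach the custom `Select`: the `Controller`'s `field.ref` is forwarded down to a focusable element, and validation is triggered with `shouldFocus` so the first invalid control receives focus. Below is a minimal sketch of that pattern, assuming react-hook-form v7 — the component and field names are illustrative, not code from this repository:

```tsx
import React from 'react';
import { Controller, useForm } from 'react-hook-form';

type Option = { value: string; label: string };

// Hypothetical focusable select: forwards the ref to an element that can receive focus.
const FocusableSelect = React.forwardRef<HTMLUListElement, { options: Option[] }>(
  ({ options }, ref) => (
    <ul role="listbox" tabIndex={0} ref={ref}>
      {options.map((o) => (
        <li role="option" key={o.value}>
          {o.label}
        </li>
      ))}
    </ul>
  )
);
FocusableSelect.displayName = 'FocusableSelect';

const ExampleForm: React.FC = () => {
  const { control, trigger } = useForm<{ authMethod: string }>();

  return (
    <form>
      <Controller
        name="authMethod"
        control={control}
        rules={{ required: 'Required field' }}
        render={({ field }) => (
          <FocusableSelect
            options={[{ value: 'none', label: 'None' }]}
            ref={field.ref} // without this, shouldFocus has nothing to focus
          />
        )}
      />
      {/* Validate the whole form and move focus to the first invalid field */}
      <button type="button" onClick={() => trigger(undefined, { shouldFocus: true })}>
        Validate
      </button>
    </form>
  );
};

export default ExampleForm;
```

Without the forwarded ref, `shouldFocus` has no DOM node to focus, which is exactly what the added `ref={field.ref}` line in `ControlledSelect.tsx` provides.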
provectus/kafka-ui/3468_3527 | provectus/kafka-ui | provectus/kafka-ui/3468 | provectus/kafka-ui/3527 | [
"keyword_pr_to_issue"
] | bfb80f36b33954a6b8a5a6b31d32e8be67dbfb61 | d06f77ad5338775b56a61b7a24d217a993924df4 | [
"@Haarolean can you hint at how this could be achieved? I want to try",
"I opened a PR with @nisanohana3, #3527 ",
"@yardenshoham yep, I've seen it, thanks! Just assigning the issue"
] | [
"please rename method to `getMessagesBehind(..)` or `calculateMessagesBehind(..)`",
"not needed here, please remove",
"I suggest either `MESSAGES_BEHIND` or `CONSUMER_LAG`",
"Done",
"no need to add description into InternalConsumerGroup\r\nhere how it can be done\r\n```\r\n case MESSAGES_BEHIND -> {\r\n record GroupWithDescr(InternalConsumerGroup g, ConsumerGroupDescription d) { }\r\n\r\n Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->\r\n gwd.g.getMessagesBehind() == null ? 0L : gwd.g.getMessagesBehind());\r\n\r\n var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();\r\n\r\n yield ac.describeConsumerGroups(groupNames)\r\n .flatMap(descriptionsMap -> {\r\n List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();\r\n return getConsumerGroups(ac, descriptions)\r\n .map(g -> Streams.zip(g.stream(), descriptions.stream(), GroupWithDescr::new).toList())\r\n .map(g -> sortAndPaginate(g, comparator, pageNum, perPage, sortOrderDto)\r\n .map(GroupWithDescr::d).toList());\r\n }\r\n );\r\n }\r\n```",
"Done",
"Done",
"Done"
] | "2023-03-21T11:59:37Z" | [
"type/enhancement",
"good first issue",
"scope/backend",
"scope/frontend",
"status/accepted"
] | Sort consumer groups by messages behind | ### Discussed in https://github.com/provectus/kafka-ui/discussions/3467
<div type='discussions-op-text'>
<sup>Originally posted by **yardenshoham** March 9, 2023</sup>
In the UI we can sort by `Group ID`, `Num Of Members` or `State` but not `Num of Topics`, `Messages Behind` nor `Coordinator`.

</div>
I suppose one would need to add logic here? https://github.com/provectus/kafka-ui/blob/334ba3df99dfc84385faace167f6410c8ce0be91/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java#L134-L160 | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
index 9f7e32ed148..21d9efda9ca 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
@@ -89,19 +89,7 @@ private static <T extends ConsumerGroupDTO> T convertToConsumerGroup(
.flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
).collect(Collectors.toSet()).size();
- Long messagesBehind = null;
- // messagesBehind should be undefined if no committed offsets found for topic
- if (!c.getOffsets().isEmpty()) {
- messagesBehind = c.getOffsets().entrySet().stream()
- .mapToLong(e ->
- Optional.ofNullable(c.getEndOffsets())
- .map(o -> o.get(e.getKey()))
- .map(o -> o - e.getValue())
- .orElse(0L)
- ).sum();
- }
-
- consumerGroup.setMessagesBehind(messagesBehind);
+ consumerGroup.setMessagesBehind(c.getMessagesBehind());
consumerGroup.setTopics(numTopics);
consumerGroup.setSimple(c.isSimple());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
index d7b3a732f1f..e8199fa8ef5 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
@@ -20,6 +20,7 @@ public class InternalConsumerGroup {
private final Collection<InternalMember> members;
private final Map<TopicPartition, Long> offsets;
private final Map<TopicPartition, Long> endOffsets;
+ private final Long messagesBehind;
private final String partitionAssignor;
private final ConsumerGroupState state;
private final Node coordinator;
@@ -58,7 +59,25 @@ public static InternalConsumerGroup create(
);
builder.offsets(groupOffsets);
builder.endOffsets(topicEndOffsets);
+ builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
return builder.build();
}
+
+ private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+ Long messagesBehind = null;
+ // messagesBehind should be undefined if no committed offsets found for topic
+ if (!offsets.isEmpty()) {
+ messagesBehind = offsets.entrySet().stream()
+ .mapToLong(e ->
+ Optional.ofNullable(endOffsets)
+ .map(o -> o.get(e.getKey()))
+ .map(o -> o - e.getValue())
+ .orElse(0L)
+ ).sum();
+ }
+
+ return messagesBehind;
+ }
+
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index 024eb3df513..e848146881d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.service;
+import com.google.common.collect.Streams;
import com.google.common.collect.Table;
import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
import com.provectus.kafka.ui.model.InternalConsumerGroup;
@@ -157,6 +158,24 @@ private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdmi
.map(descriptions ->
sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
}
+ case MESSAGES_BEHIND -> {
+ record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
+
+ Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
+ gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+
+ var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+ yield ac.describeConsumerGroups(groupNames)
+ .flatMap(descriptionsMap -> {
+ List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+ return getConsumerGroups(ac, descriptions)
+ .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+ .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+ .map(GroupWithDescr::cgd).toList());
+ }
+ );
+ }
};
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index ea335f282cf..fe9723b2dd3 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -2416,6 +2416,7 @@ components:
- NAME
- MEMBERS
- STATE
+ - MESSAGES_BEHIND
ConsumerGroupsPageResponse:
type: object
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
index feb772ac826..ef8d73f5156 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
@@ -56,9 +56,9 @@ const List = () => {
enableSorting: false,
},
{
+ id: ConsumerGroupOrdering.MESSAGES_BEHIND,
header: 'Messages Behind',
accessorKey: 'messagesBehind',
- enableSorting: false,
},
{
header: 'Coordinator',
| null | val | test | 2023-03-30T08:27:39 | "2023-03-09T12:29:57Z" | yardenshoham | train |
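For context on what `messagesBehind` means in the patch above: consumer lag is the sum, over the group's partitions, of the partition end offset minus the group's committed offset — the same arithmetic as `calculateMessagesBehind`. A standalone sketch computing it with the plain kafka-clients Admin API (bootstrap server and group id are placeholders):

```java
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.OffsetSpec;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;

public class ConsumerLagExample {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder
    String groupId = "my-group";                                             // placeholder

    try (AdminClient admin = AdminClient.create(props)) {
      // Committed offsets of the group (partitions without a commit are skipped)
      Map<TopicPartition, OffsetAndMetadata> committed =
          admin.listConsumerGroupOffsets(groupId).partitionsToOffsetAndMetadata().get()
              .entrySet().stream()
              .filter(e -> e.getValue() != null)
              .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));

      // Latest (end) offsets for the same partitions
      var endOffsets = admin.listOffsets(
              committed.keySet().stream()
                  .collect(Collectors.toMap(tp -> tp, tp -> OffsetSpec.latest())))
          .all().get();

      // Lag ("messages behind") = sum of (end offset - committed offset)
      long lag = committed.entrySet().stream()
          .mapToLong(e -> endOffsets.get(e.getKey()).offset() - e.getValue().offset())
          .sum();

      System.out.printf("Group %s is %d messages behind%n", groupId, lag);
    }
  }
}
```

kafka-ui does the equivalent reactively and then feeds the per-group totals into the `MESSAGES_BEHIND` comparator shown in the diff above.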
provectus/kafka-ui/3164_3527 | provectus/kafka-ui | provectus/kafka-ui/3164 | provectus/kafka-ui/3527 | [
"connected"
] | bfb80f36b33954a6b8a5a6b31d32e8be67dbfb61 | d06f77ad5338775b56a61b7a24d217a993924df4 | [
"Sorting by \"Num of Topics\" and by \"messages Behind\" was added and merged to the master. \r\nI am adding references to the relevant PRs:\r\n[https://github.com/provectus/kafka-ui/pull/3633](url)\r\n[https://github.com/provectus/kafka-ui/pull/3527](url)\r\n\r\nI think this issue can be closed\r\n"
] | [
"please rename method to `getMessagesBehind(..)` or `calculateMessagesBehind(..)`",
"not needed here, please remove",
"I suggest either `MESSAGES_BEHIND` or `CONSUMER_LAG`",
"Done",
"no need to add description into InternalConsumerGroup\r\nhere how it can be done\r\n```\r\n case MESSAGES_BEHIND -> {\r\n record GroupWithDescr(InternalConsumerGroup g, ConsumerGroupDescription d) { }\r\n\r\n Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->\r\n gwd.g.getMessagesBehind() == null ? 0L : gwd.g.getMessagesBehind());\r\n\r\n var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();\r\n\r\n yield ac.describeConsumerGroups(groupNames)\r\n .flatMap(descriptionsMap -> {\r\n List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();\r\n return getConsumerGroups(ac, descriptions)\r\n .map(g -> Streams.zip(g.stream(), descriptions.stream(), GroupWithDescr::new).toList())\r\n .map(g -> sortAndPaginate(g, comparator, pageNum, perPage, sortOrderDto)\r\n .map(GroupWithDescr::d).toList());\r\n }\r\n );\r\n }\r\n```",
"Done",
"Done",
"Done"
] | "2023-03-21T11:59:37Z" | [
"type/enhancement",
"good first issue",
"scope/backend",
"scope/frontend",
"status/accepted"
] | Consumers: Add sorting by additional columns | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
Sorting is only possible by the Num of members and State. It is not possible by the Num of topics or Messages behind.
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
Sorting by the "Num of topics" and "Messages behind" should be possible as well.
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
[f4e6afe](https://github.com/provectus/kafka-ui/commit/f4e6afe)
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Log in to Kafka UI and navigate to the Consumers page.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->

**Additional context**
<!--
(Add any other context about the problem here)
-->
@Haarolean requested to create a separate issue from the #2651 "Please raise an issue for sorting by num of topics and messages behind." | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
index 9f7e32ed148..21d9efda9ca 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
@@ -89,19 +89,7 @@ private static <T extends ConsumerGroupDTO> T convertToConsumerGroup(
.flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
).collect(Collectors.toSet()).size();
- Long messagesBehind = null;
- // messagesBehind should be undefined if no committed offsets found for topic
- if (!c.getOffsets().isEmpty()) {
- messagesBehind = c.getOffsets().entrySet().stream()
- .mapToLong(e ->
- Optional.ofNullable(c.getEndOffsets())
- .map(o -> o.get(e.getKey()))
- .map(o -> o - e.getValue())
- .orElse(0L)
- ).sum();
- }
-
- consumerGroup.setMessagesBehind(messagesBehind);
+ consumerGroup.setMessagesBehind(c.getMessagesBehind());
consumerGroup.setTopics(numTopics);
consumerGroup.setSimple(c.isSimple());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
index d7b3a732f1f..e8199fa8ef5 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
@@ -20,6 +20,7 @@ public class InternalConsumerGroup {
private final Collection<InternalMember> members;
private final Map<TopicPartition, Long> offsets;
private final Map<TopicPartition, Long> endOffsets;
+ private final Long messagesBehind;
private final String partitionAssignor;
private final ConsumerGroupState state;
private final Node coordinator;
@@ -58,7 +59,25 @@ public static InternalConsumerGroup create(
);
builder.offsets(groupOffsets);
builder.endOffsets(topicEndOffsets);
+ builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
return builder.build();
}
+
+ private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, Map<TopicPartition, Long> endOffsets) {
+ Long messagesBehind = null;
+ // messagesBehind should be undefined if no committed offsets found for topic
+ if (!offsets.isEmpty()) {
+ messagesBehind = offsets.entrySet().stream()
+ .mapToLong(e ->
+ Optional.ofNullable(endOffsets)
+ .map(o -> o.get(e.getKey()))
+ .map(o -> o - e.getValue())
+ .orElse(0L)
+ ).sum();
+ }
+
+ return messagesBehind;
+ }
+
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index 024eb3df513..e848146881d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.service;
+import com.google.common.collect.Streams;
import com.google.common.collect.Table;
import com.provectus.kafka.ui.model.ConsumerGroupOrderingDTO;
import com.provectus.kafka.ui.model.InternalConsumerGroup;
@@ -157,6 +158,24 @@ private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdmi
.map(descriptions ->
sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
}
+ case MESSAGES_BEHIND -> {
+ record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
+
+ Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
+ gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
+
+ var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+ yield ac.describeConsumerGroups(groupNames)
+ .flatMap(descriptionsMap -> {
+ List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+ return getConsumerGroups(ac, descriptions)
+ .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+ .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+ .map(GroupWithDescr::cgd).toList());
+ }
+ );
+ }
};
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index ea335f282cf..fe9723b2dd3 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -2416,6 +2416,7 @@ components:
- NAME
- MEMBERS
- STATE
+ - MESSAGES_BEHIND
ConsumerGroupsPageResponse:
type: object
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
index feb772ac826..ef8d73f5156 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
@@ -56,9 +56,9 @@ const List = () => {
enableSorting: false,
},
{
+ id: ConsumerGroupOrdering.MESSAGES_BEHIND,
header: 'Messages Behind',
accessorKey: 'messagesBehind',
- enableSorting: false,
},
{
header: 'Coordinator',
| null | train | test | 2023-03-30T08:27:39 | "2022-12-28T12:30:46Z" | BulatKha | train |
provectus/kafka-ui/3401_3529 | provectus/kafka-ui | provectus/kafka-ui/3401 | provectus/kafka-ui/3529 | [
"connected"
] | 75a6282a84d77b15fbaaf7e382107a86e2e20f0f | d8289d2ee676a7f4de7e8aebfbe73aa809a80955 | [
"Hey! This is still causing issues.\r\nThe frontend client is not adding CORS header while initializing google oauth client as you can see in the network tab(in screenshot)\r\n\r\n\r\n",
"@prdpx7 could you please try this image?\r\n`public.ecr.aws/provectus/kafka-ui-custom-build:3957`\r\nLet me know how it goes.",
"> @prdpx7 could you please try this image? `public.ecr.aws/provectus/kafka-ui-custom-build:3957` Let me know how it goes.\r\n\r\nHey! The new build is working fine. Appreciate the help π "
] | [] | "2023-03-21T15:16:03Z" | [
"type/bug",
"status/pending",
"scope/backend",
"status/accepted",
"area/auth"
] | OAuth2: Add filters allowing CORS | https://discord.com/channels/897805035122077716/897805035122077719/1077852545168445440
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java
index 0128110ab72..c6c88bfa984 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java
@@ -1,25 +1,12 @@
package com.provectus.kafka.ui.config;
-import lombok.AllArgsConstructor;
-import org.springframework.boot.autoconfigure.web.ServerProperties;
-import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
-import org.springframework.context.annotation.Profile;
-import org.springframework.core.io.ClassPathResource;
-import org.springframework.util.StringUtils;
import org.springframework.web.reactive.config.CorsRegistry;
import org.springframework.web.reactive.config.WebFluxConfigurer;
-import org.springframework.web.reactive.function.server.RouterFunction;
-import org.springframework.web.reactive.function.server.RouterFunctions;
-import org.springframework.web.reactive.function.server.ServerResponse;
@Configuration
-@Profile("local")
-@AllArgsConstructor
public class CorsGlobalConfiguration implements WebFluxConfigurer {
- private final ServerProperties serverProperties;
-
@Override
public void addCorsMappings(CorsRegistry registry) {
registry.addMapping("/**")
@@ -28,31 +15,4 @@ public void addCorsMappings(CorsRegistry registry) {
.allowedHeaders("*")
.allowCredentials(false);
}
-
- private String withContext(String pattern) {
- final String basePath = serverProperties.getServlet().getContextPath();
- if (StringUtils.hasText(basePath)) {
- return basePath + pattern;
- } else {
- return pattern;
- }
- }
-
- @Bean
- public RouterFunction<ServerResponse> cssFilesRouter() {
- return RouterFunctions
- .resources(withContext("/static/css/**"), new ClassPathResource("static/static/css/"));
- }
-
- @Bean
- public RouterFunction<ServerResponse> jsFilesRouter() {
- return RouterFunctions
- .resources(withContext("/static/js/**"), new ClassPathResource("static/static/js/"));
- }
-
- @Bean
- public RouterFunction<ServerResponse> mediaFilesRouter() {
- return RouterFunctions
- .resources(withContext("/static/media/**"), new ClassPathResource("static/static/media/"));
- }
-}
\ No newline at end of file
+}
| null | train | test | 2023-03-22T14:14:52 | "2023-02-24T06:53:36Z" | Haarolean | train |
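The merged change expresses the CORS policy through `WebFluxConfigurer#addCorsMappings`. For comparison only, the same permissive policy can also be expressed in reactive Spring as a `CorsWebFilter` bean — a sketch, not what the project ships:

```java
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.cors.CorsConfiguration;
import org.springframework.web.cors.reactive.CorsWebFilter;
import org.springframework.web.cors.reactive.UrlBasedCorsConfigurationSource;

@Configuration
public class CorsWebFilterConfig {

  @Bean
  public CorsWebFilter corsWebFilter() {
    CorsConfiguration config = new CorsConfiguration();
    config.addAllowedOrigin("*");   // same permissive defaults as the merged config
    config.addAllowedMethod("*");
    config.addAllowedHeader("*");
    config.setAllowCredentials(false);

    UrlBasedCorsConfigurationSource source = new UrlBasedCorsConfigurationSource();
    source.registerCorsConfiguration("/**", config);
    return new CorsWebFilter(source);
  }
}
```

Either way, the key difference from the previous code is that the configuration is no longer limited to the `local` profile, so the CORS headers are also emitted in production deployments where the OAuth2 redirects happen.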
provectus/kafka-ui/2441_3540 | provectus/kafka-ui | provectus/kafka-ui/2441 | provectus/kafka-ui/3540 | [
"connected"
] | 8d3bac8834b5dbc26009915801d25aa1a0a6adf6 | acfe7a4afcce8da9d111d8922a6cdbb311e53450 | [
"Hello there WizzardMaker! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reaching out. We'll take a look",
"Hi,\r\n\r\nIm interested on the connection to Azure Events Hub too. \r\n\r\nThanks!!",
"Seems like they [don't care ](https://github.com/Azure/azure-event-hubs-for-kafka/issues/61) that much about implementing describeConfigs.\r\n\r\nTODO: look for a workaround",
"Thanks for the feedback @Haarolean !",
"@Haarolean I would like to ask current kafka ui version whether have supported to connect Azure Eventhub? Thank you.",
"@hueiyuan thanks for the reminder\r\n\r\nMight be doable now considering #3376,\r\n\r\n@iliax PTAL",
"@Haarolean Thanks for your reply. so maybe can I connect eventhub in `release 0.5.0`?\r\nBy the way, also please @iliax to confirm it. thank you.",
"@hueiyuan no you can't since this issue is still open.",
"The same problem here with MSK(AWS) Kafka 1.1.1.",
"@robsonpeixoto MSK (even serverless) is fully supported. Try `master`-labeled docker image and if it doesn't work, raise a new issue/discussion.",
"@robsonpeixoto https://aws.amazon.com/marketplace/pp/prodview-ogtt5hfhzkq6a",
"I'm having the same issues, and using the `master` tagged Docker image doesn't help.\r\nThe same `UnknownTopicOrPartitionException: This server does not host this topic-partition` error keeps popping up unfortunately.",
"@FireDrunk fortunately, we're already working on this :)",
"@WizzardMaker @FireDrunk @hueiyuan hey guys, can you please verify it works for you on `master` - tagged docker image?",
"Can confirm that it works with the newest master branch image",
"@iliax @WizzardMaker \r\nAfter verifying, my kafka ui still does not connect eventhub. Could you give connect example of docker-compose.yaml ? My error message is below:\r\n```\r\nkafka-ui | 2023-03-27 01:02:12,188 WARN [kafka-admin-client-thread | kafka-ui-admin-1679878922-2] o.a.k.c.NetworkClient: [AdminClient clientId=kafka-ui-admin-1679878922-2] Connection to node -1 (<my-eventhub-namespace>.servicebus.windows.net/40.78.253.136:9093) could not be established. Broker may not be available.\r\n```\r\n\r\nI also provide my docker-compose.yaml for your reference:\r\n```yaml\r\nversion: '3'\r\n\r\nservices:\r\n kafka-ui:\r\n image: provectuslabs/kafka-ui:master\r\n container_name: kafka-ui\r\n ports:\r\n - \"8080:8080\"\r\n restart: always\r\n environment:\r\n - KAFKA_CLUSTERS_0_NAME=prod-event-hub\r\n - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL\r\n - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<my-eventhub-namespaces>.servicebus.windows.net:9093\r\n - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=PLAIN\r\n - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username='$ConnectionString' password='Endpoint=sb://<my-eventhub-namespaces>.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<my-key>';\r\n```",
"> @iliax @WizzardMaker After verifying, my kafka ui still does not connect eventhub. Could you give connect example of docker-compose.yaml ? My error message is below:\r\n> \r\n> ```\r\n> kafka-ui | 2023-03-27 01:02:12,188 WARN [kafka-admin-client-thread | kafka-ui-admin-1679878922-2] o.a.k.c.NetworkClient: [AdminClient clientId=kafka-ui-admin-1679878922-2] Connection to node -1 (<my-eventhub-namespace>.servicebus.windows.net/40.78.253.136:9093) could not be established. Broker may not be available.\r\n> ```\r\n> \r\n> I also provide my docker-compose.yaml for your reference:\r\n> \r\n> ```yaml\r\n> version: '3'\r\n> \r\n> services:\r\n> kafka-ui:\r\n> image: provectuslabs/kafka-ui:master\r\n> container_name: kafka-ui\r\n> ports:\r\n> - \"8080:8080\"\r\n> restart: always\r\n> environment:\r\n> - KAFKA_CLUSTERS_0_NAME=prod-event-hub\r\n> - KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL\r\n> - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<my-eventhub-namespaces>.servicebus.windows.net:9093\r\n> - KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=PLAIN\r\n> - KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username='$ConnectionString' password='Endpoint=sb://<my-eventhub-namespaces>.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<my-key>';\r\n> ```\r\n\r\nThat error message looks more like a general connection problem, and not the problem discussed in this issue.\r\n\r\nAre you sure, that your at all able to connect to your event hub at that address? (What is \\<my-eventhub-namespaces\\>? Make sure that this is the name of the Azure resource - Check your \"Host name\" Property of the Event Hub Resource)",
"@WizzardMaker \r\n`<my-eventhub-namespaces>` just is a namespace of eventhub. You can image this is a variable which can be replaced. Because I do not want to paste complete host name here. Based on this reason, I have confirm my host name of eventhub is correct. So I not sure and want to confirm this configure of docker-compose.yaml whether have incorrect setting.",
"The configuration looks correct.\r\n\r\nIs the network of your docker container restricted in any way? Can you open a shell on the container and ping to the internet (`ping google.com` for example)?",
"@hueiyuan also make sure that you are using standart+ plan for Eventhub, since lower versions are not kafka-protocol compatible",
"Worked like a charm for me. Great job, Thanks!"
] | [] | "2023-03-23T12:29:38Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | Can't connect to azure event hub kafka interface | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
We wanted to monitor our azure event hub, which we mainly use as a kafka server.
The kafka interface works and our backend services can communicate with the kafka interface
Monitoring also works with kafka-tool/offset-explorer
But kafka-ui fails to connect to the hub with this error message:
```
2022-08-12 14:27:25,701 ERROR [parallel-6] c.p.k.u.s.MetricsService: Failed to collect cluster prod-event-hub info
java.lang.IllegalStateException: Error while creating AdminClient for Cluster prod-event-hub
at com.provectus.kafka.ui.service.AdminClientServiceImpl.lambda$createAdminClient$3(AdminClientServiceImpl.java:45)
at reactor.core.publisher.Mono.lambda$onErrorMap$31(Mono.java:3733)
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:94)
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onError(FluxMapFuseable.java:140)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondError(MonoFlatMap.java:192)
at reactor.core.publisher.MonoFlatMap$FlatMapInner.onError(MonoFlatMap.java:259)
at reactor.core.publisher.MonoPublishOn$PublishOnSubscriber.run(MonoPublishOn.java:187)
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:68)
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:28)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:830)
Caused by: org.apache.kafka.common.errors.UnknownTopicOrPartitionException: This server does not host this topic-partition.
```
The kafka UI has full admin rights on the assigned user
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
Kafka-UI is setup with a standard docker compose:
```yaml
version: '2.7'
services:
kafka-ui:
image: provectuslabs/kafka-ui
container_name: kafka-ui
ports:
- "8085:8080"
restart: always
environment:
- KAFKA_CLUSTERS_0_NAME=prod-event-hub
- KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_SSL
- KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=***.servicebus.windows.net:9093
- KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=PLAIN
- KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG=org.apache.kafka.common.security.plain.PlainLoginModule required username='$$ConnectionString' password='***;
```
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
A functioning connection with the azure event hub
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index 19d06a0c48b..7cabb79f2d0 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -212,17 +212,24 @@ private static Mono<Map<Integer, List<ConfigEntry>>> loadBrokersConfig(AdminClie
.map(brokerId -> new ConfigResource(ConfigResource.Type.BROKER, Integer.toString(brokerId)))
.collect(toList());
return toMono(client.describeConfigs(resources).all())
- // some kafka backends (like MSK serverless) do not support broker's configs retrieval,
- // in that case InvalidRequestException will be thrown
- .onErrorResume(InvalidRequestException.class, th -> {
- log.trace("Error while getting broker {} configs", brokerIds, th);
- return Mono.just(Map.of());
- })
+ // some kafka backends don't support broker's configs retrieval,
+ // and throw various exceptions on describeConfigs() call
+ .onErrorResume(th -> th instanceof InvalidRequestException // MSK Serverless
+ || th instanceof UnknownTopicOrPartitionException, // Azure event hub
+ th -> {
+ log.trace("Error while getting configs for brokers {}", brokerIds, th);
+ return Mono.just(Map.of());
+ })
// there are situations when kafka-ui user has no DESCRIBE_CONFIGS permission on cluster
.onErrorResume(ClusterAuthorizationException.class, th -> {
log.trace("AuthorizationException while getting configs for brokers {}", brokerIds, th);
return Mono.just(Map.of());
})
+ // catching all remaining exceptions, but logging on WARN level
+ .onErrorResume(th -> true, th -> {
+ log.warn("Unexpected error while getting configs for brokers {}", brokerIds, th);
+ return Mono.just(Map.of());
+ })
.map(config -> config.entrySet().stream()
.collect(toMap(
c -> Integer.valueOf(c.getKey().name()),
| null | val | test | 2023-03-23T16:40:12 | "2022-08-12T14:30:26Z" | WizzardMaker | train |
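Independently of kafka-ui, the connection settings from the compose file above can be smoke-tested with a few lines of plain `AdminClient` code: if this lists topics, the endpoint, SASL mechanism and connection string are fine, and any remaining failure lies in the unsupported admin calls that the patch above now tolerates. Namespace and key below are placeholders, and a Standard-tier (or higher) Event Hubs namespace is assumed, since lower tiers do not expose the Kafka endpoint:

```java
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class EventHubConnectivityCheck {
  public static void main(String[] args) throws Exception {
    Properties props = new Properties();
    props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
        "<namespace>.servicebus.windows.net:9093");                    // placeholder
    props.put("security.protocol", "SASL_SSL");
    props.put("sasl.mechanism", "PLAIN");
    props.put("sasl.jaas.config",
        "org.apache.kafka.common.security.plain.PlainLoginModule required "
            + "username=\"$ConnectionString\" "
            + "password=\"Endpoint=sb://<namespace>.servicebus.windows.net/;"
            + "SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=<key>\";");

    try (AdminClient admin = AdminClient.create(props)) {
      // A successful call here means connectivity and authentication are OK.
      System.out.println("Topics: " + admin.listTopics().names().get());
    }
  }
}
```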
provectus/kafka-ui/3117_3543 | provectus/kafka-ui | provectus/kafka-ui/3117 | provectus/kafka-ui/3543 | [
"connected"
] | 20cc1e489b2dd9590c14c1c4819e5e01d9dc9b97 | 4623f8d7b8a8dddd6b1c0cf3b9a7acd4074eab3c | [
"@David-DB88 Implementation is invalid.\r\n#3117 clearly states that the value should be fetched from API (the second screenshot is of deprecated fields in API response).\r\n\r\n```\r\nconst keyFormat = searchParams.get('keySerde') || '';\r\n const valueFormat = searchParams.get('valueSerde') || '';\r\n```\r\nThis will display the currently selected serdes no matter which serde is used for the message itself.\r\n",
"Got it. Thank you for pointing out."
] | [
"shouldn't we test it by setting search params in the testing to these values ? then render the component for testing. that way we can test `messageContentFormat`",
"@Mgo i wrote above that i will change test cases if the solution is normal, i know about linter error i commented some code in test cases "
] | "2023-03-23T16:55:31Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | [FE] Update display of key/value format | 
key/value types are not displayed, they're currently being fetched from `keyFormat` and `valueFormat` fields, but:

| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
index cb2dbbb3c0c..47b6e49a41e 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
@@ -42,8 +42,6 @@ const Message: React.FC<Props> = ({
key,
partition,
content,
- valueFormat,
- keyFormat,
headers,
},
keyFilters,
@@ -140,9 +138,7 @@ const Message: React.FC<Props> = ({
{isOpen && (
<MessageContent
messageKey={key}
- messageKeyFormat={keyFormat}
messageContent={content}
- messageContentFormat={valueFormat}
headers={headers}
timestamp={timestamp}
timestampType={timestampType}
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
index 6edfdde54c1..fe472ad3b12 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
@@ -3,6 +3,7 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
import { formatTimestamp } from 'lib/dateTimeHelpers';
+import { useSearchParams } from 'react-router-dom';
import * as S from './MessageContent.styled';
@@ -10,9 +11,7 @@ type Tab = 'key' | 'content' | 'headers';
export interface MessageContentProps {
messageKey?: string;
- messageKeyFormat?: string;
messageContent?: string;
- messageContentFormat?: string;
headers?: { [key: string]: string | undefined };
timestamp?: Date;
timestampType?: TopicMessageTimestampTypeEnum;
@@ -20,14 +19,15 @@ export interface MessageContentProps {
const MessageContent: React.FC<MessageContentProps> = ({
messageKey,
- messageKeyFormat,
messageContent,
- messageContentFormat,
headers,
timestamp,
timestampType,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
+ const [searchParams] = useSearchParams();
+ const keyFormat = searchParams.get('keySerde') || '';
+ const valueFormat = searchParams.get('valueSerde') || '';
const activeTabContent = () => {
switch (activeTab) {
@@ -54,7 +54,6 @@ const MessageContent: React.FC<MessageContentProps> = ({
e.preventDefault();
setActiveTab('headers');
};
-
const keySize = new TextEncoder().encode(messageKey).length;
const contentSize = new TextEncoder().encode(messageContent).length;
const contentType =
@@ -106,21 +105,21 @@ const MessageContent: React.FC<MessageContentProps> = ({
</S.Metadata>
<S.Metadata>
- <S.MetadataLabel>Value</S.MetadataLabel>
+ <S.MetadataLabel>Key Serde</S.MetadataLabel>
<span>
- <S.MetadataValue>{messageContentFormat}</S.MetadataValue>
+ <S.MetadataValue>{keyFormat}</S.MetadataValue>
<S.MetadataMeta>
- Size: <BytesFormatted value={contentSize} />
+ Size: <BytesFormatted value={keySize} />
</S.MetadataMeta>
</span>
</S.Metadata>
<S.Metadata>
- <S.MetadataLabel>Key</S.MetadataLabel>
+ <S.MetadataLabel>Value Serde</S.MetadataLabel>
<span>
- <S.MetadataValue>{messageKeyFormat}</S.MetadataValue>
+ <S.MetadataValue>{valueFormat}</S.MetadataValue>
<S.MetadataMeta>
- Size: <BytesFormatted value={keySize} />
+ Size: <BytesFormatted value={contentSize} />
</S.MetadataMeta>
</span>
</S.Metadata>
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
index 23af3d45cf4..91310a30e41 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
@@ -16,9 +16,7 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
<tbody>
<MessageContent
messageKey='"test-key"'
- messageKeyFormat="JSON"
messageContent='{"data": "test"}'
- messageContentFormat="AVRO"
headers={{ header: 'test' }}
timestamp={new Date(0)}
timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
@@ -34,14 +32,33 @@ const proto =
global.TextEncoder = TextEncoder;
+const searchParamsContentAVRO = new URLSearchParams({
+ keySerde: 'SchemaRegistry',
+ valueSerde: 'AVRO',
+ limit: '100',
+});
+
+const searchParamsContentJSON = new URLSearchParams({
+ keySerde: 'SchemaRegistry',
+ valueSerde: 'JSON',
+ limit: '100',
+});
+
+const searchParamsContentPROTOBUF = new URLSearchParams({
+ keySerde: 'SchemaRegistry',
+ valueSerde: 'PROTOBUF',
+ limit: '100',
+});
describe('MessageContent screen', () => {
beforeEach(() => {
- render(setupWrapper());
+ render(setupWrapper(), {
+ initialEntries: [`/messages?${searchParamsContentAVRO}`],
+ });
});
describe('renders', () => {
it('key format in document', () => {
- expect(screen.getByText('JSON')).toBeInTheDocument();
+ expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
});
it('content format in document', () => {
@@ -86,36 +103,36 @@ describe('checking content type depend on message type', () => {
it('renders component with message having JSON type', () => {
render(
setupWrapper({
- messageContentFormat: 'JSON',
messageContent: '{"data": "test"}',
- })
+ }),
+ { initialEntries: [`/messages?${searchParamsContentJSON}`] }
);
- expect(screen.getAllByText('JSON')[1]).toBeInTheDocument();
+ expect(screen.getByText('JSON')).toBeInTheDocument();
});
it('renders component with message having AVRO type', () => {
render(
setupWrapper({
- messageContentFormat: 'AVRO',
messageContent: '{"data": "test"}',
- })
+ }),
+ { initialEntries: [`/messages?${searchParamsContentAVRO}`] }
);
expect(screen.getByText('AVRO')).toBeInTheDocument();
});
it('renders component with message having PROTOBUF type', () => {
render(
setupWrapper({
- messageContentFormat: 'PROTOBUF',
messageContent: proto,
- })
+ }),
+ { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
it('renders component with message having no type which is equal to having PROTOBUF type', () => {
render(
setupWrapper({
- messageContentFormat: 'PROTOBUF',
messageContent: '',
- })
+ }),
+ { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
);
expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
});
| null | train | test | 2023-03-27T21:26:12 | "2022-12-22T08:17:10Z" | Haarolean | train |
provectus/kafka-ui/3542_3545 | provectus/kafka-ui | provectus/kafka-ui/3542 | provectus/kafka-ui/3545 | [
"connected"
] | 4a1e987a1d2a958119ab5c936d4b1d82125e14d9 | 8348241e3d49f3947eb432761dd587c8c19a4c06 | [
"Hello there sti-yvi! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hi, thanks for reaching out.\r\n\r\nWhat's `UserInfoUri` exactly? Not really familiar with on-premises github, is that possible to retrieve this information just with a different URI?",
"Hello, thank you for the quick answer!\r\n\r\n> What's `UserInfoUri` exactly? Not really familiar with on-premises github, is that possible to retrieve this information just with a different URI?\r\n\r\nWell, I found this field here:\r\nhttps://github.com/provectus/kafka-ui/blob/c5d6896ae1f61dea2ace3fdda6e78817eaca6c4b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthProperties.java#L38\r\n\r\nIt is also mentioned in #3249.\r\n\r\nIt looks like this is an URI to retrieve user information. This URI can look like this:\r\n- github.com: `https://api.github.com/user` (doc [here](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps#3-use-the-access-token-to-access-the-api))\r\n- GitHub Enterprise: `http(s)://HOSTNAME/api/v3/user` (doc [here](https://docs.github.com/en/[email protected]/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps#3-use-the-access-token-to-access-the-api))",
">Also, like this one: `.uri(\"/user/orgs\")`\r\n\r\nthis one is consistent for both APIs.\r\n\r\nI've implemented the usage of `UserInfoUri`, please try this docker image `public.ecr.aws/provectus/kafka-ui-custom-build:3545` and let me know. Don't forget to pass a custom `user-info-uri`, btw.",
"> > Also, like this one: `.uri(\"/user/orgs\")`\r\n> \r\n> this one is consistent for both APIs.\r\n\r\nUnderstood. I put `user-info-uri: https://xxx/api/v3` in the configuration file.\r\n\r\nWith `public.ecr.aws/provectus/kafka-ui-custom-build:3545` image, it goes to `/login?error`. Below, logs with `TRACE` level:\r\n```\r\n2023-03-24 09:32:31,152 TRACE [reactor-http-epoll-3] o.s.h.c.j.Jackson2JsonDecoder: [492b2eea] [e326097b-1, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443] Decoded [{access_token=zzz, token_type=bearer, scope=read:org}]\r\n2023-03-24 09:32:31,194 TRACE [reactor-http-epoll-3] o.s.w.r.f.c.ExchangeFunctions: [597c6943] HTTP GET https://xxx/api/v3, headers={masked}\r\n2023-03-24 09:32:31,202 DEBUG [reactor-http-epoll-3] r.n.r.PooledConnectionProvider: [00c69207] Created a new pooled channel, now: 1 active connections, 0 inactive connections and 0 pending acquire requests.\r\n2023-03-24 09:32:31,203 DEBUG [reactor-http-epoll-3] r.n.t.SslProvider: [00c69207] SSL enabled using engine SSLEngine[hostname=xxx, port=443, Session(1679650351202|SSL_NULL_WITH_NULL_NULL)] and SNI xxx/<unresolved>:443\r\n2023-03-24 09:32:31,203 DEBUG [reactor-http-epoll-3] r.n.t.TransportConfig: [00c69207] Initialized pipeline DefaultChannelPipeline{(reactor.left.sslHandler = io.netty.handler.ssl.SslHandler), (reactor.left.sslReader = reactor.netty.tcp.SslProvider$SslReadHandler), (reactor.left.httpCodec = io.netty.handler.codec.http.HttpClientCodec), (reactor.left.httpDecompressor = io.netty.handler.codec.http.HttpContentDecompressor), (reactor.right.reactiveBridge = reactor.netty.channel.ChannelOperationsHandler)}\r\n2023-03-24 09:32:31,203 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [e326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(POST{uri=/login/oauth/access_token, connection=PooledConnection{channel=[id: 0xe326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [response_completed])\r\n2023-03-24 09:32:31,203 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [e326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(POST{uri=/login/oauth/access_token, connection=PooledConnection{channel=[id: 0xe326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [disconnecting])\r\n2023-03-24 09:32:31,203 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [e326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443] Releasing channel\r\n2023-03-24 09:32:31,217 DEBUG [reactor-http-epoll-3] r.n.r.PooledConnectionProvider: [e326097b, L:/xxx.xxx.xxx.xxx:54634 - R:xxx/yyy.yyy.yyy.yyy:443] Channel cleaned, now: 0 active connections, 1 inactive connections and 0 pending acquire requests.\r\n2023-03-24 09:32:31,217 DEBUG [reactor-http-epoll-3] r.n.t.TransportConnector: [00c69207] Connecting to [xxx/yyy.yyy.yyy.yyy:443].\r\n2023-03-24 09:32:31,219 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Registering pool release on close event for channel\r\n2023-03-24 09:32:31,219 DEBUG [reactor-http-epoll-3] r.n.r.PooledConnectionProvider: [00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Channel connected, now: 1 active connections, 1 inactive connections and 0 pending acquire requests.\r\n2023-03-24 09:32:31,244 DEBUG [reactor-http-epoll-3] j.e.security: TLSHandshake: xxx:443, TLSv1.3, TLS_AES_128_GCM_SHA256, -831680270\r\n2023-03-24 09:32:31,245 DEBUG [reactor-http-epoll-3] 
i.n.h.s.SslHandler: [id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] HANDSHAKEN: protocol:TLSv1.3 cipher suite:TLS_AES_128_GCM_SHA256\r\n2023-03-24 09:32:31,245 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(PooledConnection{channel=[id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443]}, [connected])\r\n2023-03-24 09:32:31,245 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(GET{uri=null, connection=PooledConnection{channel=[id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [configured])\r\n2023-03-24 09:32:31,245 DEBUG [reactor-http-epoll-3] r.n.h.c.HttpClientConnect: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Handler is being applied: {uri=https://xxx/api/v3, method=GET}\r\n2023-03-24 09:32:31,245 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(GET{uri=/api/v3, connection=PooledConnection{channel=[id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [request_prepared])\r\n2023-03-24 09:32:31,254 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(GET{uri=/api/v3, connection=PooledConnection{channel=[id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [request_sent])\r\n2023-03-24 09:32:31,286 DEBUG [reactor-http-epoll-3] r.n.h.c.HttpClientOperations: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Received response (auto-read:false) : RESPONSE(decodeResult: success, version: HTTP/1.1)\r\nHTTP/1.1 200 OK\r\nServer: <filtered>\r\nDate: <filtered>\r\nContent-Type: <filtered>\r\nTransfer-Encoding: <filtered>\r\nCache-Control: <filtered>\r\nVary: <filtered>\r\nETag: <filtered>\r\nX-OAuth-Scopes: <filtered>\r\nX-Accepted-OAuth-Scopes: <filtered>\r\nx-oauth-client-id: <filtered>\r\nX-GitHub-Enterprise-Version: <filtered>\r\nX-GitHub-Media-Type: <filtered>\r\nX-RateLimit-Limit: <filtered>\r\nX-RateLimit-Remaining: <filtered>\r\nX-RateLimit-Reset: <filtered>\r\nX-RateLimit-Used: <filtered>\r\nx-ratelimit-resource: <filtered>\r\nAccess-Control-Expose-Headers: <filtered>\r\nAccess-Control-Allow-Origin: <filtered>\r\nX-GitHub-Request-Id: <filtered>\r\nStrict-Transport-Security: <filtered>\r\nX-Frame-Options: <filtered>\r\nX-Content-Type-Options: <filtered>\r\nX-XSS-Protection: <filtered>\r\nReferrer-Policy: <filtered>\r\nContent-Security-Policy: <filtered>\r\nX-Runtime-Rack: <filtered>\r\n2023-03-24 09:32:31,286 DEBUG [reactor-http-epoll-3] r.n.r.DefaultPooledConnectionProvider: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] onStateChange(GET{uri=/api/v3, connection=PooledConnection{channel=[id: 0x00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443]}}, [response_received])\r\n2023-03-24 09:32:31,287 TRACE [reactor-http-epoll-3] o.s.w.r.f.c.ExchangeFunctions: [597c6943] [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Response 200 OK, headers={masked}\r\n2023-03-24 09:32:31,288 DEBUG [reactor-http-epoll-3] r.n.c.FluxReceive: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] FluxReceive{pending=0, cancelled=false, inboundDone=false, inboundError=null}: subscribing inbound receiver\r\n2023-03-24 
09:32:31,297 DEBUG [reactor-http-epoll-3] r.n.h.c.HttpClientOperations: [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Received last HTTP packet\r\n2023-03-24 09:32:31,297 TRACE [reactor-http-epoll-3] r.n.c.ChannelOperations: [00c69207, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Disposing ChannelOperation from a channel\r\njava.lang.Exception: ChannelOperation terminal stack\r\n at reactor.netty.channel.ChannelOperations.terminate(ChannelOperations.java:465)\r\n at reactor.netty.http.client.HttpClientOperations.onInboundNext(HttpClientOperations.java:702)\r\n at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\n at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\n at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\n at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\n at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)\r\n at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:336)\r\n at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:308)\r\n at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\n at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\n at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1373)\r\n at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1236)\r\n at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1285)\r\n at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:519)\r\n at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:458)\r\n at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:280)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\n at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\n at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)\r\n at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\n at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\r\n at 
io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800)\r\n at io.netty.channel.epoll.AbstractEpollChannel$AbstractEpollUnsafe$1.run(AbstractEpollChannel.java:425)\r\n at io.netty.util.concurrent.AbstractEventExecutor.runTask(AbstractEventExecutor.java:174)\r\n at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java:167)\r\n at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:470)\r\n at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:403)\r\n at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)\r\n at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\r\n at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\r\n at java.base/java.lang.Thread.run(Thread.java:833)\r\n2023-03-24 09:32:31,299 TRACE [reactor-http-epoll-3] o.s.h.c.j.Jackson2JsonDecoder: [597c6943] [00c69207-1, L:/xxx.xxx.xxx.xxx:54636 - R:xxx/yyy.yyy.yyy.yyy:443] Decoded [{current_user_url=https://xxx/api/v3/user, current_user_authorizations_html_url=https://xxx/settings/connections/applications{/client_id}, authorizations_url=https://xxx/api/v3/authorizations, code_search_url=https://xxx/api/v3/search/code?q={query}{&page,per_page,sort,order}, commit_search_url=https://xxx/api/v3/search/commits?q={query}{&page,per_page,sort,order}, emails_url=https://xxx/api/v3/user/emails, emojis_url=https://xxx/api/v3/emojis, events_url=https://xxx/api/v3/events, feeds_url=https://xxx/api/v3/feeds, followers_url=https://xxx/api/v3/user/followers, following_url=https://xxx/api/v3/user/following{/target}, gists_url=https://xxx/api/v3/gists{/gist_id}, hub_url=https://xxx/api/v3/hub, issue_search_url=https://xxx/api/v3/search/issues?q={query}{&page,per_page,sort,order}, issues_url=https://xxx/api/v3/issues, keys_url=https://xxx/api/v3/user/keys, label_search_url=https://xxx/api/v3/search/labels?q={query}&repository_id={repository_id}{&page,per_page}, notifications_url=https://xxx/api/v3/notifications, organization_url=https://xxx/api/v3/orgs/{org}, organization_repositories_url=https://xxx/api/v3/orgs/{org}/repos{?type,page,per_page,sort}, organization_teams_url=https://xxx/api/v3/orgs/{org}/teams, public_gists_url=https://xxx/api/v3/gists/public, rate_limit_url=https://xxx/api/v3/rate_limit, repository_url=https://xxx/api/v3/repos/{owner}/{repo}, repository_search_url=https://xxx/api/v3/search/repositories?q={query}{&page,per_page,sort,order}, current_user_repositories_url=https://xxx/api/v3/user/repos{?type,page,per_page,sort}, starred_url=https://xxx/api/v3/user/starred{/owner}{/repo}, starred_gists_url=https://xxx/api/v3/gists/starred, topic_search_url=https://xxx/api/v3/search/topics?q={query}{&page,per_page}, user_url=https://xxx/api/v3/users/{user}, user_organizations_url=https://xxx/api/v3/user/orgs, user_repositories_url=https://xxx/api/v3/users/{user}/repos{?type,page,per_page,sort}, user_search_url=https://xxx/api/v3/search/users?q={query}{&page,per_page,sort,order}}]\r\n2023-03-24 09:32:31,302 DEBUG [reactor-http-epoll-3] o.s.w.r.f.c.ExchangeFunctions: [597c6943] Cancel signal (to close connection)\r\n2023-03-24 09:32:31,309 DEBUG [reactor-http-epoll-3] o.s.s.w.s.a.AuthenticationWebFilter: Authentication failed: [invalid_user_info_response] An error occurred reading the UserInfo response: Missing attribute 'login' in attributes\r\n2023-03-24 09:32:31,309 DEBUG [reactor-http-epoll-3] 
o.s.s.w.s.DefaultServerRedirectStrategy: Redirecting to '/login?error'\r\n2023-03-24 09:32:31,309 TRACE [reactor-http-epoll-3] o.s.w.s.a.HttpWebHandlerAdapter: [ff0aa489-3] Completed 302 FOUND, headers={masked}\r\n```\r\n\r\n",
"It looks like `UserInfoUri` is something else, sorry.\r\n@Haarolean, maybe you should put the API endpoint property somewhere else (like under `customParams`)?",
">Understood. I put user-info-uri: https://xxx/api/v3 in the configuration file.\r\n\r\nAlmost, please try setting it to `http(s)://HOSTNAME/api/v3/user` instead.",
"> Almost, please try setting it to `http(s)://HOSTNAME/api/v3/user` instead.\r\n\r\nWith `http(s)://HOSTNAME/api/v3/user`, it does request to `http(s)://HOSTNAME/api/v3/user/user/orgs`, so it is now working:\r\n*redacted as unrelated*",
"@sti-yvi wait, is it working? Now the URL seems invalid, looks like the previous one was correct.\r\nI didn't take a look at your logs, it states this: `An error occurred reading the UserInfo response: Missing attribute 'login' in attributes`. No idea what is this about considering I've tried that build with an non-premises github and it works fine.",
"> @sti-yvi wait, is it working?\r\n\r\nNo, it does not work with both URI.\r\n\r\n> Now the URL seems invalid, looks like the previous one was correct.\r\n\r\nIndeed.\r\n\r\n> I didn't take a look at your logs, it states this: `An error occurred reading the UserInfo response: Missing attribute 'login' in attributes`. No idea what is this about considering I've tried that build with an non-premises github and it works fine.\r\n\r\nI think my first assumption (*It would be great to use `UserInfoUri` for RBAC with GitHub provider.*) was wrong, sorry. It looks like `UserInfoUri` is used by something else, so `UserInfoUri` must not be used here.\r\n\r\nCan you use a `customParams` instead of `UserInfoUri` in `GithubAuthorityExtractor.java`, please? This `customParams` should allow to set an alternative GitHub API endpoint.\r\n",
">I think my first assumption (It would be great to use UserInfoUri for RBAC with GitHub provider.) was wrong, sorry. It looks like UserInfoUri is used by something else, so UserInfoUri must not be used here.\r\n\r\nNope, I'm confident that's the right assumption, see [here](https://github.com/spring-projects/spring-security/blob/main/config/src/main/java/org/springframework/security/config/oauth2/client/CommonOAuth2Provider.java#L65).\r\n\r\nIt still puzzles me why you do receive `Missing attribute 'login' in attributes`. \r\nI've applied a little fix, which reduces the duplication in the URL (`/user/user`), so both standard & cloud URLs should work. Let's see if that does fix the issue.\r\n\r\nNow, let's set your URL again to `http(s)://HOSTNAME/api/v3/user` and **repull the image** and let's try the same thing again.\r\n",
"> Nope, I'm confident that's the right assumption, see [here](https://github.com/spring-projects/spring-security/blob/main/config/src/main/java/org/springframework/security/config/oauth2/client/CommonOAuth2Provider.java#L65).\r\n\r\nNice catch.\r\n\r\n> Now, let's set your URL again to `http(s)://HOSTNAME/api/v3/user` and **repull the image** and let's try the same thing again.\r\n\r\nYes, it works now! π \r\nI tested a read-only and admin setups on two Kafka clusters, permissions work as expected.\r\n\r\nThank you for your time and your work. π ",
"@Haarolean may I suggest to update the [*OAuth-Configuration* wiki page](https://github.com/provectus/kafka-ui/wiki/OAuth-Configuration) with [following content](https://github.com/provectus/kafka-ui/files/11091031/OAuth-Configuration.md)?\r\n\r\n\r\n",
"@sti-yvi our new docs are located at docs.kafka-ui.provectus.io. Can you raise a PR of [this file](https://github.com/provectus/kafka-ui/blob/docs/configuration/authentication/oauth2.md) into `docs` branch? :)",
"> @sti-yvi our new docs are located at docs.kafka-ui.provectus.io. Can you raise a PR of [this file](https://github.com/provectus/kafka-ui/blob/docs/configuration/authentication/oauth2.md) into `docs` branch? :)\r\n\r\nSure. PR #3577 opened.",
"Tested with v0.7.0: all is good. Thank you again for your work @Haarolean."
] | [
"get it from OAuth2UserRequest"
] | "2023-03-23T20:33:36Z" | [
"type/bug",
"status/pending",
"scope/backend",
"status/accepted"
] | RBAC with GitHub Enterprise Server | Hello,
I am trying to use Kafka UI with OAuth and RBAC in an isolated environment (no Internet access).
I configured OAuth for GitHub Enterprise Server 3.6.8 like this:
```yaml
auth:
type: OAUTH2
oauth2:
client:
github:
provider: github
clientId: xxx
clientSecret: xxx
scope:
- read:org
user-name-attribute: login
custom-params:
type: github
authorization-uri: https://xxx/login/oauth/authorize
token-uri: https://xxx/login/oauth/access_token
user-info-uri: https://xxx/api/v3/user
```
It works fine: I can login to Kafka UI by using GitHub Enterprise.
However, I cannot use RBAC, because Kafka UI is trying to connect to `api.github.com`. It looks like some parts are hardcoded in GithubAuthorityExtractor.java:
https://github.com/provectus/kafka-ui/blob/5c723d9b4491019bc6de01e3746b5b86d70f0560/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java#L28
Also, like this one: `.uri("/user/orgs")`
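A rough sketch of what I have in mind (illustrative only; the getters are the standard Spring Security `OAuth2UserRequest` API, which the extractor already receives, and the fallback URL is just the current default):
```java
// Sketch only: build the WebClient from the configured user-info URI instead of the hardcoded host.
// `request` is the OAuth2UserRequest that is already available to the extractor.
String userInfoUri = request.getClientRegistration()
    .getProviderDetails()
    .getUserInfoEndpoint()
    .getUri();                                    // e.g. https://HOSTNAME/api/v3/user

WebClient webClient = WebClient.create(
    userInfoUri != null ? userInfoUri : "https://api.github.com/user");

Mono<List<Map<String, Object>>> userOrganizations = webClient.get()
    .uri("/orgs")                                 // resolved against the base, i.e. .../user/orgs
    .headers(headers -> headers.setBearerAuth(request.getAccessToken().getTokenValue()))
    .retrieve()
    .bodyToMono(new ParameterizedTypeReference<List<Map<String, Object>>>() {});
```
That way GitHub Enterprise (`https://HOSTNAME/api/v3/user`) and github.com would both work without any hardcoded host.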
It would be great to use `UserInfoUri` for RBAC with GitHub provider. | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java",
"kafka-ui-api/src/main/resources/application-local.yml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java",
"kafka-ui-api/src/main/resources/application-local.yml"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
index 882c727375a..0f66e45917e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java
@@ -12,6 +12,7 @@
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.http.HttpHeaders;
+import org.springframework.security.config.oauth2.client.CommonOAuth2Provider;
import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
import org.springframework.web.reactive.function.client.WebClient;
@@ -24,8 +25,7 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor {
private static final String USERNAME_ATTRIBUTE_NAME = "login";
private static final String ORGANIZATION_NAME = "login";
private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json";
-
- private final WebClient webClient = WebClient.create("https://api.github.com");
+ private static final String DUMMY = "dummy";
@Override
public boolean isApplicable(String provider) {
@@ -64,9 +64,24 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str
return Mono.just(groupsByUsername);
}
+ OAuth2UserRequest req = (OAuth2UserRequest) additionalParams.get("request");
+ String infoEndpoint = req.getClientRegistration().getProviderDetails().getUserInfoEndpoint().getUri();
+
+ if (infoEndpoint == null) {
+ infoEndpoint = CommonOAuth2Provider.GITHUB
+ .getBuilder(DUMMY)
+ .clientId(DUMMY)
+ .build()
+ .getProviderDetails()
+ .getUserInfoEndpoint()
+ .getUri();
+ }
+
+ WebClient webClient = WebClient.create(infoEndpoint);
+
final Mono<List<Map<String, Object>>> userOrganizations = webClient
.get()
- .uri("/user/orgs")
+ .uri("/orgs")
.headers(headers -> {
headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER);
OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");
diff --git a/kafka-ui-api/src/main/resources/application-local.yml b/kafka-ui-api/src/main/resources/application-local.yml
index 5eb3772e2bb..5e8fa18f407 100644
--- a/kafka-ui-api/src/main/resources/application-local.yml
+++ b/kafka-ui-api/src/main/resources/application-local.yml
@@ -6,6 +6,9 @@ logging:
#org.springframework.http.codec.json.Jackson2JsonDecoder: DEBUG
reactor.netty.http.server.AccessLog: INFO
+#server:
+# port: 8080 #- Port in which kafka-ui will run.
+
kafka:
clusters:
- name: local
@@ -42,27 +45,40 @@ kafka:
spring:
jmx:
enabled: true
- security:
- oauth2:
- client:
- registration:
- cognito:
- clientId: xx
- clientSecret: yy
- scope: openid
- client-name: cognito
- provider: cognito
- redirect-uri: http://localhost:8080/login/oauth2/code/cognito
- authorization-grant-type: authorization_code
- provider:
- cognito:
- issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
- jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
- user-name-attribute: username
+
auth:
type: DISABLED
-
-roles.file: /tmp/roles.yml
-
-#server:
-# port: 8080 #- Port in which kafka-ui will run.
+# type: OAUTH2
+# oauth2:
+# client:
+# cognito:
+# clientId:
+# clientSecret:
+# scope: openid
+# client-name: cognito
+# provider: cognito
+# redirect-uri: http://localhost:8080/login/oauth2/code/cognito
+# authorization-grant-type: authorization_code
+# issuer-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj
+# jwk-set-uri: https://cognito-idp.eu-central-1.amazonaws.com/eu-central-1_M7cIUn1nj/.well-known/jwks.json
+# user-name-attribute: username
+# custom-params:
+# type: cognito
+# logoutUrl: https://kafka-ui.auth.eu-central-1.amazoncognito.com/logout
+# google:
+# provider: google
+# clientId:
+# clientSecret:
+# user-name-attribute: email
+# custom-params:
+# type: google
+# allowedDomain: provectus.com
+# github:
+# provider: github
+# clientId:
+# clientSecret:
+# scope:
+# - read:org
+# user-name-attribute: login
+# custom-params:
+# type: github
| null | val | test | 2023-03-28T16:11:07 | "2023-03-23T14:39:01Z" | sti-yvi | train |
provectus/kafka-ui/3536_3548 | provectus/kafka-ui | provectus/kafka-ui/3536 | provectus/kafka-ui/3548 | [
"connected"
] | acfe7a4afcce8da9d111d8922a6cdbb311e53450 | 58102faa16922488c509110948a003b753ebdf15 | [
"Hello there ubergarm! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hello @ubergarm, thank you for opening as issue. \r\nWe found that if contoller node is unknown(which is Msk Serverless case) we disabling topics deletion, which is not right. We created PR to fix it (https://github.com/provectus/kafka-ui/pull/3548), we will merge it soon.",
"I confirmed this works now in `0.6.1` thanks so much for the quick point release!!"
] | [] | "2023-03-24T09:22:35Z" | [
"type/bug",
"scope/backend"
] | Bug: Topic deletion is unavailable on MSK Serverless for 0.6.0 | ## Issue
I can successfully configure and run `kafka-ui-api-v0.6.0.jar` to connect to our MSK Serverless cluster, list topics, and create a topic. However when I attempt to delete the topic it always pops up a red toast saying `400 Bad Request Topic deletion restricted`. I've tried many config options after grep'ing through the code but nothing seems to change it.
## Expected Behavior
When I click on `Topics->Delete Selected Topics->Confirm` I expect it to delete the topic and remove it from the list. (see screenshot at bottom)
## AWS EC2 IAM Role Setup
I have an EC2 instance with the correct security group, IAM policy with wildcard permissions on the cluster, and IAM role. To be sure I've confirmed that I can run the kafka cli tools to create, list, and delete topics successfully on the same EC2 instance running ubuntu LTS and all this works fine:
```bash
export BS="boot-blahblahblah.c3.kafka-serverless.us-east-2.amazonaws.com:9098"
# CREATE TOPIC
bin/kafka-topics.sh \
--bootstrap-server $BS \
--command-config client.properties \
--create \
--topic myTestTopic \
--partitions 2
# LIST TOPICS
bin/kafka-topics.sh \
--bootstrap-server $BS \
--command-config client.properties \
--list
# DELETE TOPIC
bin/kafka-topics.sh \
--bootstrap-server $BS \
--command-config client.properties \
--delete \
--topic myTestTopic
```
## kafka-ui invokation
Now on the same EC2 instance, I'm running kafka-ui as such:
```bash
# using sdkman
$ sdk current java
Using java version 17.0.5-amzn
# java details
$ java --version
openjdk 17.0.5 2022-10-18 LTS
OpenJDK Runtime Environment Corretto-17.0.5.8.1 (build 17.0.5+8-LTS)
OpenJDK 64-Bit Server VM Corretto-17.0.5.8.1 (build 17.0.5+8-LTS, mixed mode, sharing)
# start up kafka-ui
$ java \
--add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED \
-Dspring.profiles.active=default \
-jar kafka-ui-api-v0.6.0.jar
```
Here is an example log from when i run like this:
```
_ _ ___ __ _ _ _ __ __ _
| | | |_ _| / _|___ _ _ /_\ _ __ __ _ __| |_ ___ | |/ /__ _ / _| |_____
| |_| || | | _/ _ | '_| / _ \| '_ / _` / _| ' \/ -_) | ' </ _` | _| / / _`|
\___/|___| |_| \___|_| /_/ \_| .__\__,_\__|_||_\___| |_|\_\__,_|_| |_\_\__,|
|_|
2023-03-22 17:53:11,991 INFO [background-preinit] o.h.v.i.u.Version: HV000001: Hibernate Validator 6.2.5.Final
2023-03-22 17:53:12,101 INFO [main] c.p.k.u.KafkaUiApplication: Starting KafkaUiApplication using Java 17.0.5 on ec2.staging with PID 41483 (/home/ubuntu/kafka-ui/kafka-ui-api-v0.6.0.jar started by ubuntu in /home/ubuntu/kafka-ui)
2023-03-22 17:53:12,102 DEBUG [main] c.p.k.u.KafkaUiApplication: Running with Spring Boot v2.7.5, Spring v5.3.23
2023-03-22 17:53:12,103 INFO [main] c.p.k.u.KafkaUiApplication: The following 1 profile is active: "default"
2023-03-22 17:53:16,382 INFO [main] o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker: Bean 'org.springframework.security.config.annotation.method.configuration.ReactiveMethodSecurityConfiguration' of type [org.springframework.security.config.annotation.method.configuration.ReactiveMethodSecurityConfiguration] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2023-03-22 17:53:18,049 DEBUG [main] c.p.k.u.s.SerdesInitializer: Configuring serdes for cluster staging
2023-03-22 17:53:19,485 INFO [main] o.s.b.a.e.w.EndpointLinksResolver: Exposing 2 endpoint(s) beneath base path '/actuator'
2023-03-22 17:53:19,765 INFO [main] o.s.b.a.s.r.ReactiveUserDetailsServiceAutoConfiguration:
Using generated security password: BLAHBLAHB-BLAH-BLAH-BLAHBLAH-BLAH
2023-03-22 17:53:19,947 INFO [main] c.p.k.u.c.a.OAuthSecurityConfig: Configuring OAUTH2 authentication.
2023-03-22 17:53:20,967 INFO [main] o.s.b.w.e.n.NettyWebServer: Netty started on port 8080
2023-03-22 17:53:21,056 INFO [main] c.p.k.u.KafkaUiApplication: Started KafkaUiApplication in 10.707 seconds (JVM running for 12.153)
2023-03-22 17:53:21,099 DEBUG [parallel-1] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: staging
2023-03-22 17:53:21,130 INFO [parallel-1] o.a.k.c.a.AdminClientConfig: AdminClientConfig values:
bootstrap.servers = [boot-blahblah.c3.kafka-serverless.us-east-2.amazonaws.com:9098]
client.dns.lookup = use_all_dns_ips
client.id = kafka-ui-admin-1679507601-1
connections.max.idle.ms = 300000
default.api.timeout.ms = 60000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 2147483647
retry.backoff.ms = 100
sasl.client.callback.handler.class = class software.amazon.msk.auth.iam.IAMClientCallbackHandler
sasl.jaas.config = [hidden]
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.connect.timeout.ms = null
sasl.login.read.timeout.ms = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.login.retry.backoff.max.ms = 10000
sasl.login.retry.backoff.ms = 100
sasl.mechanism = AWS_MSK_IAM
sasl.oauthbearer.clock.skew.seconds = 30
sasl.oauthbearer.expected.audience = null
sasl.oauthbearer.expected.issuer = null
sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
sasl.oauthbearer.jwks.endpoint.url = null
sasl.oauthbearer.scope.claim.name = scope
sasl.oauthbearer.sub.claim.name = sub
sasl.oauthbearer.token.endpoint.url = null
security.protocol = SASL_SSL
security.providers = null
send.buffer.bytes = 131072
socket.connection.setup.timeout.max.ms = 30000
socket.connection.setup.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
ssl.endpoint.identification.algorithm = https
ssl.engine.factory.class = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.certificate.chain = null
ssl.keystore.key = null
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLSv1.3
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.certificates = null
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
2023-03-22 17:53:21,554 INFO [parallel-1] o.a.k.c.s.a.AbstractLogin: Successfully logged in.
2023-03-22 17:53:21,609 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka version: 3.3.1
2023-03-22 17:53:21,609 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka commitId: e23c59d00e687ff5
2023-03-22 17:53:21,609 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka startTimeMs: 1679507601606
2023-03-22 17:53:24,183 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: staging
```
## kafka-ui config
Here is the contents of my config file `application-default.yml`:
```yaml
# first specify the kafka cluster connection and config options
# https://github.com/provectus/kafka-ui/blob/master/documentation/guides/AWS_IAM.md
dynamic.config.enabled: false
delete.topic.enable: true
delete.topic.enabled: true
topic.deletion: true
kafka:
clusters:
- name: staging
bootstrapServers: "boot-blahblah.c3.kafka-serverless.us-east-2.amazonaws.com:9098"
readOnly: false
delete.topic.enable: true
delete.topic.enabled: true
delete:
topic:
enable: true
enabled: true
topic.deletion: true
topic:
deletion: true
properties:
security.protocol: SASL_SSL
sasl.mechanism: AWS_MSK_IAM
sasl.client.callback.handler.class: software.amazon.msk.auth.iam.IAMClientCallbackHandler
sasl.jaas.config: software.amazon.msk.auth.iam.IAMLoginModule required;
## how to authenticate with the kafka-ui app
auth:
type: OAUTH2
oauth2:
client:
github:
provider: github
clientId: xxx
clientSecret: yyy
scope:
- read:org
user-name-attribute: login
custom-params:
type: github
# next specify the role based access control stuff
# https://github.com/provectus/kafka-ui/wiki/RBAC-(role-based-access-control)
rbac:
roles:
- name: "admins"
clusters:
- staging
subjects:
- provider: oauth_github
type: user
value: ubergarm
permissions:
- resource: clusterconfig
actions: all
- resource: topic
value: ".*"
actions: all
- resource: topic
value: "foobar"
actions:
- VIEW # can be upper or lower case
- CREATE
- EDIT
- DELETE
- MESSAGES_READ
- MESSAGES_PRODUCE
- MESSAGES_DELETE
- resource: consumer
value: ".*"
actions: all
- resource: schema
value: ".*"
actions: all
- resource: connect
value: ".*"
actions: all
- resource: ksql
actions: all
```
I initially tried without OAUTH or RBAC, but that didn't work, so I tried with it and even explicitly set permissions to delete the test topic. I've also tried many combinations of variable names and even tried some ENV VAR versions to allow deleting of topics but nothing has yet worked.
Finally I tried to use the previous release `kafka-ui-api-v0.5.0.jar` but it can't even list topics and gives this error at runtime:
```
2023-03-22 17:28:28,138 INFO [parallel-1] o.a.k.c.s.a.AbstractLogin: Successfully logged in.
2023-03-22 17:28:28,391 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka version: 3.3.1
2023-03-22 17:28:28,392 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka commitId: e23c59d00e687ff5
2023-03-22 17:28:28,392 INFO [parallel-1] o.a.k.c.u.AppInfoParser: Kafka startTimeMs: 1679506108389
2023-03-22 17:28:30,977 ERROR [parallel-2] c.p.k.u.s.StatisticsService: Failed to collect cluster staging info
org.apache.kafka.common.errors.UnsupportedVersionException: The broker does not support DESCRIBE_LOG_DIRS
2023-03-22 17:28:30,977 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: staging
2023-03-22 17:28:57,644 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: staging
2023-03-22 17:28:57,708 ERROR [parallel-2] c.p.k.u.s.StatisticsService: Failed to collect cluster staging info
org.apache.kafka.common.errors.UnsupportedVersionException: The broker does not support DESCRIBE_LOG_DIRS
2023-03-22 17:28:57,709 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: staging
```
## Screenshots
Here is a screenshot of the webapp where I just created a new topic fine, but am unable to delete it.

## Thanks
Thanks much! Hopefully it is just some config confusion and we can get this going. Cheers! | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
index f5fbf262649..ec749abd14e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
@@ -25,7 +25,8 @@ public class FeatureService {
private final AdminClientService adminClientService;
- public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster, @Nullable Node controller) {
+ public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
+ ReactiveAdminClient.ClusterDescription clusterDescription) {
List<Mono<ClusterFeature>> features = new ArrayList<>();
if (Optional.ofNullable(cluster.getConnectsClients())
@@ -42,17 +43,15 @@ public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster, @Nu
features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
}
- if (controller != null) {
- features.add(
- isTopicDeletionEnabled(cluster, controller)
- .flatMap(r -> Boolean.TRUE.equals(r) ? Mono.just(ClusterFeature.TOPIC_DELETION) : Mono.empty())
- );
- }
+ features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
return Flux.fromIterable(features).flatMap(m -> m).collectList();
}
- private Mono<Boolean> isTopicDeletionEnabled(KafkaCluster cluster, Node controller) {
+ private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
+ if (controller == null) {
+ return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
+ }
return adminClientService.get(cluster)
.flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
.map(config ->
@@ -61,6 +60,9 @@ private Mono<Boolean> isTopicDeletionEnabled(KafkaCluster cluster, Node controll
.filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
.map(e -> Boolean.parseBoolean(e.value()))
.findFirst()
- .orElse(true));
+ .orElse(true))
+ .flatMap(enabled -> enabled
+ ? Mono.just(ClusterFeature.TOPIC_DELETION)
+ : Mono.empty());
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
index a36a64ff6dc..994c30714ae 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
@@ -41,7 +41,7 @@ private Mono<Statistics> getStatistics(KafkaCluster cluster) {
List.of(
metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
getLogDirInfo(description, ac),
- featureService.getAvailableFeatures(cluster, description.getController()),
+ featureService.getAvailableFeatures(cluster, description),
loadTopicConfigs(cluster),
describeTopics(cluster)),
results ->
| null | train | test | 2023-03-24T11:05:59 | "2023-03-22T17:57:42Z" | ubergarm | train |
provectus/kafka-ui/3406_3551 | provectus/kafka-ui | provectus/kafka-ui/3406 | provectus/kafka-ui/3551 | [
"keyword_pr_to_issue"
] | 58102faa16922488c509110948a003b753ebdf15 | dc08701246418536c664bb53ebd281ae4f9e306d | [] | [
"i think maybe we can just make a general one cause i see this spinner code thing repeated across the application. we can make it customizable and just use that with `height` and `widths` and `colors` `props` etc and set the most common used one as a default value.",
"I would suggest add such spinner to Button component. So we will have button with `inProgress` state ",
"Yeah, I thought about it too. Update PR soon, thanks!",
"> I would suggest add such spinner to Button component. So we will have button with `inProgress` state\r\n@workshur\r\n\r\nBut what if we need button with spinner but it somehow differs from other button with such spinner? I don't consider expanding button props with spinner customization props as a great move",
"@Mgrdich @workshur Hey, updated spinner usage",
"Should I add spinner to Link button?",
"Split border properties for easier manipulating them",
"We use S namespace for all styled components related to current component. \r\n```suggestion\r\n <Spinner\r\n```",
"do we really need to customize all this props here? ",
"I think yes, but we can create default props object and customize on demand. In PageLoader it's gonna be empty in such case",
"so Links are for navigation , the actions are done in a sync way so maybe no.",
"i think we should disable the button when it is in progress,\r\n`<StyledButton type=\"button\" {...props} disabled={props.disabled || props.inProgress}`",
"Updated in https://github.com/provectus/kafka-ui/pull/3551/commits/8a6706f0dd3c26aa2fc5f6bcd0ddf91625bf1092",
"Updated + added test for this behavior"
] | "2023-03-24T11:41:58Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted",
"status/needs-attention",
"area/wizard"
] | Wizard: Add a proper UI handling for image restart on Cluster config update | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
### Which version of the app are you running?
<!-- Please provide docker image version or check commit hash in the top left corner in UI) -->
[dac023e](https://github.com/provectus/kafka-ui/commit/dac023e)
the 'wizard' branch
### Is your proposal related to a problem?
<!--
Provide a clear and concise description of what the problem is.
For example, "I'm always frustrated when..."
-->
When the cluster settings are updated, the Kafka image is restarted, and this is not reflected in the UI in any way.
There are also failed requests, which can be confusing or misleading:

### Describe the solution you'd like
<!--
Provide a clear and concise description of what you want to happen.
-->
1. After successful submit wizard redirects to dashboard with extra query param (e.g. `restarting=true`)
1. Update useCluster hook:
- if `restarting=true` set refresh number to 10 and refresh interval = 10s.
- update error message. `It appears that there may be an issue. Please review the cluster configuration or logs to ensure everything is correct`
1. While the updating process is ongoing, a message will be displayed on the dashboard page: `The configuration has been updated and is currently being applied. Please wait.`.
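A rough sketch of the polling behaviour from point 2 (purely illustrative; the hook, fetcher and option values are placeholders, assuming the app's React Query based data hooks):
```typescript
import { useQuery } from '@tanstack/react-query';

// Placeholder for the existing clusters fetcher.
declare function fetchClusters(): Promise<unknown>;

// Illustrative sketch only: poll the clusters endpoint while `?restarting=true` is present.
const POLL_INTERVAL_MS = 10_000; // refresh interval = 10s
const MAX_POLL_ATTEMPTS = 10;    // refresh number = 10

export function useClusters(isRestarting: boolean) {
  return useQuery(['clusters'], fetchClusters, {
    refetchInterval: isRestarting ? POLL_INTERVAL_MS : false,
    retry: isRestarting ? MAX_POLL_ATTEMPTS : false,
  });
}
```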
### Describe alternatives you've considered
<!--
Let us know about other solutions you've tried or researched.
-->
### Additional context
<!--
Is there anything else you can add about the proposal?
You might want to link to related issues here, if you haven't already.
-->
| [
"kafka-ui-react-app/src/components/common/Button/Button.tsx",
"kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx",
"kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts",
"kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx",
"kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx"
] | [
"kafka-ui-react-app/src/components/common/Button/Button.tsx",
"kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx",
"kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts",
"kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx",
"kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts",
"kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx",
"kafka-ui-react-app/src/components/common/Spinner/types.ts",
"kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Button/Button.tsx b/kafka-ui-react-app/src/components/common/Button/Button.tsx
index 5e0a6609eb3..fe330a5e44f 100644
--- a/kafka-ui-react-app/src/components/common/Button/Button.tsx
+++ b/kafka-ui-react-app/src/components/common/Button/Button.tsx
@@ -3,11 +3,13 @@ import StyledButton, {
} from 'components/common/Button/Button.styled';
import React from 'react';
import { Link } from 'react-router-dom';
+import Spinner from 'components/common/Spinner/Spinner';
export interface Props
extends React.ButtonHTMLAttributes<HTMLButtonElement>,
ButtonProps {
to?: string | object;
+ inProgress?: boolean;
}
export const Button: React.FC<Props> = ({ to, ...props }) => {
@@ -20,5 +22,16 @@ export const Button: React.FC<Props> = ({ to, ...props }) => {
</Link>
);
}
- return <StyledButton type="button" {...props} />;
+ return (
+ <StyledButton
+ type="button"
+ disabled={props.disabled || props.inProgress}
+ {...props}
+ >
+ {props.children}{' '}
+ {props.inProgress ? (
+ <Spinner size={16} borderWidth={2} marginLeft={2} emptyBorderColor />
+ ) : null}
+ </StyledButton>
+ );
};
diff --git a/kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx b/kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx
index e0fb407a01a..21919eb0da7 100644
--- a/kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/Button/__tests__/Button.spec.tsx
@@ -58,4 +58,10 @@ describe('Button', () => {
theme.button.primary.invertedColors.normal
);
});
+ it('renders disabled button and spinner when inProgress truthy', () => {
+ render(<Button buttonType="primary" buttonSize="M" inProgress />);
+ expect(screen.getByRole('button')).toBeInTheDocument();
+ expect(screen.getByRole('progressbar')).toBeInTheDocument();
+ expect(screen.getByRole('button')).toBeDisabled();
+ });
});
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts
index 87f7a27fd13..f38f21c0b28 100644
--- a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts
+++ b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.styled.ts
@@ -1,4 +1,4 @@
-import styled, { css } from 'styled-components';
+import styled from 'styled-components';
export const Wrapper = styled.div`
display: flex;
@@ -8,23 +8,3 @@ export const Wrapper = styled.div`
height: 100%;
width: 100%;
`;
-
-export const Spinner = styled.div(
- ({ theme }) => css`
- border: 10px solid ${theme.pageLoader.borderColor};
- border-bottom: 10px solid ${theme.pageLoader.borderBottomColor};
- border-radius: 50%;
- width: 80px;
- height: 80px;
- animation: spin 1.3s linear infinite;
-
- @keyframes spin {
- 0% {
- transform: rotate(0deg);
- }
- 100% {
- transform: rotate(360deg);
- }
- }
- `
-);
diff --git a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
index 33348b17ea1..674ab0f0cee 100644
--- a/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
+++ b/kafka-ui-react-app/src/components/common/PageLoader/PageLoader.tsx
@@ -1,10 +1,11 @@
import React from 'react';
+import Spinner from 'components/common/Spinner/Spinner';
import * as S from './PageLoader.styled';
const PageLoader: React.FC = () => (
<S.Wrapper>
- <S.Spinner role="progressbar" />
+ <Spinner />
</S.Wrapper>
);
diff --git a/kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts b/kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts
new file mode 100644
index 00000000000..32edbcb11c4
--- /dev/null
+++ b/kafka-ui-react-app/src/components/common/Spinner/Spinner.styled.ts
@@ -0,0 +1,26 @@
+import styled from 'styled-components';
+import { SpinnerProps } from 'components/common/Spinner/types';
+
+export const Spinner = styled.div<SpinnerProps>`
+ border-width: ${(props) => props.borderWidth}px;
+ border-style: solid;
+ border-color: ${({ theme }) => theme.pageLoader.borderColor};
+ border-bottom-color: ${(props) =>
+ props.emptyBorderColor
+ ? 'transparent'
+ : props.theme.pageLoader.borderBottomColor};
+ border-radius: 50%;
+ width: ${(props) => props.size}px;
+ height: ${(props) => props.size}px;
+ margin-left: ${(props) => props.marginLeft}px;
+ animation: spin 1.3s linear infinite;
+
+ @keyframes spin {
+ 0% {
+ transform: rotate(0deg);
+ }
+ 100% {
+ transform: rotate(360deg);
+ }
+ }
+`;
diff --git a/kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx b/kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx
new file mode 100644
index 00000000000..1d1cd597333
--- /dev/null
+++ b/kafka-ui-react-app/src/components/common/Spinner/Spinner.tsx
@@ -0,0 +1,20 @@
+/* eslint-disable react/default-props-match-prop-types */
+import React from 'react';
+import { SpinnerProps } from 'components/common/Spinner/types';
+
+import * as S from './Spinner.styled';
+
+const defaultProps: SpinnerProps = {
+ size: 80,
+ borderWidth: 10,
+ emptyBorderColor: false,
+ marginLeft: 0,
+};
+
+const Spinner: React.FC<SpinnerProps> = (props) => (
+ <S.Spinner role="progressbar" {...props} />
+);
+
+Spinner.defaultProps = defaultProps;
+
+export default Spinner;
diff --git a/kafka-ui-react-app/src/components/common/Spinner/types.ts b/kafka-ui-react-app/src/components/common/Spinner/types.ts
new file mode 100644
index 00000000000..7db64b7fd30
--- /dev/null
+++ b/kafka-ui-react-app/src/components/common/Spinner/types.ts
@@ -0,0 +1,6 @@
+export interface SpinnerProps {
+ size?: number;
+ borderWidth?: number;
+ emptyBorderColor?: boolean;
+ marginLeft?: number;
+}
diff --git a/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx b/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
index 2c636c29a27..d4007cf4883 100644
--- a/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
+++ b/kafka-ui-react-app/src/widgets/ClusterConfigForm/index.tsx
@@ -142,6 +142,7 @@ const ClusterConfigForm: React.FC<ClusterConfigFormProps> = ({
buttonSize="L"
buttonType="primary"
disabled={isSubmitDisabled}
+ inProgress={isSubmitting}
>
Submit
</Button>
| null | val | test | 2023-03-27T13:40:03 | "2023-02-24T12:33:02Z" | BulatKha | train |
provectus/kafka-ui/3515_3555 | provectus/kafka-ui | provectus/kafka-ui/3515 | provectus/kafka-ui/3555 | [
"connected"
] | acfe7a4afcce8da9d111d8922a6cdbb311e53450 | 58eca230fc8392d42e4ad4ef47d7b83dc4ee3a75 | [] | [] | "2023-03-27T05:38:45Z" | [
"scope/QA",
"scope/AQA"
] | [E2E] Verify internal topics naming | 1. Navigate to Topics - Topics list is opened
2. Activate the Show Internal switcher - Internal topics are displayed in the list
3. Verify internal topics naming - All internal topic names start with '_'
https://app.qase.io/project/KAFKAUI?case=334&previewMode=modal&suite=2 | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
index b26f2c2997a..0f25489128f 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java
@@ -208,23 +208,23 @@ public TopicsList selectItem(boolean select) {
return new TopicsList();
}
+ private SelenideElement getNameElm() {
+ return element.$x("./td[2]");
+ }
+
@Step
public boolean isInternal() {
boolean internal = false;
try {
- internal = element.$x("./td[2]/a/span").isDisplayed();
+ internal = getNameElm().$x("./a/span").isDisplayed();
} catch (Throwable ignored) {
}
return internal;
}
- private SelenideElement getNameElm() {
- return element.$x("./td[2]");
- }
-
@Step
public String getName() {
- return getNameElm().getText().trim();
+ return getNameElm().$x("./a").getAttribute("title");
}
@Step
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java
index 32edaff8c96..84fa7476cf3 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualSuite/backlog/SmokeBacklog.java
@@ -58,4 +58,18 @@ public void testCaseF() {
@Test
public void testCaseG() {
}
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = 5)
+ @QaseId(335)
+ @Test
+ public void testCaseH() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = 5)
+ @QaseId(336)
+ @Test
+ public void testCaseI() {
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
index c3091a61c18..8cca35beb6b 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokeSuite/topics/TopicsTest.java
@@ -359,7 +359,7 @@ public void checkApplyingSavedFilterWithinTopicMessages() {
@QaseId(11)
@Test(priority = 15)
- public void checkShowInternalTopicsButtonFunctionality() {
+ public void checkShowInternalTopicsButton() {
navigateToTopics();
SoftAssert softly = new SoftAssert();
softly.assertTrue(topicsList.isShowInternalRadioBtnSelected(), "isInternalRadioBtnSelected()");
@@ -373,8 +373,21 @@ public void checkShowInternalTopicsButtonFunctionality() {
softly.assertAll();
}
- @QaseId(56)
+ @QaseId(334)
@Test(priority = 16)
+ public void checkInternalTopicsNaming() {
+ navigateToTopics();
+ SoftAssert softly = new SoftAssert();
+ topicsList
+ .setShowInternalRadioButton(true)
+ .getInternalTopics()
+ .forEach(topic -> softly.assertTrue(topic.getName().startsWith("_"),
+ String.format("'%s' starts with '_'", topic.getName())));
+ softly.assertAll();
+ }
+
+ @QaseId(56)
+ @Test(priority = 17)
public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
navigateToTopics();
topicsList
@@ -422,7 +435,7 @@ public void checkRetentionBytesAccordingToMaxSizeOnDisk() {
}
@QaseId(247)
- @Test(priority = 17)
+ @Test(priority = 18)
public void recreateTopicFromTopicProfile() {
Topic topicToRecreate = new Topic()
.setName("topic-to-recreate-" + randomAlphabetic(5))
@@ -450,7 +463,7 @@ public void recreateTopicFromTopicProfile() {
}
@QaseId(8)
- @Test(priority = 18)
+ @Test(priority = 19)
public void checkCopyTopicPossibility() {
Topic topicToCopy = new Topic()
.setName("topic-to-copy-" + randomAlphabetic(5))
| val | test | 2023-03-24T11:05:59 | "2023-03-17T11:19:21Z" | VladSenyuta | train |
provectus/kafka-ui/3040_3561 | provectus/kafka-ui | provectus/kafka-ui/3040 | provectus/kafka-ui/3561 | [
"connected"
] | acfe7a4afcce8da9d111d8922a6cdbb311e53450 | 9f1a4df0a17fe2ebaeacae6310fa9f434e5aa9a1 | [] | [] | "2023-03-27T10:56:05Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | [UI] Schema paths are displayed differently for compare versions and edit | **Describe the bug**
The schema name is displayed in the path for Compare versions, but not for Edit.
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
1. Navigate to Schema registry
2. Select any schema
3. Press Compare versions
4. Check the path to be [Schema name/Compare versions]
5. Turn back to Schema profile
6. Press Edit
7. Check the path
**Actual behavior:** Only Edit is shown in the path, without the schema name
**Expected behavior**
It would be better to keep consistency: either remove the schema name for both or keep it for both.
**Screenshots**
https://user-images.githubusercontent.com/104780608/206220499-9626fe94-1ff9-4dfc-8ea4-69f377fc2ade.mov
**Additional context**
**Priority:** low
| [
"kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx"
] | [
"kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx b/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
index ea414ab4b4d..9ce7f280f43 100644
--- a/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
@@ -110,7 +110,7 @@ const Form: React.FC = () => {
return (
<FormProvider {...methods}>
<PageHeading
- text="Edit"
+ text={`${subject} Edit`}
backText="Schema Registry"
backTo={clusterSchemasPath(clusterName)}
/>
| null | train | test | 2023-03-24T11:05:59 | "2022-12-07T15:31:14Z" | armenuikafka | train |
provectus/kafka-ui/3427_3566 | provectus/kafka-ui | provectus/kafka-ui/3427 | provectus/kafka-ui/3566 | [
"connected"
] | aed6c16496ddc1dbc83daceaf3c0efc296083a23 | 8ecb719e9b762a4bc132997dd660b8519c185ef6 | [
"TODO, brokers page:\r\nadd:\r\n- partitions count per broker (+skew %)\r\n- leaders count per broker (+skew %)\r\n\r\nFrontend:\r\nBrokers table:\r\n- merge segment size & segment count, make it a single column \"Disk usage\", data format: \"%Size%, %x% segment(s)\"\r\n- Partitions skew (Column tooltip: \"The divergence from the average brokers' value\"). Display \"-\" if null, partitionsSkew field. \r\n**Percentage might be either negative or positive**\r\nChange the font color depending on the skew:\r\n10-20% -> yellow\r\n20%+ -> red\r\n- Leaders\r\n- Leader skew\r\n- Online partitions (inSyncPartitions of partitions). If inSyncPartitions != partitions -> font color red"
] | [
"partitionsHost -> partitions"
] | "2023-03-27T19:36:16Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/accepted"
] | View broker skew in kafka-ui | Hi there,
Is there a way to see if there is broker skew in Kafka UI?
Screenshot from Conduktor:
<img width="1465" alt="MicrosoftTeams-image" src="https://user-images.githubusercontent.com/51237742/222124221-865dcf40-3a9c-49a4-a60f-60359bde667e.png">
Cheers | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/PartitionDistributionStatsTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java
index edab9a8aeb5..4a0d1ba0dd1 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalBroker.java
@@ -1,6 +1,7 @@
package com.provectus.kafka.ui.model;
import java.math.BigDecimal;
+import javax.annotation.Nullable;
import lombok.Data;
import org.apache.kafka.common.Node;
@@ -10,15 +11,27 @@ public class InternalBroker {
private final Integer id;
private final String host;
private final Integer port;
- private final BigDecimal bytesInPerSec;
- private final BigDecimal bytesOutPerSec;
+ private final @Nullable BigDecimal bytesInPerSec;
+ private final @Nullable BigDecimal bytesOutPerSec;
+ private final @Nullable Integer partitionsLeader;
+ private final @Nullable Integer partitions;
+ private final @Nullable Integer inSyncPartitions;
+ private final @Nullable BigDecimal leadersSkew;
+ private final @Nullable BigDecimal partitionsSkew;
- public InternalBroker(Node node, Statistics statistics) {
+ public InternalBroker(Node node,
+ PartitionDistributionStats partitionDistribution,
+ Statistics statistics) {
this.id = node.id();
this.host = node.host();
this.port = node.port();
this.bytesInPerSec = statistics.getMetrics().getBrokerBytesInPerSec().get(node.id());
this.bytesOutPerSec = statistics.getMetrics().getBrokerBytesOutPerSec().get(node.id());
+ this.partitionsLeader = partitionDistribution.getPartitionLeaders().get(node);
+ this.partitions = partitionDistribution.getPartitionsCount().get(node);
+ this.inSyncPartitions = partitionDistribution.getInSyncPartitions().get(node);
+ this.leadersSkew = partitionDistribution.leadersSkew(node);
+ this.partitionsSkew = partitionDistribution.partitionsSkew(node);
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
new file mode 100644
index 00000000000..b625533d1dd
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
@@ -0,0 +1,93 @@
+package com.provectus.kafka.ui.model;
+
+import java.math.BigDecimal;
+import java.math.MathContext;
+import java.util.HashMap;
+import java.util.Map;
+import javax.annotation.Nullable;
+import lombok.AccessLevel;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartitionInfo;
+
+@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
+@Getter
+@Slf4j
+public class PartitionDistributionStats {
+
+ // avg skew will show unuseful results on low number of partitions
+ private static final int MIN_PARTITIONS_FOR_SKEW_CALCULATION = 50;
+
+ private static final MathContext ROUNDING_MATH_CTX = new MathContext(3);
+
+ private final Map<Node, Integer> partitionLeaders;
+ private final Map<Node, Integer> partitionsCount;
+ private final Map<Node, Integer> inSyncPartitions;
+ private final double avgLeadersCntPerBroker;
+ private final double avgPartitionsPerBroker;
+ private final boolean skewCanBeCalculated;
+
+ public static PartitionDistributionStats create(Statistics stats) {
+ return create(stats, MIN_PARTITIONS_FOR_SKEW_CALCULATION);
+ }
+
+ static PartitionDistributionStats create(Statistics stats, int minPartitionsForSkewCalculation) {
+ var partitionLeaders = new HashMap<Node, Integer>();
+ var partitionsReplicated = new HashMap<Node, Integer>();
+ var isr = new HashMap<Node, Integer>();
+ int partitionsCnt = 0;
+ for (TopicDescription td : stats.getTopicDescriptions().values()) {
+ for (TopicPartitionInfo tp : td.partitions()) {
+ partitionsCnt++;
+ tp.replicas().forEach(r -> incr(partitionsReplicated, r));
+ tp.isr().forEach(r -> incr(isr, r));
+ if (tp.leader() != null) {
+ incr(partitionLeaders, tp.leader());
+ }
+ }
+ }
+ int nodesWithPartitions = partitionsReplicated.size();
+ int partitionReplications = partitionsReplicated.values().stream().mapToInt(i -> i).sum();
+ var avgPartitionsPerBroker = nodesWithPartitions == 0 ? 0 : ((double) partitionReplications) / nodesWithPartitions;
+
+ int nodesWithLeaders = partitionLeaders.size();
+ int leadersCnt = partitionLeaders.values().stream().mapToInt(i -> i).sum();
+ var avgLeadersCntPerBroker = nodesWithLeaders == 0 ? 0 : ((double) leadersCnt) / nodesWithLeaders;
+
+ return new PartitionDistributionStats(
+ partitionLeaders,
+ partitionsReplicated,
+ isr,
+ avgLeadersCntPerBroker,
+ avgPartitionsPerBroker,
+ partitionsCnt >= minPartitionsForSkewCalculation
+ );
+ }
+
+ private static void incr(Map<Node, Integer> map, Node n) {
+ map.compute(n, (k, c) -> c == null ? 1 : ++c);
+ }
+
+ @Nullable
+ public BigDecimal partitionsSkew(Node node) {
+ return calculateAvgSkew(partitionsCount.get(node), avgPartitionsPerBroker);
+ }
+
+ @Nullable
+ public BigDecimal leadersSkew(Node node) {
+ return calculateAvgSkew(partitionLeaders.get(node), avgLeadersCntPerBroker);
+ }
+
+ // Returns difference (in percents) from average value, null if it can't be calculated
+ @Nullable
+ private BigDecimal calculateAvgSkew(@Nullable Integer value, double avgValue) {
+ if (avgValue == 0 || !skewCanBeCalculated) {
+ return null;
+ }
+ value = value == null ? 0 : value;
+ return new BigDecimal((value - avgValue) / avgValue * 100.0).round(ROUNDING_MATH_CTX);
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java
index 720642157b0..8a2ac1a63e9 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/BrokerService.java
@@ -10,6 +10,7 @@
import com.provectus.kafka.ui.model.InternalBroker;
import com.provectus.kafka.ui.model.InternalBrokerConfig;
import com.provectus.kafka.ui.model.KafkaCluster;
+import com.provectus.kafka.ui.model.PartitionDistributionStats;
import com.provectus.kafka.ui.service.metrics.RawMetric;
import java.util.Collections;
import java.util.HashMap;
@@ -64,11 +65,13 @@ private Flux<InternalBrokerConfig> getBrokersConfig(KafkaCluster cluster, Intege
}
public Flux<InternalBroker> getBrokers(KafkaCluster cluster) {
+ var stats = statisticsCache.get(cluster);
+ var partitionsDistribution = PartitionDistributionStats.create(stats);
return adminClientService
.get(cluster)
.flatMap(ReactiveAdminClient::describeCluster)
.map(description -> description.getNodes().stream()
- .map(node -> new InternalBroker(node, statisticsCache.get(cluster)))
+ .map(node -> new InternalBroker(node, partitionsDistribution, stats))
.collect(Collectors.toList()))
.flatMapMany(Flux::fromIterable);
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 4bd3d2207c3..78c7cf3bf52 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -2375,6 +2375,16 @@ components:
type: number
bytesOutPerSec:
type: number
+ partitionsLeader:
+ type: integer
+ partitions:
+ type: integer
+ inSyncPartitions:
+ type: integer
+ partitionsSkew:
+ type: number
+ leadersSkew:
+ type: number
required:
- id
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/PartitionDistributionStatsTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/PartitionDistributionStatsTest.java
new file mode 100644
index 00000000000..c83c4f5cd86
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/model/PartitionDistributionStatsTest.java
@@ -0,0 +1,83 @@
+package com.provectus.kafka.ui.model;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import java.math.BigDecimal;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.apache.kafka.clients.admin.TopicDescription;
+import org.apache.kafka.common.Node;
+import org.apache.kafka.common.TopicPartitionInfo;
+import org.assertj.core.data.Percentage;
+import org.junit.jupiter.api.Test;
+
+class PartitionDistributionStatsTest {
+
+ @Test
+ void skewCalculatedBasedOnPartitionsCounts() {
+ Node n1 = new Node(1, "n1", 9092);
+ Node n2 = new Node(2, "n2", 9092);
+ Node n3 = new Node(3, "n3", 9092);
+ Node n4 = new Node(4, "n4", 9092);
+
+ var stats = PartitionDistributionStats.create(
+ Statistics.builder()
+ .clusterDescription(
+ new ReactiveAdminClient.ClusterDescription(null, "test", Set.of(n1, n2, n3), null))
+ .topicDescriptions(
+ Map.of(
+ "t1", new TopicDescription(
+ "t1", false,
+ List.of(
+ new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
+ new TopicPartitionInfo(1, n2, List.of(n2, n3), List.of(n2, n3))
+ )
+ ),
+ "t2", new TopicDescription(
+ "t2", false,
+ List.of(
+ new TopicPartitionInfo(0, n1, List.of(n1, n2), List.of(n1, n2)),
+ new TopicPartitionInfo(1, null, List.of(n2, n1), List.of(n1))
+ )
+ )
+ )
+ )
+ .build(), 4
+ );
+
+ assertThat(stats.getPartitionLeaders())
+ .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 2, n2, 1));
+ assertThat(stats.getPartitionsCount())
+ .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 4, n3, 1));
+ assertThat(stats.getInSyncPartitions())
+ .containsExactlyInAnyOrderEntriesOf(Map.of(n1, 3, n2, 3, n3, 1));
+
+ // Node(partitions): n1(3), n2(4), n3(1), n4(0)
+ // average partitions cnt = (3+4+1) / 3 = 2.666 (counting only nodes with partitions!)
+ assertThat(stats.getAvgPartitionsPerBroker())
+ .isCloseTo(2.666, Percentage.withPercentage(1));
+
+ assertThat(stats.partitionsSkew(n1))
+ .isCloseTo(BigDecimal.valueOf(12.5), Percentage.withPercentage(1));
+ assertThat(stats.partitionsSkew(n2))
+ .isCloseTo(BigDecimal.valueOf(50), Percentage.withPercentage(1));
+ assertThat(stats.partitionsSkew(n3))
+ .isCloseTo(BigDecimal.valueOf(-62.5), Percentage.withPercentage(1));
+ assertThat(stats.partitionsSkew(n4))
+ .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+
+ // Node(leaders): n1(2), n2(1), n3(0), n4(0)
+ // average leaders cnt = (2+1) / 2 = 1.5 (counting only nodes with leaders!)
+ assertThat(stats.leadersSkew(n1))
+ .isCloseTo(BigDecimal.valueOf(33.33), Percentage.withPercentage(1));
+ assertThat(stats.leadersSkew(n2))
+ .isCloseTo(BigDecimal.valueOf(-33.33), Percentage.withPercentage(1));
+ assertThat(stats.leadersSkew(n3))
+ .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+ assertThat(stats.leadersSkew(n4))
+ .isCloseTo(BigDecimal.valueOf(-100), Percentage.withPercentage(1));
+ }
+
+}
| test | test | 2023-04-21T15:58:11 | "2023-03-01T11:16:49Z" | wanisfahmyDE | train |
provectus/kafka-ui/3317_3569 | provectus/kafka-ui | provectus/kafka-ui/3317 | provectus/kafka-ui/3569 | [
"connected"
] | 20cc1e489b2dd9590c14c1c4819e5e01d9dc9b97 | ef0dacb0c36a1a180ef8fda670c82854658aab00 | [
"Filtering by offset still unavailable\r\n\r\n\r\nhttps://user-images.githubusercontent.com/104780608/228433188-be5f19f6-9057-42a1-a84a-0c594c0f1f73.mov\r\n\r\n"
] | [] | "2023-03-28T11:02:51Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed",
"type/regression",
"severity/high"
] | Topics: Messages filtering by seek type is broken within Topic profile |
**Describe the bug** (Actual behavior)
Messages filtering by seek type (Offset, Timestamp) is broken within Topic profile
**Expected behavior**
Should be possible to filter messages by Offset/Timestamp
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
1. Login to kafka-ui
2. Navigate to Topics
3. Select the Topic
4. Turn to Messages tab
5. Add new messages
6. Select seek type (Offset/Timestamp)
7. Press Submit
**Screenshots**
https://user-images.githubusercontent.com/104780608/217451012-2888e38d-0346-49fc-82cc-b89d43ed12cb.mov
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
index c3871432e70..ec62f266ed4 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
@@ -231,7 +231,10 @@ const Filters: React.FC<FiltersProps> = ({
props.seekType = SeekType.TIMESTAMP;
}
- if (selectedPartitions.length !== partitions.length) {
+ if (
+ selectedPartitions.length !== partitions.length ||
+ currentSeekType === SeekType.TIMESTAMP
+ ) {
// not everything in the partition is selected
props.seekTo = selectedPartitions.map(({ value }) => {
const offsetProperty =
| null | train | test | 2023-03-27T21:26:12 | "2023-02-08T06:24:28Z" | armenuikafka | train |
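The core of the fix recorded above is that a timestamp-based seek must always send an explicit `seekTo` list, even when every partition is selected; previously `seekTo` was only added when the partition selection was narrowed, so "all partitions + timestamp" produced an invalid request. A hedged TypeScript sketch of that decision as a pure function — the names and the trimmed `SeekType` union are illustrative, not the exact ones from `Filters.tsx`:

```typescript
type SeekType = 'OFFSET' | 'TIMESTAMP' | 'LATEST';

// Decide whether the message-fetch request must carry an explicit seekTo
// list: timestamp seeks always need one, otherwise it is only needed when
// the user narrowed the partition selection.
function needsSeekTo(
  seekType: SeekType,
  selectedPartitions: number[],
  allPartitions: number[]
): boolean {
  return (
    seekType === 'TIMESTAMP' ||
    selectedPartitions.length !== allPartitions.length
  );
}

console.log(needsSeekTo('TIMESTAMP', [0, 1, 2], [0, 1, 2])); // true (the fix)
console.log(needsSeekTo('LATEST', [0, 1, 2], [0, 1, 2]));    // false
console.log(needsSeekTo('OFFSET', [0], [0, 1, 2]));          // true
```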
provectus/kafka-ui/2753_3570 | provectus/kafka-ui | provectus/kafka-ui/2753 | provectus/kafka-ui/3570 | [
"connected"
] | ee1cd72dd5b6b74f8d856e4ed9135850ebc12e10 | 5c357f94fdde5356a2126193605476194f3cfca6 | [
"Hello there LaurentDanti! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"@LaurentDanti thank you for the issue!\r\n\r\n@Haarolean I think we already have issue created for fonts, maybe we add github call handling to that issue? ",
"hey @LaurentDanti, thanks for reaching out.\r\nI know already at least a few cases in which people successfully use the app in the \"air gap\" environment (e.g. #1390).\r\nWe already have the fonts issue in progress (#2728), we'll keep this open for considering opting out of the version check.",
"@Haarolean hello, I also have the fonts issue, UI doesn't work in isolated network, would you give workaround ?",
"@Fuou we're working on it (#2372)",
" @Haarolean what's the status here?",
"@cassanellicarlo it's up for grabs for the 3rd-party contributors since we don't have enough capacity to address this issue.",
"Hey π what is the status woth the opt out option?",
"Frontend implementation is invalid.\r\n\r\nTagged versions before:\r\n<img width=\"361\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/c44b0863-63dd-446f-a25a-a5d816843a60\">\r\nTagged versions now are missing the tag and have a commit displayed anyway:\r\n<img width=\"408\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/ed5fb888-52fa-4f8a-aa18-9d0d768d1402\">\r\n\r\nif version == versionTag display version rather than commitId\r\n<img width=\"766\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/66a9a277-d452-46df-ac17-2696fa8bba87\">\r\n\r\n@David-DB88 \r\n\r\n\r\n",
"ok\r\n"
] | [
"I suggest a rename as it's not kafka-related"
] | "2023-03-28T11:05:00Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/pending-frontend"
] | Air Gap: Enhancement: Opt out of version check | Hello,
I'm using your great UI for our Kafka cluster instance and it's perfect: functionality, rapidity, and easy installation in the k8s world with Helm.
But I'm trying to install this UI in an isolated network without internet access, so github.com is not allowed.
Is it possible to disable the call from the UI to https://api.github.com/repos/provectus/kafka-ui/releases/latest and load the fonts inside the image?
That way the UI could be used in an isolated network.
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
index b21ef10c619..571250ba947 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
@@ -13,12 +13,12 @@
import com.provectus.kafka.ui.model.RestartRequestDTO;
import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
+import com.provectus.kafka.ui.service.ApplicationInfoService;
import com.provectus.kafka.ui.service.KafkaClusterFactory;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
import com.provectus.kafka.ui.util.ApplicationRestarter;
import com.provectus.kafka.ui.util.DynamicConfigOperations;
import com.provectus.kafka.ui.util.DynamicConfigOperations.PropertiesStructure;
-import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
@@ -53,18 +53,11 @@ interface PropertiesMapper {
private final DynamicConfigOperations dynamicConfigOperations;
private final ApplicationRestarter restarter;
private final KafkaClusterFactory kafkaClusterFactory;
-
+ private final ApplicationInfoService applicationInfoService;
@Override
public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) {
- return Mono.just(
- new ApplicationInfoDTO()
- .enabledFeatures(
- dynamicConfigOperations.dynamicConfigEnabled()
- ? List.of(ApplicationInfoDTO.EnabledFeaturesEnum.DYNAMIC_CONFIG)
- : List.of()
- )
- ).map(ResponseEntity::ok);
+ return Mono.just(applicationInfoService.getApplicationInfo()).map(ResponseEntity::ok);
}
@Override
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java
new file mode 100644
index 00000000000..750a7179fb8
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ApplicationInfoService.java
@@ -0,0 +1,76 @@
+package com.provectus.kafka.ui.service;
+
+import static com.provectus.kafka.ui.model.ApplicationInfoDTO.EnabledFeaturesEnum;
+
+import com.provectus.kafka.ui.model.ApplicationInfoBuildDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoDTO;
+import com.provectus.kafka.ui.model.ApplicationInfoLatestReleaseDTO;
+import com.provectus.kafka.ui.util.DynamicConfigOperations;
+import com.provectus.kafka.ui.util.GithubReleaseInfo;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Properties;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.info.BuildProperties;
+import org.springframework.boot.info.GitProperties;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Service;
+
+@Service
+public class ApplicationInfoService {
+
+ private final GithubReleaseInfo githubReleaseInfo = new GithubReleaseInfo();
+
+ private final DynamicConfigOperations dynamicConfigOperations;
+ private final BuildProperties buildProperties;
+ private final GitProperties gitProperties;
+
+ public ApplicationInfoService(DynamicConfigOperations dynamicConfigOperations,
+ @Autowired(required = false) BuildProperties buildProperties,
+ @Autowired(required = false) GitProperties gitProperties) {
+ this.dynamicConfigOperations = dynamicConfigOperations;
+ this.buildProperties = Optional.ofNullable(buildProperties).orElse(new BuildProperties(new Properties()));
+ this.gitProperties = Optional.ofNullable(gitProperties).orElse(new GitProperties(new Properties()));
+ }
+
+ public ApplicationInfoDTO getApplicationInfo() {
+ var releaseInfo = githubReleaseInfo.get();
+ return new ApplicationInfoDTO()
+ .build(getBuildInfo(releaseInfo))
+ .enabledFeatures(getEnabledFeatures())
+ .latestRelease(convert(releaseInfo));
+ }
+
+ private ApplicationInfoLatestReleaseDTO convert(GithubReleaseInfo.GithubReleaseDto releaseInfo) {
+ return new ApplicationInfoLatestReleaseDTO()
+ .htmlUrl(releaseInfo.html_url())
+ .publishedAt(releaseInfo.published_at())
+ .versionTag(releaseInfo.tag_name());
+ }
+
+ private ApplicationInfoBuildDTO getBuildInfo(GithubReleaseInfo.GithubReleaseDto release) {
+ return new ApplicationInfoBuildDTO()
+ .isLatestRelease(release.tag_name() != null && release.tag_name().equals(buildProperties.getVersion()))
+ .commitId(gitProperties.getShortCommitId())
+ .version(buildProperties.getVersion())
+ .buildTime(buildProperties.getTime() != null
+ ? DateTimeFormatter.ISO_INSTANT.format(buildProperties.getTime()) : null);
+ }
+
+ private List<EnabledFeaturesEnum> getEnabledFeatures() {
+ var enabledFeatures = new ArrayList<EnabledFeaturesEnum>();
+ if (dynamicConfigOperations.dynamicConfigEnabled()) {
+ enabledFeatures.add(EnabledFeaturesEnum.DYNAMIC_CONFIG);
+ }
+ return enabledFeatures;
+ }
+
+ // updating on startup and every hour
+ @Scheduled(fixedRateString = "${github-release-info-update-rate:3600000}")
+ public void updateGithubReleaseInfo() {
+ githubReleaseInfo.refresh().block();
+ }
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java
new file mode 100644
index 00000000000..2ad0c9c399b
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java
@@ -0,0 +1,53 @@
+package com.provectus.kafka.ui.util;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.time.Duration;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.web.reactive.function.client.WebClient;
+import reactor.core.publisher.Mono;
+
+@Slf4j
+public class GithubReleaseInfo {
+
+ private static final String GITHUB_LATEST_RELEASE_RETRIEVAL_URL =
+ "https://api.github.com/repos/provectus/kafka-ui/releases/latest";
+
+ private static final Duration GITHUB_API_MAX_WAIT_TIME = Duration.ofSeconds(2);
+
+ public record GithubReleaseDto(String html_url, String tag_name, String published_at) {
+
+ static GithubReleaseDto empty() {
+ return new GithubReleaseDto(null, null, null);
+ }
+ }
+
+ private volatile GithubReleaseDto release = GithubReleaseDto.empty();
+
+ private final Mono<Void> refreshMono;
+
+ public GithubReleaseInfo() {
+ this(GITHUB_LATEST_RELEASE_RETRIEVAL_URL);
+ }
+
+ @VisibleForTesting
+ GithubReleaseInfo(String url) {
+ this.refreshMono = WebClient.create()
+ .get()
+ .uri(url)
+ .exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class))
+ .timeout(GITHUB_API_MAX_WAIT_TIME)
+ .doOnError(th -> log.trace("Error getting latest github release info", th))
+ .onErrorResume(th -> true, th -> Mono.just(GithubReleaseDto.empty()))
+ .doOnNext(release -> this.release = release)
+ .then();
+ }
+
+ public GithubReleaseDto get() {
+ return release;
+ }
+
+ public Mono<Void> refresh() {
+ return refreshMono;
+ }
+
+}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index d3b9f331366..7b6fd3c1131 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -1917,6 +1917,26 @@ components:
type: string
enum:
- DYNAMIC_CONFIG
+ build:
+ type: object
+ properties:
+ commitId:
+ type: string
+ version:
+ type: string
+ buildTime:
+ type: string
+ isLatestRelease:
+ type: boolean
+ latestRelease:
+ type: object
+ properties:
+ versionTag:
+ type: string
+ publishedAt:
+ type: string
+ htmlUrl:
+ type: string
Cluster:
type: object
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java
new file mode 100644
index 00000000000..6ec4bb78638
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/GithubReleaseInfoTest.java
@@ -0,0 +1,54 @@
+package com.provectus.kafka.ui.util;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.IOException;
+import java.time.Duration;
+import okhttp3.mockwebserver.MockResponse;
+import okhttp3.mockwebserver.MockWebServer;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import reactor.test.StepVerifier;
+
+class GithubReleaseInfoTest {
+
+ private final MockWebServer mockWebServer = new MockWebServer();
+
+ @BeforeEach
+ void startMockServer() throws IOException {
+ mockWebServer.start();
+ }
+
+ @AfterEach
+ void stopMockServer() throws IOException {
+ mockWebServer.close();
+ }
+
+ @Test
+ void test() {
+ mockWebServer.enqueue(new MockResponse()
+ .addHeader("content-type: application/json")
+ .setBody("""
+ {
+ "published_at": "2023-03-09T16:11:31Z",
+ "tag_name": "v0.6.0",
+ "html_url": "https://github.com/provectus/kafka-ui/releases/tag/v0.6.0",
+ "some_unused_prop": "ololo"
+ }
+ """));
+ var url = mockWebServer.url("repos/provectus/kafka-ui/releases/latest").toString();
+
+ var infoHolder = new GithubReleaseInfo(url);
+ infoHolder.refresh().block();
+
+ var i = infoHolder.get();
+ assertThat(i.html_url())
+ .isEqualTo("https://github.com/provectus/kafka-ui/releases/tag/v0.6.0");
+ assertThat(i.published_at())
+ .isEqualTo("2023-03-09T16:11:31Z");
+ assertThat(i.tag_name())
+ .isEqualTo("v0.6.0");
+ }
+
+}
| train | test | 2023-04-07T15:31:04 | "2022-10-14T10:49:29Z" | LaurentDanti | train |
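On the frontend side (see the `status/pending-frontend` label and the review remarks above about showing the tag instead of the commit id), the new `build` and `latestRelease` fields added to the swagger spec give the UI what it needs. A rough TypeScript sketch of how a version widget might use them; the interfaces mirror the swagger change, while `formatVersion` itself is a made-up helper:

```typescript
interface ApplicationInfoBuild {
  commitId?: string;
  version?: string;
  buildTime?: string;
  isLatestRelease?: boolean;
}

interface ApplicationInfoLatestRelease {
  versionTag?: string;
  publishedAt?: string;
  htmlUrl?: string;
}

// Per the review note above: when the running version matches the latest
// release tag, show the version; otherwise fall back to the commit id and
// hint at the newer release.
function formatVersion(
  build: ApplicationInfoBuild,
  latest: ApplicationInfoLatestRelease
): string {
  if (build.version && build.version === latest.versionTag) {
    return build.version;
  }
  const current = build.commitId ?? 'unknown';
  return latest.versionTag ? `${current} (latest: ${latest.versionTag})` : current;
}

console.log(
  formatVersion({ version: 'v0.6.0', commitId: 'abc1234' }, { versionTag: 'v0.6.0' })
); // "v0.6.0"
console.log(
  formatVersion({ commitId: 'abc1234' }, { versionTag: 'v0.6.0' })
); // "abc1234 (latest: v0.6.0)"
```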
provectus/kafka-ui/3572_3579 | provectus/kafka-ui | provectus/kafka-ui/3572 | provectus/kafka-ui/3579 | [
"keyword_pr_to_issue"
] | c79660b32ad0f0c139c6c9489dc3736fd8b975c5 | bfb80f36b33954a6b8a5a6b31d32e8be67dbfb61 | [
"Could you assign this to me? I finished it rn, just need to find how to make a pr that satisfied the rules",
"Great! I've already published a pr, however, I can not add labels and assign myself to the pr or add reviewers to the pr as the contributing documentation says. Could you help me with that?"
] | [] | "2023-03-29T00:37:45Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | Dark theme: 404: Text is black | With a dark theme, the text on the 404 (page not found) page is black. Please adapt. | [
"kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts"
] | [
"kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts b/kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts
index fc6e0332db2..b24d6de564e 100644
--- a/kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts
+++ b/kafka-ui-react-app/src/components/ErrorPage/ErrorPage.styled.ts
@@ -17,4 +17,5 @@ export const Number = styled.div`
export const Text = styled.div`
font-size: 20px;
+ color: ${({ theme }) => theme.default.color.normal};
`;
| null | train | test | 2023-03-30T06:47:08 | "2023-03-28T12:09:52Z" | Haarolean | train |
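The one-line patch above is the general pattern for this kind of dark-theme bug: text color must come from the active theme rather than the browser default (black). A minimal styled-components sketch of the same idea; the `declare module` block stands in for the app's real theme typings and is only there to keep the snippet self-contained:

```typescript
import styled from 'styled-components';

// Minimal theme typing for this sketch; the real app declares a much
// richer DefaultTheme of its own.
declare module 'styled-components' {
  export interface DefaultTheme {
    default: { backgroundColor: string; color: { normal: string } };
  }
}

// Error-page text that stays readable in both light and dark themes,
// because the color is always resolved from the current theme.
export const ErrorText = styled.div`
  font-size: 20px;
  color: ${({ theme }) => theme.default.color.normal};
`;
```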
provectus/kafka-ui/3417_3580 | provectus/kafka-ui | provectus/kafka-ui/3417 | provectus/kafka-ui/3580 | [
"connected"
] | 734d4ccdf71ee9b5c85570faa0c4767db3624509 | 0278700edb88cb09c5831675ab4c6d95f27798b1 | [] | [
"`client.invalidateQueries(topicKeys.all(clusterName));`\r\nthis has to be left intact I suppose\r\nAlso, please remove the code instead of commenting it"
] | "2023-03-29T01:56:58Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | FE: Topics: Remove a success message upon creating a topic |
**Describe the bug** (Actual behavior)
The success message appears on the topic profile after creating a topic
**Expected behavior**
Since the system redirects to the newly created topic's profile, there is no need for a success message
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
1. Navigate to Topics
2. Add a new topic
3. Fill the required fields
4. Press Save
**Screenshots**
https://user-images.githubusercontent.com/104780608/221803973-bbe5697d-9120-4432-951a-62e37e5c6c9d.mov
| [
"kafka-ui-react-app/src/lib/hooks/api/topics.ts"
] | [
"kafka-ui-react-app/src/lib/hooks/api/topics.ts"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java"
] | diff --git a/kafka-ui-react-app/src/lib/hooks/api/topics.ts b/kafka-ui-react-app/src/lib/hooks/api/topics.ts
index f71299f19bb..a87673368dd 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/topics.ts
+++ b/kafka-ui-react-app/src/lib/hooks/api/topics.ts
@@ -122,9 +122,6 @@ export function useCreateTopicMutation(clusterName: ClusterName) {
}),
{
onSuccess: () => {
- showSuccessAlert({
- message: `Topic successfully created.`,
- });
client.invalidateQueries(topicKeys.all(clusterName));
},
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
index ad20f595a4f..bad6a9fcde0 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
@@ -486,11 +486,7 @@ public void checkCopyTopicPossibility() {
topicDetails
.waitUntilScreenReady();
TOPIC_LIST.add(topicToCopy);
- SoftAssert softly = new SoftAssert();
- softly.assertTrue(topicDetails.isAlertWithMessageVisible(SUCCESS, "Topic successfully created."),
- "isAlertWithMessageVisible()");
- softly.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
- softly.assertAll();
+ Assert.assertTrue(topicDetails.isTopicHeaderVisible(topicToCopy.getName()), "isTopicHeaderVisible()");
}
@AfterClass(alwaysRun = true)
| val | test | 2023-04-20T19:13:05 | "2023-02-28T08:59:57Z" | armenuikafka | train |
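The review note above ("invalidateQueries has to be left intact") is the substance of the change: the success toast goes away, but the cached topic list must still be refreshed so the new topic shows up after the redirect. A condensed sketch of the resulting hook shape, assuming a react-query v4 style client; `createTopic` and the query key are placeholders, not the app's generated API client:

```typescript
import { useMutation, useQueryClient } from '@tanstack/react-query';

// Placeholder for the app's generated API call.
declare function createTopic(clusterName: string, form: unknown): Promise<void>;

// No success toast on creation: the user is redirected straight to the new
// topic, so the only thing left to do is invalidate the cached topic list.
export function useCreateTopic(clusterName: string) {
  const client = useQueryClient();
  return useMutation((form: unknown) => createTopic(clusterName, form), {
    onSuccess: () => {
      client.invalidateQueries(['clusters', clusterName, 'topics']);
    },
  });
}
```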
provectus/kafka-ui/3568_3582 | provectus/kafka-ui | provectus/kafka-ui/3568 | provectus/kafka-ui/3582 | [
"keyword_pr_to_issue"
] | de21721e00b5109d6792630707db6cff568df824 | 83f94325691554ec783e84fb3fb0c912fd936719 | [
"Hello there seralyzh! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hi, can you verify that it works now with `latest` tag?",
"Hi,\r\n\r\nIt works now for the timestamp filter now.\r\nBut the seek by offset is still broken \r\n`{\"code\":4002,\"message\":\"seekTo should be set if seekType is OFFSET....}.`\r\n\r\nAlso paging does not work : going to next page always returns the same page (got an NS_ERROR_ABORT and then reload) Should I create a new bug for this ?\r\nThanks.",
"This issue has been automatically marked as stale because no requested feedback has been provided. It will be closed if no further activity occurs. Thank you for your contributions."
] | [
"Correct me if I'm wrong: seekTo exists in the query only with this seek types?",
"@Haarolean @iliax ",
"@NeiruBugz yep that's right",
"doesn't this give us the issue where we had all the partitions selected, now the first part of the `if` condition is `false` but the second one because we are using `OR` will be true hence we will have that `url` length issue again during `offset` and `timestamp`",
"@Mgrdich \r\nhi, this is my suggestion https://github.com/provectus/kafka-ui/issues/3525 to solve the `url length isssue`. could you teams review it?",
"Hey @Mgrdich\r\nI've made a short review of the @p-eye solution for not combining offset with partition - it seems this one should work fine, but it probably needs some task on the backend to support this kind of payload and a bit of refactoring on the client side",
"@Haarolean if @p-eye suggestions of the seek error is going to be implemented in the future , it is okay for me with that change.",
"@Mgrdich the suggested change doesn't work well with pagination. The decision, for now, is to fix search modes considering that we might break the URL length fix."
] | "2023-03-29T07:31:14Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Messages filtering by offset & timestamp doesn't work |


When we use all partitions, the filter does not work.
When we use certain partitions, the filter works.
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
index f9fa3401fc0..c5be097dad6 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
@@ -231,9 +231,13 @@ const Filters: React.FC<FiltersProps> = ({
props.seekType = SeekType.TIMESTAMP;
}
+ const isSeekTypeWithSeekTo =
+ props.seekType === SeekType.TIMESTAMP ||
+ props.seekType === SeekType.OFFSET;
+
if (
selectedPartitions.length !== partitions.length ||
- currentSeekType === SeekType.TIMESTAMP
+ isSeekTypeWithSeekTo
) {
// not everything in the partition is selected
props.seekTo = selectedPartitions.map(({ value }) => {
| null | train | test | 2023-04-03T07:10:14 | "2023-03-28T08:17:29Z" | seralyzh | train |
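To make the failing cases above concrete: both offset and timestamp seeks need a per-partition `seekTo` entry; they only differ in which value fills it. The sketch below is hedged — the `partition::position` pair format is inferred from the query-building code elsewhere in the app, and the names are illustrative:

```typescript
type SeekType = 'OFFSET' | 'TIMESTAMP' | 'LATEST';

interface PartitionOption {
  value: number; // partition id
  offset: number; // offset entered by the user
}

// OFFSET uses the typed offset as-is, TIMESTAMP uses the picked date in
// epoch millis, LATEST needs no seekTo at all.
function buildSeekTo(
  seekType: SeekType,
  partitions: PartitionOption[],
  timestamp?: Date
): string | undefined {
  if (seekType === 'LATEST') return undefined;
  return partitions
    .map(({ value, offset }) => {
      const position =
        seekType === 'OFFSET' ? offset : (timestamp?.getTime() ?? 0);
      return `${value}::${position}`;
    })
    .join(',');
}

console.log(buildSeekTo('OFFSET', [{ value: 0, offset: 42 }])); // "0::42"
```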
provectus/kafka-ui/3255_3585 | provectus/kafka-ui | provectus/kafka-ui/3255 | provectus/kafka-ui/3585 | [
"keyword_pr_to_issue"
] | c79660b32ad0f0c139c6c9489dc3736fd8b975c5 | 15b78c0a2e047351e7a543a5449d312369886d1e | [] | [
"Do we need the to put it in a string quotation string literals here for the `${originalRow.name}`?",
"I think not, it is obviously a string value)",
"Isn't it already a string?",
"Yes, it's a string, so we do not need quotation marks - update in a moment"
] | "2023-03-29T10:28:34Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | [Topics] All topics are unchecking with Toggling off Show Internal Topics |
**Describe the bug** (Actual behavior)
All topics become checked (or unchecked) when toggling the Show Internal Topics switch off (or on)
**Expected behavior**
Toggling Show Internal Topics should not cause the topics to become checked (or unchecked), as internal topics are not checkable in any case
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
1. Login to Kafka-ui
2. Navigate to Topics
3. Make sure Show Internal Topics is toggled on
4. Check all topics with on top checkbox
5. Toggle off the Show Internal Topics
https://user-images.githubusercontent.com/104780608/214019758-7d8d3fa9-d8c3-4df6-adf6-34bc4f5193a7.mov
| [
"kafka-ui-react-app/src/components/common/NewTable/Table.tsx"
] | [
"kafka-ui-react-app/src/components/common/NewTable/Table.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
index 1cdc67001d7..55652df082d 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/Table.tsx
@@ -142,15 +142,12 @@ const Table: React.FC<TableProps<any>> = ({
(updater: UpdaterFn<PaginationState>) => {
const newState = updatePaginationState(updater, searchParams);
setSearchParams(searchParams);
+ setRowSelection({});
return newState;
},
[searchParams]
);
- React.useEffect(() => {
- setRowSelection({});
- }, [searchParams]);
-
const table = useReactTable({
data,
pageCount,
@@ -160,6 +157,9 @@ const Table: React.FC<TableProps<any>> = ({
pagination: getPaginationFromSearchParams(searchParams),
rowSelection,
},
+ getRowId: (originalRow, index) => {
+ return originalRow.name ? originalRow.name : `${index}`;
+ },
onSortingChange: onSortingChange as OnChangeFn<SortingState>,
onPaginationChange: onPaginationChange as OnChangeFn<PaginationState>,
onRowSelectionChange: setRowSelection,
| null | val | test | 2023-03-30T06:47:08 | "2023-01-23T10:43:12Z" | armenuikafka | train |
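The decisive part of the fix above is `getRowId`: @tanstack/react-table keys its selection state by row id, so using the topic name instead of the positional index keeps a checked topic checked even when the set of visible rows changes (e.g. when internal topics are hidden). A trimmed sketch of the option; columns and types are reduced to the bare minimum:

```typescript
import { useReactTable, getCoreRowModel } from '@tanstack/react-table';

interface TopicRow {
  name: string;
  internal: boolean;
}

// Selection state is keyed by row id. Keying by the (unique) topic name
// instead of the row index means toggling "Show Internal Topics" cannot
// transfer a check mark onto a different topic.
export function useTopicsTable(data: TopicRow[]) {
  return useReactTable({
    data,
    columns: [],
    getCoreRowModel: getCoreRowModel(),
    getRowId: (row, index) => row.name ?? `${index}`,
  });
}
```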
provectus/kafka-ui/3290_3591 | provectus/kafka-ui | provectus/kafka-ui/3290 | provectus/kafka-ui/3591 | [
"keyword_pr_to_issue"
] | d06f77ad5338775b56a61b7a24d217a993924df4 | ecc8db1948c260ec47a5bd1aba1c982f7be3bb9e | [] | [] | "2023-03-30T10:45:54Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted"
] | HTTP 400 for clusters with special symbols | I have a single node Kafka (kafka_2.13-3.3.2.tgz) cluster running on a Linux server (no internet).
It is a default setup using KRaft and the only config I have set is the /config/kraft/server.properties: `advertised.listeners=PLAINTEXT://192.168.20.109:9092`, where 192.168.20.109 is the LAN IP.
I am using UI for Apache Kafka v0.5.0 (027d9b4) running locally on my PC on port 8080, directly from Java without using docker.
application-local.yml as below:
```
kafka:
clusters:
- name: "[UAT] Kafka"
bootstrapServers: 192.168.20.109:9092
```
Only port 9092-9093 firewall rules opened between my PC and the server.
I created a test topic using the UI, and produced some test message.

However, when I try to view the message using the UI, it still shows no messages found and when I check the API call, the response is 400 Bad Request.

May I get some help on what the problem could be?
Thanks.
_Originally posted by @Zeeeeta in https://github.com/provectus/kafka-ui/discussions/3277_
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx",
"kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx",
"kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx",
"kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx",
"kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
index ec62f266ed4..f9fa3401fc0 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx
@@ -323,7 +323,9 @@ const Filters: React.FC<FiltersProps> = ({
// eslint-disable-next-line consistent-return
React.useEffect(() => {
if (location.search?.length !== 0) {
- const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/topics/${topicName}/messages${location.search}`;
+ const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+ clusterName
+ )}/topics/${topicName}/messages${location.search}`;
const sse = new EventSource(url);
source.current = sse;
diff --git a/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx b/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx
index 6685c6223f6..366141e82a4 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx
+++ b/kafka-ui-react-app/src/lib/hooks/api/ksqlDb.tsx
@@ -90,7 +90,9 @@ export const useKsqlkDbSSE = ({ clusterName, pipeId }: UseKsqlkDbSSEProps) => {
React.useEffect(() => {
const fetchData = async () => {
- const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/ksql/response`;
+ const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+ clusterName
+ )}/ksql/response`;
await fetchEventSource(
`${url}?${new URLSearchParams({ pipeId: pipeId || '' }).toString()}`,
{
diff --git a/kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx b/kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx
index 73a5753ba22..886b2979c0f 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx
+++ b/kafka-ui-react-app/src/lib/hooks/api/topicMessages.tsx
@@ -51,7 +51,9 @@ export const useTopicMessages = ({
React.useEffect(() => {
const fetchData = async () => {
setIsFetching(true);
- const url = `${BASE_PARAMS.basePath}/api/clusters/${clusterName}/topics/${topicName}/messages`;
+ const url = `${BASE_PARAMS.basePath}/api/clusters/${encodeURIComponent(
+ clusterName
+ )}/topics/${topicName}/messages`;
const requestParams = new URLSearchParams({
limit,
seekTo: seekTo.replaceAll('-', '::').replaceAll('.', ','),
| null | train | test | 2023-03-30T09:45:36 | "2023-01-31T08:18:35Z" | Haarolean | train |
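The pattern in the patch above generalizes to any user-controlled path segment: cluster names such as `[UAT] Kafka` contain characters that break a naively concatenated URL, which is what produced the 400. A small sketch of encoding each segment before splicing it into the path; `apiPath` is an illustrative helper, not an existing one:

```typescript
// Build an API path from raw segments, percent-encoding each one so that
// characters like '[', ']' or spaces in cluster names survive routing.
function apiPath(...segments: string[]): string {
  return `/api/${segments.map((s) => encodeURIComponent(s)).join('/')}`;
}

const clusterName = '[UAT] Kafka';
const topicName = 'quickstart-events';

console.log(apiPath('clusters', clusterName, 'topics', topicName, 'messages'));
// "/api/clusters/%5BUAT%5D%20Kafka/topics/quickstart-events/messages"
```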
provectus/kafka-ui/3538_3592 | provectus/kafka-ui | provectus/kafka-ui/3538 | provectus/kafka-ui/3592 | [
"keyword_pr_to_issue"
] | 005e74f2480f3ea961ec0c3d9b4a633b666df552 | 29f49b667d4a666ec4c72676b109e984d9618eb5 | [
"Hello there AaronCoad! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Checked BE - all endpoints work as expected (json-schema schemas are not processed on BE at all). I guess UI library that parses serde's schemas might fail on frontend. \r\n\r\n@Haarolean , please reassign to someone from FE team"
] | [] | "2023-03-30T11:33:42Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"type/regression"
] | Topics: Messages: 404 with schema created by .NET containing guid | **Describe the bug** (Actual behavior)
When viewing the list of messages for a topic, if the schema has been created from .NET with a GUID field, Kafka UI returns a 404 because it only accepts "uuid" as the format value. When we do a string replace to change "guid" to "uuid", the messages show correctly, provided the latest version of the schema has "uuid" for the format.
**Expected behavior**
Kafka UI should allow GUIDs to be displayed; this worked correctly in an earlier version. The problem only seems to have appeared since moving to version 0.6.0.
**Set up**
Commit: https://github.com/provectus/kafka-ui/commit/e72f6d6
Version: v0.6.0
Environment: AWS ECS
**Steps to Reproduce**
In .NET create a class that contains a guid property and then use Confluent.SchemaRegistry.Serdes.Json.JsonSerializer for the value/key Serializer to generate the schema that is sent to the schema registry.
**Screenshots**

**Additional context**
An alternative is to do a string replace and disable the AutoRegisterSchema option in the configuration settings of the JsonSerializer. However, the AutoRegisterSchema option would need to be disabled for any connections otherwise the "guid" format value will reappear as the latest version of the schema and break the display again.
| [
"kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
index 8a368036c06..6f98c5916d3 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts
@@ -11,6 +11,7 @@ import upperFirst from 'lodash/upperFirst';
jsf.option('fillProperties', false);
jsf.option('alwaysFakeOptionals', true);
+jsf.option('failOnInvalidFormat', false);
const generateValueFromSchema = (preffered?: SerdeDescription) => {
if (!preffered?.schema) {
| null | train | test | 2023-04-10T17:35:14 | "2023-03-23T00:19:15Z" | AaronCoad | train |
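Context for the one-liner above: `json-schema-faker` (used to pre-fill a sample payload from the value schema in the message form) throws on format values it does not recognize — such as the `guid` format emitted by Confluent's .NET JSON serializer — unless `failOnInvalidFormat` is disabled. A hedged sketch of the behavior; the schema is a made-up minimal example, and depending on the installed version the import may be the named `JSONSchemaFaker` export rather than a default one:

```typescript
import jsf from 'json-schema-faker';

// With failOnInvalidFormat left at its default (true), generating a value
// for an unknown "format" such as "guid" throws; disabling it makes the
// field fall back to a plain string.
jsf.option('alwaysFakeOptionals', true);
jsf.option('failOnInvalidFormat', false);

const schema = {
  type: 'object',
  properties: {
    id: { type: 'string', format: 'guid' },
    name: { type: 'string' },
  },
} as const;

// generate() returns a sample object that can pre-fill the editor.
console.log(jsf.generate(schema));
```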
provectus/kafka-ui/3593_3594 | provectus/kafka-ui | provectus/kafka-ui/3593 | provectus/kafka-ui/3594 | [
"connected"
] | de21721e00b5109d6792630707db6cff568df824 | dd4b653b8e56cbde3b875aa6e35e113559ba0c86 | [
"Hello there iliax! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Thanks for opening issue on behalf of me. I am attaching screenshots for better understanding (if needed)\r\n\r\n#### GUI: Where the Streams and Tables are showing as 0\r\n\r\n#### Terminal Window where I can see the Streams:\r\n\r\n#### Query From GUI: I can see the streams\r\n\r\n#### XHR Requests:\r\n\r\n#### Docker Configuration:\r\n\r\n#### Stack Traces\r\n```\r\nkafka-ui | 2023-03-29 05:41:43,822 ERROR [reactor-http-epoll-2] o.s.b.a.w.r.e.AbstractErrorWebExceptionHandler: [27cdebf5-156] 500 Server Error for HTTP GET \"/api/clusters/local-kafka-cluster/ksql/streams\"\r\nkafka-ui | java.lang.IndexOutOfBoundsException: Index -1 out of bounds for length 4\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBounds(Preconditions.java:64)\r\nkafka-ui | Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: \r\nkafka-ui | Error has been observed at the following site(s):\r\nkafka-ui | *__checkpoint β’ Handler com.provectus.kafka.ui.controller.KsqlController#listStreams(String, ServerWebExchange) [DispatcherHandler]\r\nkafka-ui | *__checkpoint β’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authorization.AuthorizationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authorization.ExceptionTranslationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authentication.logout.LogoutWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.savedrequest.ServerRequestCacheWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.context.SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authentication.AuthenticationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.ui.LogoutPageGeneratingWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.context.ReactorContextWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.header.HttpHeaderWriterWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.config.web.server.ServerHttpSecurity$ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.boot.actuate.metrics.web.reactive.server.MetricsWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ HTTP GET \"/api/clusters/local-kafka-cluster/ksql/streams\" [ExceptionHandlingWebHandler]\r\nkafka-ui | Original Stack Trace:\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBounds(Preconditions.java:64)\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBoundsCheckIndex(Preconditions.java:70)\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.checkIndex(Preconditions.java:266)\r\nkafka-ui | at java.base/java.util.Objects.checkIndex(Objects.java:359)\r\nkafka-ui | at java.base/java.util.ArrayList.get(ArrayList.java:427)\r\nkafka-ui | at 
com.provectus.kafka.ui.service.ksql.KsqlApiClient$KsqlResponseTable.getColumnValue(KsqlApiClient.java:55)\r\nkafka-ui | at com.provectus.kafka.ui.service.ksql.KsqlServiceV2.lambda$listStreams$5(KsqlServiceV2.java:91)\r\nkafka-ui | at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)\r\nkafka-ui | at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)\r\nkafka-ui | at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)\r\nkafka-ui | at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)\r\nkafka-ui | at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921)\r\nkafka-ui | at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)\r\nkafka-ui | at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682)\r\nkafka-ui | at com.provectus.kafka.ui.service.ksql.KsqlServiceV2.lambda$listStreams$6(KsqlServiceV2.java:93)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.onNext(FluxFlatMap.java:386)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.MonoFlatMapMany$FlatMapManyInner.onNext(MonoFlatMapMany.java:250)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.FluxConcatArray$ConcatArraySubscriber.onNext(FluxConcatArray.java:201)\r\nkafka-ui | at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:712)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:588)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapInner.onSubscribe(FluxFlatMap.java:955)\r\nkafka-ui | at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)\r\nkafka-ui | at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:109)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable.subscribeOrReturn(FluxFlattenIterable.java:105)\r\nkafka-ui | at reactor.core.publisher.Flux.subscribe(Flux.java:8512)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.onNext(FluxFlatMap.java:426)\r\nkafka-ui | at reactor.core.publisher.FluxHandle$HandleSubscriber.onNext(FluxHandle.java:126)\r\nkafka-ui | at reactor.core.publisher.FluxConcatArray$ConcatArraySubscriber.onNext(FluxConcatArray.java:201)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.drainAsync(FluxFlattenIterable.java:421)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.drain(FluxFlattenIterable.java:686)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.onNext(FluxFlattenIterable.java:250)\r\nkafka-ui | at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)\r\nkafka-ui | at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)\r\nkafka-ui | at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)\r\nkafka-ui | at reactor.netty.channel.FluxReceive.drainReceiver(FluxReceive.java:279)\r\nkafka-ui | at reactor.netty.channel.FluxReceive.onInboundNext(FluxReceive.java:388)\r\nkafka-ui | at 
reactor.netty.channel.ChannelOperations.onInboundNext(ChannelOperations.java:404)\r\nkafka-ui | at reactor.netty.http.client.HttpClientOperations.onInboundNext(HttpClientOperations.java:724)\r\nkafka-ui | at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:336)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:323)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:280)\r\nkafka-ui | at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\r\nkafka-ui | at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800)\r\nkafka-ui | at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499)\r\nkafka-ui | at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397)\r\nkafka-ui | at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)\r\nkafka-ui | at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\r\nkafka-ui | at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\r\nkafka-ui | at java.base/java.lang.Thread.run(Thread.java:833)\r\nkafka-ui | 2023-03-29 05:41:43,824 ERROR [reactor-http-epoll-1] o.s.b.a.w.r.e.AbstractErrorWebExceptionHandler: [5eaaff66-157] 500 Server Error for HTTP GET 
\"/api/clusters/local-kafka-cluster/ksql/tables\"\r\nkafka-ui | java.lang.IndexOutOfBoundsException: Index -1 out of bounds for length 5\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBounds(Preconditions.java:64)\r\nkafka-ui | Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: \r\nkafka-ui | Error has been observed at the following site(s):\r\nkafka-ui | *__checkpoint β’ Handler com.provectus.kafka.ui.controller.KsqlController#listTables(String, ServerWebExchange) [DispatcherHandler]\r\nkafka-ui | *__checkpoint β’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authorization.AuthorizationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authorization.ExceptionTranslationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authentication.logout.LogoutWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.savedrequest.ServerRequestCacheWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.context.SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.authentication.AuthenticationWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.ui.LogoutPageGeneratingWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.context.ReactorContextWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.header.HttpHeaderWriterWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.config.web.server.ServerHttpSecurity$ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ org.springframework.boot.actuate.metrics.web.reactive.server.MetricsWebFilter [DefaultWebFilterChain]\r\nkafka-ui | *__checkpoint β’ HTTP GET \"/api/clusters/local-kafka-cluster/ksql/tables\" [ExceptionHandlingWebHandler]\r\nkafka-ui | Original Stack Trace:\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBounds(Preconditions.java:64)\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.outOfBoundsCheckIndex(Preconditions.java:70)\r\nkafka-ui | at java.base/jdk.internal.util.Preconditions.checkIndex(Preconditions.java:266)\r\nkafka-ui | at java.base/java.util.Objects.checkIndex(Objects.java:359)\r\nkafka-ui | at java.base/java.util.ArrayList.get(ArrayList.java:427)\r\nkafka-ui | at com.provectus.kafka.ui.service.ksql.KsqlApiClient$KsqlResponseTable.getColumnValue(KsqlApiClient.java:55)\r\nkafka-ui | at com.provectus.kafka.ui.service.ksql.KsqlServiceV2.lambda$listTables$2(KsqlServiceV2.java:69)\r\nkafka-ui | at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197)\r\nkafka-ui | at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1625)\r\nkafka-ui | at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509)\r\nkafka-ui | at 
java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499)\r\nkafka-ui | at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921)\r\nkafka-ui | at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)\r\nkafka-ui | at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682)\r\nkafka-ui | at com.provectus.kafka.ui.service.ksql.KsqlServiceV2.lambda$listTables$3(KsqlServiceV2.java:72)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.onNext(FluxFlatMap.java:386)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.MonoFlatMapMany$FlatMapManyInner.onNext(MonoFlatMapMany.java:250)\r\nkafka-ui | at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)\r\nkafka-ui | at reactor.core.publisher.FluxConcatArray$ConcatArraySubscriber.onNext(FluxConcatArray.java:201)\r\nkafka-ui | at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:712)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:588)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapInner.onSubscribe(FluxFlatMap.java:955)\r\nkafka-ui | at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)\r\nkafka-ui | at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:109)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable.subscribeOrReturn(FluxFlattenIterable.java:105)\r\nkafka-ui | at reactor.core.publisher.Flux.subscribe(Flux.java:8512)\r\nkafka-ui | at reactor.core.publisher.FluxFlatMap$FlatMapMain.onNext(FluxFlatMap.java:426)\r\nkafka-ui | at reactor.core.publisher.FluxHandle$HandleSubscriber.onNext(FluxHandle.java:126)\r\nkafka-ui | at reactor.core.publisher.FluxConcatArray$ConcatArraySubscriber.onNext(FluxConcatArray.java:201)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.drainAsync(FluxFlattenIterable.java:421)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.drain(FluxFlattenIterable.java:686)\r\nkafka-ui | at reactor.core.publisher.FluxFlattenIterable$FlattenIterableSubscriber.onNext(FluxFlattenIterable.java:250)\r\nkafka-ui | at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)\r\nkafka-ui | at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)\r\nkafka-ui | at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)\r\nkafka-ui | at reactor.netty.channel.FluxReceive.drainReceiver(FluxReceive.java:279)\r\nkafka-ui | at reactor.netty.channel.FluxReceive.onInboundNext(FluxReceive.java:388)\r\nkafka-ui | at reactor.netty.channel.ChannelOperations.onInboundNext(ChannelOperations.java:404)\r\nkafka-ui | at reactor.netty.http.client.HttpClientOperations.onInboundNext(HttpClientOperations.java:724)\r\nkafka-ui | at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\nkafka-ui | at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:336)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:323)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:444)\r\nkafka-ui | at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:280)\r\nkafka-ui | at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)\r\nkafka-ui | at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)\r\nkafka-ui | at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)\r\nkafka-ui | at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)\r\nkafka-ui | at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800)\r\nkafka-ui | at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499)\r\nkafka-ui | at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397)\r\nkafka-ui | at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)\r\nkafka-ui | at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)\r\nkafka-ui | at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)\r\nkafka-ui | at java.base/java.lang.Thread.run(Thread.java:833)\r\nkafka-ui | 2023-03-29 05:42:07,153 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: local-kafka-cluster\r\nkafka-ui | 2023-03-29 05:42:07,215 DEBUG [parallel-2] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: local-kafka-cluster\r\n\r\n\r\n```"
] | [] | "2023-03-30T14:34:45Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | KSQL is not working with cp-ksqldb-server:5.5.3 | Due to API changes, the KSQL "list streams" API does not work for cp-ksqldb-server versions <= 5.5.3 | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
index fd68add7267..e8f4954bf0a 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlApiClient.java
@@ -52,7 +52,10 @@ public static class KsqlResponseTable {
boolean error;
public Optional<JsonNode> getColumnValue(List<JsonNode> row, String column) {
- return Optional.ofNullable(row.get(columnNames.indexOf(column)));
+ int colIdx = columnNames.indexOf(column);
+ return colIdx >= 0
+ ? Optional.ofNullable(row.get(colIdx))
+ : Optional.empty();
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java
index efd7e9ca2e9..e8c2a4c65a3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2.java
@@ -89,7 +89,14 @@ public Flux<KsqlStreamDescriptionDTO> listStreams(KafkaCluster cluster) {
.name(resp.getColumnValue(row, "name").map(JsonNode::asText).orElse(null))
.topic(resp.getColumnValue(row, "topic").map(JsonNode::asText).orElse(null))
.keyFormat(resp.getColumnValue(row, "keyFormat").map(JsonNode::asText).orElse(null))
- .valueFormat(resp.getColumnValue(row, "valueFormat").map(JsonNode::asText).orElse(null)))
+ .valueFormat(
+ // for old versions (<0.13) "format" column is filled,
+ // for new version "keyFormat" & "valueFormat" columns should be filled
+ resp.getColumnValue(row, "valueFormat")
+ .or(() -> resp.getColumnValue(row, "format"))
+ .map(JsonNode::asText)
+ .orElse(null))
+ )
.collect(Collectors.toList()));
});
}
| null | train | test | 2023-04-03T07:10:14 | "2023-03-30T14:10:12Z" | iliax | train |
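The backend fix above is two defensive steps: never index a response row with -1 when a column is missing, and fall back from the newer `valueFormat` column to the `format` column that pre-0.13 KSQL servers return. The same idea in a small TypeScript sketch; the response shape is simplified and the function names are illustrative:

```typescript
interface KsqlResponseTable {
  columnNames: string[];
  values: unknown[][];
}

// Returns undefined instead of throwing when the column is absent, which
// is exactly what happens on older KSQL servers (<= 5.5.x).
function columnValue(
  table: KsqlResponseTable,
  row: unknown[],
  column: string
): unknown {
  const idx = table.columnNames.indexOf(column);
  return idx >= 0 ? row[idx] : undefined;
}

// Older servers expose a single "format" column, newer ones expose
// "keyFormat"/"valueFormat": try the new name first, then fall back.
function valueFormat(table: KsqlResponseTable, row: unknown[]): unknown {
  return (
    columnValue(table, row, 'valueFormat') ?? columnValue(table, row, 'format')
  );
}

const oldServer: KsqlResponseTable = {
  columnNames: ['name', 'topic', 'format'],
  values: [],
};
console.log(valueFormat(oldServer, ['pageviews', 'pageviews', 'JSON'])); // "JSON"
```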
provectus/kafka-ui/3576_3601 | provectus/kafka-ui | provectus/kafka-ui/3576 | provectus/kafka-ui/3601 | [
"connected"
] | d06f77ad5338775b56a61b7a24d217a993924df4 | de21721e00b5109d6792630707db6cff568df824 | [] | [] | "2023-03-31T08:51:53Z" | [
"good first issue",
"scope/frontend",
"status/triage",
"type/chore"
] | Edit config: Selected text background color is transparent | 1. Create a connector
2. Select some text within the config field
3. The selection is not visible
"kafka-ui-react-app/src/components/common/Editor/Editor.tsx",
"kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts"
] | [
"kafka-ui-react-app/src/components/common/Editor/Editor.tsx",
"kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Editor/Editor.tsx b/kafka-ui-react-app/src/components/common/Editor/Editor.tsx
index b8744762c96..05c91e3557b 100644
--- a/kafka-ui-react-app/src/components/common/Editor/Editor.tsx
+++ b/kafka-ui-react-app/src/components/common/Editor/Editor.tsx
@@ -50,8 +50,10 @@ export default styled(Editor)`
theme.ksqlDb.query.editor.cell.backgroundColor};
color: ${({ theme }) => theme.default.color.normal};
}
- .ace_line {
+ .ace_scroller {
background-color: ${({ theme }) => theme.default.backgroundColor};
+ }
+ .ace_line {
color: ${({ theme }) => theme.default.color.normal};
}
.ace_cursor {
diff --git a/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts b/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts
index d60603f635c..a135f4dfcc2 100644
--- a/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts
+++ b/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.styled.ts
@@ -5,7 +5,7 @@ export const Wrapper = styled.div`
padding: 8px 16px;
.ace_active-line {
background-color: ${({ theme }) =>
- theme.viewer.wrapper.backgroundColor} !important;
+ theme.default.backgroundColor} !important;
}
.ace_line {
color: ${({ theme }) => theme.viewer.wrapper.color} !important;
| null | train | test | 2023-03-30T09:45:36 | "2023-03-28T15:33:52Z" | Haarolean | train |
provectus/kafka-ui/2728_3602 | provectus/kafka-ui | provectus/kafka-ui/2728 | provectus/kafka-ui/3602 | [
"keyword_pr_to_issue"
] | d06f77ad5338775b56a61b7a24d217a993924df4 | 0ff7e6338632b9f901edf530662d369351d8f8d9 | [
"Hello there AydinChavez! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reaching out. It seems like we already have a PR by a volunteer, we'll review it soon :)"
] | [] | "2023-03-31T11:39:57Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted"
] | AirGap: Remove internet dependency / google fonts | Hi!
we are accessing the latest kafka-ui via a corporate SOCKS proxy, which does not have internet access.
Would it be possible to embed the fonts in the react-app (as part of the static folder, e.g.) instead of referencing those online?
https://github.com/provectus/kafka-ui/blob/master/kafka-ui-react-app/index.html#L10
```
<!-- Google fonts -->
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link
href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500&family=Roboto+Mono:wght@400;500&display=swap"
rel="stylesheet"
/>
```
At the moment, our browser tries to reach the following two Google Fonts URLs via the SOCKS proxy, which leads to an initial load time for kafka-ui of 30 seconds (the browser gets a network timeout after 30 seconds):
https://fonts.googleapis.com/css2?family=Roboto+Mono:wght@400;500&display=swap
https://fonts.googleapis.com/css2?family=Inter:wght@400;500&display=swap | [
"kafka-ui-react-app/index.html"
] | [
"kafka-ui-react-app/index.html"
] | [] | diff --git a/kafka-ui-react-app/index.html b/kafka-ui-react-app/index.html
index 33e18ad2688..be10fc78a3a 100644
--- a/kafka-ui-react-app/index.html
+++ b/kafka-ui-react-app/index.html
@@ -3,13 +3,6 @@
<head>
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
- <!-- Google fonts -->
- <link rel="preconnect" href="https://fonts.googleapis.com" />
- <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
- <link
- href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500&family=Roboto+Mono:wght@400;500&display=swap"
- rel="stylesheet"
- />
<!-- Favicons -->
<link rel="icon" href="<%= PUBLIC_PATH %>/favicon/favicon.ico" sizes="any" />
@@ -25,6 +18,35 @@
return window.basePath+ "/" + importer;
};
</script>
+ <style>
+ @font-face {
+ font-family: 'Inter';
+ src: url('<%= PUBLIC_PATH %>/fonts/Inter-Medium.ttf') format('truetype');
+ font-weight: 500;
+ font-display: swap;
+ }
+
+ @font-face {
+ font-family: 'Inter';
+ src: url('<%= PUBLIC_PATH %>/fonts/Inter-Regular.ttf') format('truetype');
+ font-weight: 400;
+ font-display: swap;
+ }
+
+ @font-face {
+ font-family: 'Roboto Mono';
+ src: url('<%= PUBLIC_PATH %>/fonts/RobotoMono-Medium.ttf') format('truetype');
+ font-weight: 500;
+ font-display: swap;
+ }
+
+ @font-face {
+ font-family: 'Roboto Mono';
+ src: url('<%= PUBLIC_PATH %>/fonts/RobotoMono-Regular.ttf') format('truetype');
+ font-weight: 400;
+ font-display: swap;
+ }
+ </style>
</head>
<body>
| null | test | test | 2023-03-30T09:45:36 | "2022-10-11T12:16:23Z" | AydinChavez | train |
provectus/kafka-ui/2366_3608 | provectus/kafka-ui | provectus/kafka-ui/2366 | provectus/kafka-ui/3608 | [
"keyword_pr_to_issue"
] | 005e74f2480f3ea961ec0c3d9b4a633b666df552 | 89019dae19e8ef3a1e62262ff62435348f06f1c4 | [] | [
"i would suggest to use `same` instead of current here \r\n```suggestion\r\n message: 'Filter with the same name already exists',\r\n```",
"Updated"
] | "2023-04-03T06:25:33Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | It's possible to save the filters with the same name for Messages |
**Describe the bug**
It's possible to save the filters with the same name for Messages
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Navigate to Topics
2. Turn to Messages tab for any Topic
3. Press "+ Add Filters"
4. Turn to Saved Filters
5. Check the existing Filters
6. Add the filter with the same name
**Expected behavior**
Should not be possible to submit the Filter with the same name
**Screenshots**
https://user-images.githubusercontent.com/104780608/182073770-3e266ef0-e268-44cb-8d08-c12c9aded862.mov
**Additional context**
<!--
(Add any other context about the problem here)
-->
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
index 557db159ba7..757b6e171dd 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
@@ -27,7 +27,7 @@ export interface AddEditFilterContainerProps {
inputDisplayNameDefaultValue?: string;
inputCodeDefaultValue?: string;
isAdd?: boolean;
- submitCallback?: (values: AddMessageFilters) => void;
+ submitCallback?: (values: AddMessageFilters) => Promise<void>;
}
const AddEditFilterContainer: React.FC<AddEditFilterContainerProps> = ({
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx
index 7d3d95ecc76..035d98c3a39 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/AddFilter.tsx
@@ -6,6 +6,7 @@ import SavedFilters from 'components/Topics/Topic/Messages/Filters/SavedFilters'
import SavedIcon from 'components/common/Icons/SavedIcon';
import QuestionIcon from 'components/common/Icons/QuestionIcon';
import useBoolean from 'lib/hooks/useBoolean';
+import { showAlert } from 'lib/errorHandling';
import AddEditFilterContainer from './AddEditFilterContainer';
import InfoModal from './InfoModal';
@@ -43,6 +44,19 @@ const AddFilter: React.FC<FilterModalProps> = ({
const onSubmit = React.useCallback(
async (values: AddMessageFilters) => {
+ const isFilterExists = filters.some(
+ (filter) => filter.name === values.name
+ );
+
+ if (isFilterExists) {
+ showAlert('error', {
+ id: '',
+ title: 'Validation Error',
+ message: 'Filter with the same name already exists',
+ });
+ return;
+ }
+
const data = { ...values };
if (data.saveFilter) {
addFilter(data);
| null | train | test | 2023-04-10T17:35:14 | "2022-08-01T04:42:56Z" | armenuikafka | train |
provectus/kafka-ui/3168_3621 | provectus/kafka-ui | provectus/kafka-ui/3168 | provectus/kafka-ui/3621 | [
"keyword_pr_to_issue"
] | 005e74f2480f3ea961ec0c3d9b4a633b666df552 | 98f1f6ebcd5412e8d09a4ea6f72eec4f9cdf9e4e | [] | [
"* `const ipV4ToNum = (ip?: string) => {` undefined can be written like this\r\n\r\n* Here the typescript can automatically infer your return type you don't need to specify the return type.",
"Updated"
] | "2023-04-04T08:23:19Z" | [
"type/enhancement",
"good first issue",
"scope/frontend",
"status/accepted"
] | Consumers: Topic list: Implement sorting | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
No sorting is applied to the visible values, so the list appears to be in random order (Partition would be the logical default). This applies to the "Partition", "Consumer ID", "Host", "Messages behind", "Current offset", "End offset" columns.
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
At least the first 4 columns can be valid candidates for sorting via click on the column headers in a nested table.
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
[f4e6afe](https://github.com/provectus/kafka-ui/commit/f4e6afe)
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
1. Log in to Kafka UI and navigate to the Consumers page.
2. Open consumer group details of the topic with the large number of partitions.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->

**Additional context**
<!--
(Add any other context about the problem here)
-->
@Haarolean required to open this as a separate issue from the #2651 | [
"kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx"
] | [
"kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx
index 6637821020a..b234fb8e192 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/TopicContents/TopicContents.tsx
@@ -1,6 +1,6 @@
import { Table } from 'components/common/table/Table/Table.styled';
import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell';
-import { ConsumerGroupTopicPartition } from 'generated-sources';
+import { ConsumerGroupTopicPartition, SortOrder } from 'generated-sources';
import React from 'react';
import { ContentBox, TopicContentWrapper } from './TopicContent.styled';
@@ -9,7 +9,125 @@ interface Props {
consumers: ConsumerGroupTopicPartition[];
}
+type OrderByKey = keyof ConsumerGroupTopicPartition;
+interface Headers {
+ title: string;
+ orderBy: OrderByKey | undefined;
+}
+
+const TABLE_HEADERS_MAP: Headers[] = [
+ { title: 'Partition', orderBy: 'partition' },
+ { title: 'Consumer ID', orderBy: 'consumerId' },
+ { title: 'Host', orderBy: 'host' },
+ { title: 'Messages Behind', orderBy: 'messagesBehind' },
+ { title: 'Current Offset', orderBy: 'currentOffset' },
+ { title: 'End offset', orderBy: 'endOffset' },
+];
+
+const ipV4ToNum = (ip?: string) => {
+ if (typeof ip === 'string' && ip.length !== 0) {
+ const withoutSlash = ip.indexOf('/') !== -1 ? ip.slice(1) : ip;
+ return Number(
+ withoutSlash
+ .split('.')
+ .map((octet) => `000${octet}`.slice(-3))
+ .join('')
+ );
+ }
+ return 0;
+};
+
+type ComparatorFunction<T> = (
+ valueA: T,
+ valueB: T,
+ order: SortOrder,
+ property?: keyof T
+) => number;
+
+const numberComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+ valueA,
+ valueB,
+ order,
+ property
+) => {
+ if (property !== undefined) {
+ return order === SortOrder.ASC
+ ? Number(valueA[property]) - Number(valueB[property])
+ : Number(valueB[property]) - Number(valueA[property]);
+ }
+ return 0;
+};
+
+const ipComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+ valueA,
+ valueB,
+ order
+) =>
+ order === SortOrder.ASC
+ ? ipV4ToNum(valueA.host) - ipV4ToNum(valueB.host)
+ : ipV4ToNum(valueB.host) - ipV4ToNum(valueA.host);
+
+const consumerIdComparator: ComparatorFunction<ConsumerGroupTopicPartition> = (
+ valueA,
+ valueB,
+ order
+) => {
+ if (valueA.consumerId && valueB.consumerId) {
+ if (order === SortOrder.ASC) {
+ if (valueA.consumerId?.toLowerCase() > valueB.consumerId?.toLowerCase()) {
+ return 1;
+ }
+ }
+
+ if (order === SortOrder.DESC) {
+ if (valueB.consumerId?.toLowerCase() > valueA.consumerId?.toLowerCase()) {
+ return -1;
+ }
+ }
+ }
+
+ return 0;
+};
+
const TopicContents: React.FC<Props> = ({ consumers }) => {
+ const [orderBy, setOrderBy] = React.useState<OrderByKey>('partition');
+ const [sortOrder, setSortOrder] = React.useState<SortOrder>(SortOrder.DESC);
+
+ const handleOrder = React.useCallback((columnName: string | null) => {
+ if (typeof columnName === 'string') {
+ setOrderBy(columnName as OrderByKey);
+ setSortOrder((prevOrder) =>
+ prevOrder === SortOrder.DESC ? SortOrder.ASC : SortOrder.DESC
+ );
+ }
+ }, []);
+
+ const sortedConsumers = React.useMemo(() => {
+ if (orderBy && sortOrder) {
+ const isNumberProperty =
+ orderBy === 'partition' ||
+ orderBy === 'currentOffset' ||
+ orderBy === 'endOffset' ||
+ orderBy === 'messagesBehind';
+
+ let comparator: ComparatorFunction<ConsumerGroupTopicPartition>;
+ if (isNumberProperty) {
+ comparator = numberComparator;
+ }
+
+ if (orderBy === 'host') {
+ comparator = ipComparator;
+ }
+
+ if (orderBy === 'consumerId') {
+ comparator = consumerIdComparator;
+ }
+
+ return consumers.sort((a, b) => comparator(a, b, sortOrder, orderBy));
+ }
+ return consumers;
+ }, [orderBy, sortOrder, consumers]);
+
return (
<TopicContentWrapper>
<td colSpan={3}>
@@ -17,16 +135,20 @@ const TopicContents: React.FC<Props> = ({ consumers }) => {
<Table isFullwidth>
<thead>
<tr>
- <TableHeaderCell title="Partition" />
- <TableHeaderCell title="Consumer ID" />
- <TableHeaderCell title="Host" />
- <TableHeaderCell title="Messages behind" />
- <TableHeaderCell title="Current offset" />
- <TableHeaderCell title="End offset" />
+ {TABLE_HEADERS_MAP.map((header) => (
+ <TableHeaderCell
+ key={header.orderBy}
+ title={header.title}
+ orderBy={orderBy}
+ sortOrder={sortOrder}
+ orderValue={header.orderBy}
+ handleOrderBy={handleOrder}
+ />
+ ))}
</tr>
</thead>
<tbody>
- {consumers.map((consumer) => (
+ {sortedConsumers.map((consumer) => (
<tr key={consumer.partition}>
<td>{consumer.partition}</td>
<td>{consumer.consumerId}</td>
| null | train | test | 2023-04-10T17:35:14 | "2022-12-28T14:04:08Z" | BulatKha | train |
provectus/kafka-ui/3385_3623 | provectus/kafka-ui | provectus/kafka-ui/3385 | provectus/kafka-ui/3623 | [
"connected"
] | dbdced5babfc5787c4e242348b8142f8b9a03252 | 005e74f2480f3ea961ec0c3d9b4a633b666df552 | [] | [] | "2023-04-04T10:15:44Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Format field previews one per line | Setup:
reddit connector, json, topic reddit-comments-json
Add multiple fields to preview:
```
sub : $.payload.subreddit
link : $.payload.link_url
```
The result has no line breaks:
<img width="260" alt="image" src="https://user-images.githubusercontent.com/1494347/220510677-5e20dafa-ab44-444f-b684-73cc03f79e38.png">
| [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/connectors/ConnectorsTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
index ece16b4cc10..8fb3df086ea 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/models/Topic.java
@@ -11,7 +11,7 @@
@Accessors(chain = true)
public class Topic {
- private String name, timeToRetainData, maxMessageBytes, messageKey, messageContent, customParameterValue;
+ private String name, timeToRetainData, maxMessageBytes, messageKey, messageValue, customParameterValue;
private int numberOfPartitions;
private CustomParameterType customParameterType;
private CleanupPolicyValue cleanupPolicyValue;
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
index e407d4fe3dc..c4e65c65bee 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
@@ -12,7 +12,7 @@
public class ProduceMessagePanel extends BasePage {
protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
- protected SelenideElement contentTextArea = $x("//div[@id='content']/textarea");
+ protected SelenideElement valueTextArea = $x("//div[@id='content']/textarea");
protected SelenideElement headersTextArea = $x("//div[@id='headers']/textarea");
protected SelenideElement submitBtn = headersTextArea.$x("../../../..//button[@type='submit']");
protected SelenideElement partitionDdl = $x("//ul[@name='partition']");
@@ -34,14 +34,14 @@ public ProduceMessagePanel setKeyField(String value) {
}
@Step
- public ProduceMessagePanel setContentFiled(String value) {
- clearByKeyboard(contentTextArea);
- contentTextArea.setValue(value);
+ public ProduceMessagePanel setValueFiled(String value) {
+ clearByKeyboard(valueTextArea);
+ valueTextArea.setValue(value);
return this;
}
@Step
- public ProduceMessagePanel setHeaderFiled(String value) {
+ public ProduceMessagePanel setHeadersFld(String value) {
headersTextArea.setValue(value);
return this;
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
index a171c728e46..b7d03dcf3a7 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java
@@ -1,6 +1,5 @@
package com.provectus.kafka.ui.pages.topics;
-import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
import static com.codeborne.selenide.Selenide.sleep;
@@ -296,16 +295,6 @@ public TopicDetails openConsumerGroup(String consumerId) {
return this;
}
- @Step
- public boolean isKeyMessageVisible(String keyMessage) {
- return keyMessage.equals($("td[title]").getText());
- }
-
- @Step
- public boolean isContentMessageVisible(String contentMessage) {
- return contentMessage.matches(contentMessageTab.getText().trim());
- }
-
private void selectYear(int expectedYear) {
while (getActualCalendarDate().getYear() > expectedYear) {
clickByJavaScript(previousMonthButton);
@@ -382,6 +371,13 @@ public TopicDetails.MessageGridItem getMessageByOffset(int offset) {
.findFirst().orElseThrow();
}
+ @Step
+ public TopicDetails.MessageGridItem getMessageByKey(String key) {
+ return initItems().stream()
+ .filter(e -> e.getKey().equals(key))
+ .findFirst().orElseThrow();
+ }
+
@Step
public List<MessageGridItem> getAllMessages() {
return initItems();
@@ -451,7 +447,7 @@ public String getKey() {
@Step
public String getValue() {
- return element.$x("./td[6]/span/p").getText().trim();
+ return element.$x("./td[6]").getAttribute("title");
}
@Step
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index ea08f57fe49..a041defc93e 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -185,7 +185,7 @@ private void sendMessage(String clusterName, Topic topic) {
createMessage.setKeySerde("String");
createMessage.setValueSerde("String");
createMessage.setKey(topic.getMessageKey());
- createMessage.setContent(topic.getMessageContent());
+ createMessage.setContent(topic.getMessageValue());
try {
messageApi().sendTopicMessages(clusterName, topic.getName(), createMessage).block();
} catch (WebClientResponseException ex) {
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
index 7a9df2c16ff..60f09b82933 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
@@ -80,19 +80,20 @@ const Message: React.FC<Props> = ({
filters?: PreviewFilter[]
) => {
if (!filters?.length || !jsonValue) return jsonValue;
-
const parsedJson = getParsedJson(jsonValue);
return (
<>
- {filters.map((item) => (
- <span key={`${item.path}--${item.field}`}>
- {item.field}:{' '}
- {JSON.stringify(
- JSONPath({ path: item.path, json: parsedJson, wrap: false })
- )}
- </span>
- ))}
+ {filters.map((item) => {
+ return (
+ <div key={`${item.path}--${item.field}`}>
+ {item.field}:{' '}
+ {JSON.stringify(
+ JSONPath({ path: item.path, json: parsedJson, wrap: false })
+ )}
+ </div>
+ );
+ })}
</>
);
};
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts
index 1c190ba9caa..9eb5e6b0627 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.styled.ts
@@ -58,7 +58,7 @@ export const MetadataLabel = styled.p`
width: 80px;
`;
-export const MetadataValue = styled.p`
+export const MetadataValue = styled.div`
color: ${({ theme }) => theme.topicMetaData.color.value};
font-size: 14px;
`;
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx
index c01f4af3d41..c96b395f4c7 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/__test__/Message.spec.tsx
@@ -1,6 +1,9 @@
import React from 'react';
import { TopicMessage, TopicMessageTimestampTypeEnum } from 'generated-sources';
-import Message, { Props } from 'components/Topics/Topic/Messages/Message';
+import Message, {
+ PreviewFilter,
+ Props,
+} from 'components/Topics/Topic/Messages/Message';
import { screen } from '@testing-library/react';
import { render } from 'lib/testHelpers';
import userEvent from '@testing-library/user-event';
@@ -8,6 +11,9 @@ import { formatTimestamp } from 'lib/dateTimeHelpers';
const messageContentText = 'messageContentText';
+const keyTest = '{"payload":{"subreddit":"learnprogramming"}}';
+const contentTest =
+ '{"payload":{"author":"DwaywelayTOP","archived":false,"name":"t3_11jshwd","id":"11jshwd"}}';
jest.mock(
'components/Topics/Topic/Messages/MessageContent/MessageContent',
() => () =>
@@ -28,10 +34,19 @@ describe('Message component', () => {
content: '{"data": "test"}',
headers: { header: 'test' },
};
-
+ const mockKeyFilters: PreviewFilter = {
+ field: 'sub',
+ path: '$.payload.subreddit',
+ };
+ const mockContentFilters: PreviewFilter = {
+ field: 'author',
+ path: '$.payload.author',
+ };
const renderComponent = (
props: Partial<Props> = {
message: mockMessage,
+ keyFilters: [],
+ contentFilters: [],
}
) =>
render(
@@ -39,8 +54,8 @@ describe('Message component', () => {
<tbody>
<Message
message={props.message || mockMessage}
- keyFilters={[]}
- contentFilters={[]}
+ keyFilters={props.keyFilters || []}
+ contentFilters={props.contentFilters || []}
/>
</tbody>
</table>
@@ -88,4 +103,24 @@ describe('Message component', () => {
await userEvent.click(messageToggleIcon);
expect(screen.getByText(messageContentText)).toBeInTheDocument();
});
+
+ it('should check if Preview filter showing for key', () => {
+ const props = {
+ message: { ...mockMessage, key: keyTest as string },
+ keyFilters: [mockKeyFilters],
+ };
+ renderComponent(props);
+ const keyFiltered = screen.getByText('sub: "learnprogramming"');
+ expect(keyFiltered).toBeInTheDocument();
+ });
+
+ it('should check if Preview filter showing for Value', () => {
+ const props = {
+ message: { ...mockMessage, content: contentTest as string },
+ contentFilters: [mockContentFilters],
+ };
+ renderComponent(props);
+ const keyFiltered = screen.getByText('author: "DwaywelayTOP"');
+ expect(keyFiltered).toBeInTheDocument();
+ });
});
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/connectors/ConnectorsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/connectors/ConnectorsTest.java
index 72fe769e312..9ca3526c710 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/connectors/ConnectorsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/connectors/ConnectorsTest.java
@@ -23,13 +23,13 @@ public class ConnectorsTest extends BaseTest {
private static final String MESSAGE_KEY = " ";
private static final Topic TOPIC_FOR_CREATE = new Topic()
.setName("topic-for-create-connector-" + randomAlphabetic(5))
- .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
+ .setMessageValue(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Topic TOPIC_FOR_DELETE = new Topic()
.setName("topic-for-delete-connector-" + randomAlphabetic(5))
- .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
+ .setMessageValue(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Topic TOPIC_FOR_UPDATE = new Topic()
.setName("topic-for-update-connector-" + randomAlphabetic(5))
- .setMessageContent(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
+ .setMessageValue(MESSAGE_CONTENT).setMessageKey(MESSAGE_KEY);
private static final Connector CONNECTOR_FOR_DELETE = new Connector()
.setName("connector-for-delete-" + randomAlphabetic(5))
.setConfig(getResourceAsString("testData/connectors/delete_connector_config.json"));
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
index 24048386982..3bbc7e7cd3d 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
@@ -28,23 +28,23 @@ public class MessagesTest extends BaseTest {
private static final Topic TOPIC_FOR_MESSAGES = new Topic()
.setName("topic-with-clean-message-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final Topic TOPIC_TO_CLEAR_AND_PURGE_MESSAGES = new Topic()
.setName("topic-to-clear-and-purge-messages-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final Topic TOPIC_FOR_CHECK_FILTERS = new Topic()
.setName("topic-for-check-filters-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final Topic TOPIC_TO_RECREATE = new Topic()
.setName("topic-to-recreate-attribute-" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final Topic TOPIC_FOR_CHECK_MESSAGES_COUNT = new Topic()
.setName("topic-for-check-messages-count" + randomAlphabetic(5))
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final List<Topic> TOPIC_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
@@ -65,12 +65,8 @@ public void produceMessageCheck() {
topicDetails
.openDetailsTab(MESSAGES);
produceMessage(TOPIC_FOR_MESSAGES);
- SoftAssert softly = new SoftAssert();
- softly.assertTrue(topicDetails.isKeyMessageVisible((TOPIC_FOR_MESSAGES.getMessageKey())),
- "isKeyMessageVisible()");
- softly.assertTrue(topicDetails.isContentMessageVisible((TOPIC_FOR_MESSAGES.getMessageContent()).trim()),
- "isContentMessageVisible()");
- softly.assertAll();
+ Assert.assertEquals(topicDetails.getMessageByKey(TOPIC_FOR_MESSAGES.getMessageKey()).getValue(),
+ TOPIC_FOR_MESSAGES.getMessageValue(), "message.getValue()");
}
@QaseId(19)
@@ -266,7 +262,7 @@ private void produceMessage(Topic topic) {
produceMessagePanel
.waitUntilScreenReady()
.setKeyField(topic.getMessageKey())
- .setContentFiled(topic.getMessageContent())
+ .setValueFiled(topic.getMessageValue())
.submitProduceMessage();
topicDetails
.waitUntilScreenReady();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
index 319c5f74f9e..ad20f595a4f 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java
@@ -45,7 +45,7 @@ public class TopicsTest extends BaseTest {
.setMaxSizeOnDisk(NOT_SET)
.setMaxMessageBytes("1048588")
.setMessageKey(randomAlphabetic(5))
- .setMessageContent(randomAlphabetic(10));
+ .setMessageValue(randomAlphabetic(10));
private static final Topic TOPIC_TO_CHECK_SETTINGS = new Topic()
.setName("new-topic-" + randomAlphabetic(5))
.setNumberOfPartitions(1)
| train | test | 2023-04-10T14:40:15 | "2023-02-22T03:04:38Z" | Haarolean | train |
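The frontend change above renders each preview filter in its own `<div>`, producing one `field: value` line per filter instead of a single run-on span. The extraction itself uses the `JSONPath({ path, json, wrap: false })` call already visible in the component; a small sketch of what one preview line resolves to, with the sample payload taken from the test data above (the import path is assumed here):

```
import { JSONPath } from 'jsonpath-plus';

const json = JSON.parse('{"payload":{"subreddit":"learnprogramming"}}');

// One preview filter, as configured in the UI: field label + JSONPath expression.
const filter = { field: 'sub', path: '$.payload.subreddit' };

const extracted = JSONPath({ path: filter.path, json, wrap: false });
const previewLine = `${filter.field}: ${JSON.stringify(extracted)}`;
// -> 'sub: "learnprogramming"', rendered on its own line per filter
```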
provectus/kafka-ui/3427_3626 | provectus/kafka-ui | provectus/kafka-ui/3427 | provectus/kafka-ui/3626 | [
"connected"
] | f6fe14cea55675f03656428aa3366c556ccddf22 | 9ac8549d7db1f32696ec8f61907fdb60c9c66a7e | [
"TODO, brokers page:\r\nadd:\r\n- partitions count per broker (+skew %)\r\n- leaders count per broker (+skew %)\r\n\r\nFrontend:\r\nBrokers table:\r\n- merge segment size & segment count, make it a single column \"Disk usage\", data format: \"%Size%, %x% segment(s)\"\r\n- Partitions skew (Column tooltip: \"The divergence from the average brokers' value\"). Display \"-\" if null, partitionsSkew field. \r\n**Percentage might be either negative or positive**\r\nChange the font color depending on the skew:\r\n10-20% -> yellow\r\n20%+ -> red\r\n- Leaders\r\n- Leader skew\r\n- Online partitions (inSyncPartitions of partitions). If inSyncPartitions != partitions -> font color red"
] | [
"why are we added this ? ",
"After changes of the `SizeCell` props the props string needs to be formatted as below and the eslint-disable comment ignored, so I added `AsAny`, to ignore it before props declaration",
"lets not call `getValue` multiple times lets just call it once and put it in a variable",
"Updated"
] | "2023-04-06T07:41:53Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/accepted"
] | View broker skew in kafka-ui | Hi there,
is there a way to see if there is a broker skew in kafka ui?
screenshot from conduktor.
<img width="1465" alt="MicrosoftTeams-image" src="https://user-images.githubusercontent.com/51237742/222124221-865dcf40-3a9c-49a4-a60f-60359bde667e.png">
Cheers | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java",
"kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx",
"kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx",
"kafka-ui-react-app/src/theme/theme.ts"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java",
"kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx",
"kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.styled.ts",
"kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.tsx",
"kafka-ui-react-app/src/components/common/NewTable/ColoredCell.tsx",
"kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx",
"kafka-ui-react-app/src/theme/theme.ts"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
index b625533d1dd..46efc670008 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/PartitionDistributionStats.java
@@ -1,7 +1,7 @@
package com.provectus.kafka.ui.model;
import java.math.BigDecimal;
-import java.math.MathContext;
+import java.math.RoundingMode;
import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nullable;
@@ -21,8 +21,6 @@ public class PartitionDistributionStats {
// avg skew will show unuseful results on low number of partitions
private static final int MIN_PARTITIONS_FOR_SKEW_CALCULATION = 50;
- private static final MathContext ROUNDING_MATH_CTX = new MathContext(3);
-
private final Map<Node, Integer> partitionLeaders;
private final Map<Node, Integer> partitionsCount;
private final Map<Node, Integer> inSyncPartitions;
@@ -88,6 +86,7 @@ private BigDecimal calculateAvgSkew(@Nullable Integer value, double avgValue) {
return null;
}
value = value == null ? 0 : value;
- return new BigDecimal((value - avgValue) / avgValue * 100.0).round(ROUNDING_MATH_CTX);
+ return new BigDecimal((value - avgValue) / avgValue * 100.0)
+ .setScale(1, RoundingMode.HALF_UP);
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
index 50ecdff3597..9e81a0795cc 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java
@@ -48,7 +48,8 @@ public List<SelenideElement> getAllVisibleElements() {
}
private List<SelenideElement> getEnabledColumnHeaders() {
- return Stream.of("Broker ID", "Segment Size", "Segment Count", "Port", "Host")
+ return Stream.of("Broker ID", "Disk usage", "Partitions skew",
+ "Leaders", "Leader skew", "Online partitions", "Port", "Host")
.map(name -> $x(String.format(columnHeaderLocator, name)))
.collect(Collectors.toList());
}
diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx
index 966edecf1fd..d8cd0a2f763 100644
--- a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx
+++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx
@@ -11,7 +11,9 @@ import CheckMarkRoundIcon from 'components/common/Icons/CheckMarkRoundIcon';
import { ColumnDef } from '@tanstack/react-table';
import { clusterBrokerPath } from 'lib/paths';
import Tooltip from 'components/common/Tooltip/Tooltip';
+import ColoredCell from 'components/common/NewTable/ColoredCell';
+import SkewHeader from './SkewHeader/SkewHeader';
import * as S from './BrokersList.styled';
const NA = 'N/A';
@@ -57,11 +59,15 @@ const BrokersList: React.FC = () => {
count: segmentCount || NA,
port: broker?.port,
host: broker?.host,
+ partitionsLeader: broker?.partitionsLeader,
+ partitionsSkew: broker?.partitionsSkew,
+ leadersSkew: broker?.leadersSkew,
+ inSyncPartitions: broker?.inSyncPartitions,
};
});
}, [diskUsage, brokers]);
- const columns = React.useMemo<ColumnDef<typeof rows>[]>(
+ const columns = React.useMemo<ColumnDef<(typeof rows)[number]>[]>(
() => [
{
header: 'Broker ID',
@@ -84,7 +90,7 @@ const BrokersList: React.FC = () => {
),
},
{
- header: 'Segment Size',
+ header: 'Disk usage',
accessorKey: 'size',
// eslint-disable-next-line react/no-unstable-nested-components
cell: ({ getValue, table, cell, column, renderValue, row }) =>
@@ -98,10 +104,56 @@ const BrokersList: React.FC = () => {
cell={cell}
getValue={getValue}
renderValue={renderValue}
+ renderSegments
/>
),
},
- { header: 'Segment Count', accessorKey: 'count' },
+ {
+ // eslint-disable-next-line react/no-unstable-nested-components
+ header: () => <SkewHeader />,
+ accessorKey: 'partitionsSkew',
+ // eslint-disable-next-line react/no-unstable-nested-components
+ cell: ({ getValue }) => {
+ const value = getValue<number>();
+ return (
+ <ColoredCell
+ value={value ? `${value.toFixed(2)}%` : '-'}
+ warn={value >= 10 && value < 20}
+ attention={value >= 20}
+ />
+ );
+ },
+ },
+ { header: 'Leaders', accessorKey: 'partitionsLeader' },
+ {
+ header: 'Leader skew',
+ accessorKey: 'leadersSkew',
+ // eslint-disable-next-line react/no-unstable-nested-components
+ cell: ({ getValue }) => {
+ const value = getValue<number>();
+ return (
+ <ColoredCell
+ value={value ? `${value.toFixed(2)}%` : '-'}
+ warn={value >= 10 && value < 20}
+ attention={value >= 20}
+ />
+ );
+ },
+ },
+ {
+ header: 'Online partitions',
+ accessorKey: 'inSyncPartitions',
+ // eslint-disable-next-line react/no-unstable-nested-components
+ cell: ({ getValue, row }) => {
+ const value = getValue<number>();
+ return (
+ <ColoredCell
+ value={value}
+ attention={value !== row.original.count}
+ />
+ );
+ },
+ },
{ header: 'Port', accessorKey: 'port' },
{
header: 'Host',
diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.styled.ts b/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.styled.ts
new file mode 100644
index 00000000000..eea2fa3cd98
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.styled.ts
@@ -0,0 +1,11 @@
+import styled from 'styled-components';
+import { MessageTooltip } from 'components/common/Tooltip/Tooltip.styled';
+
+export const CellWrapper = styled.div`
+ display: flex;
+ gap: 10px;
+
+ ${MessageTooltip} {
+ max-height: unset;
+ }
+`;
diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.tsx b/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.tsx
new file mode 100644
index 00000000000..978d1768dd7
--- /dev/null
+++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/SkewHeader/SkewHeader.tsx
@@ -0,0 +1,17 @@
+import React from 'react';
+import Tooltip from 'components/common/Tooltip/Tooltip';
+import InfoIcon from 'components/common/Icons/InfoIcon';
+
+import * as S from './SkewHeader.styled';
+
+const SkewHeader: React.FC = () => (
+ <S.CellWrapper>
+ Partitions skew
+ <Tooltip
+ value={<InfoIcon />}
+ content="The divergence from the average brokers' value"
+ />
+ </S.CellWrapper>
+);
+
+export default SkewHeader;
diff --git a/kafka-ui-react-app/src/components/common/NewTable/ColoredCell.tsx b/kafka-ui-react-app/src/components/common/NewTable/ColoredCell.tsx
new file mode 100644
index 00000000000..df8ab2d6a8d
--- /dev/null
+++ b/kafka-ui-react-app/src/components/common/NewTable/ColoredCell.tsx
@@ -0,0 +1,41 @@
+import React from 'react';
+import styled from 'styled-components';
+
+interface CellProps {
+ isWarning?: boolean;
+ isAttention?: boolean;
+}
+
+interface ColoredCellProps {
+ value: number | string;
+ warn?: boolean;
+ attention?: boolean;
+}
+
+const Cell = styled.div<CellProps>`
+ color: ${(props) => {
+ if (props.isAttention) {
+ return props.theme.table.colored.color.attention;
+ }
+
+ if (props.isWarning) {
+ return props.theme.table.colored.color.warning;
+ }
+
+ return 'inherit';
+ }};
+`;
+
+const ColoredCell: React.FC<ColoredCellProps> = ({
+ value,
+ warn,
+ attention,
+}) => {
+ return (
+ <Cell isWarning={warn} isAttention={attention}>
+ {value}
+ </Cell>
+ );
+};
+
+export default ColoredCell;
diff --git a/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx b/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx
index 00a60086d9d..24485342aa4 100644
--- a/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx
+++ b/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx
@@ -3,8 +3,15 @@ import { CellContext } from '@tanstack/react-table';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
// eslint-disable-next-line @typescript-eslint/no-explicit-any
-const SizeCell: React.FC<CellContext<any, unknown>> = ({ getValue }) => (
- <BytesFormatted value={getValue<string | number>()} />
+type AsAny = any;
+
+const SizeCell: React.FC<
+ CellContext<AsAny, unknown> & { renderSegments?: boolean }
+> = ({ getValue, row, renderSegments = false }) => (
+ <>
+ <BytesFormatted value={getValue<string | number>()} />
+ {renderSegments ? `, ${row?.original.count} segment(s)` : null}
+ </>
);
export default SizeCell;
diff --git a/kafka-ui-react-app/src/theme/theme.ts b/kafka-ui-react-app/src/theme/theme.ts
index 33dbf1c619e..80cc58991c3 100644
--- a/kafka-ui-react-app/src/theme/theme.ts
+++ b/kafka-ui-react-app/src/theme/theme.ts
@@ -533,6 +533,12 @@ export const theme = {
active: Colors.neutral[90],
},
},
+ colored: {
+ color: {
+ attention: Colors.red[50],
+ warning: Colors.yellow[20],
+ },
+ },
expander: {
normal: Colors.brand[30],
hover: Colors.brand[40],
@@ -928,6 +934,12 @@ export const darkTheme: ThemeType = {
active: Colors.neutral[0],
},
},
+ colored: {
+ color: {
+ attention: Colors.red[50],
+ warning: Colors.yellow[20],
+ },
+ },
expander: {
normal: Colors.brand[30],
hover: Colors.brand[40],
| null | train | test | 2023-05-08T10:00:43 | "2023-03-01T11:16:49Z" | wanisfahmyDE | train |
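The backend above reports each broker's skew as its relative divergence from the brokers' average, `(value - avg) / avg * 100`, rounded to one decimal with HALF_UP; the frontend then colours the cell yellow between 10% and 20% and red at 20% and above via `ColoredCell`. A small sketch of that arithmetic and bucketing — the broker counts are made up and `Math.round` only approximates HALF_UP for the illustration:

```
// Hypothetical numbers: brokers holding 40, 50 and 60 partitions (average = 50).
const skewPercent = (value: number, avg: number) =>
  Math.round(((value - avg) / avg) * 100 * 10) / 10;

skewPercent(60, 50); // ->  20  => rendered red    (attention: value >= 20)
skewPercent(55, 50); // ->  10  => rendered yellow (warn: 10 <= value < 20)
skewPercent(40, 50); // -> -20  => negative skew is possible; shown in the default colour
```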
provectus/kafka-ui/2668_3628 | provectus/kafka-ui | provectus/kafka-ui/2668 | provectus/kafka-ui/3628 | [
"connected"
] | 87a8f08ae1f72719c02e570a74de6cdb8f4a6058 | ca225440d84744add5ce291ba792eccda2e2e627 | [
"can you assign me this issue\r\n",
"@Karthik-PM sure :)",
"@Karthik-PM any luck?",
"sorry can i get some more time? i have been facing a lot of build errors"
] | [] | "2023-04-06T10:19:17Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | Get rid of "Topic analysis successfully started" message | It makes no sense considering the page contents changes | [
"kafka-ui-react-app/src/lib/hooks/api/topics.ts"
] | [
"kafka-ui-react-app/src/lib/hooks/api/topics.ts"
] | [] | diff --git a/kafka-ui-react-app/src/lib/hooks/api/topics.ts b/kafka-ui-react-app/src/lib/hooks/api/topics.ts
index 23e6d65ce37..f71299f19bb 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/topics.ts
+++ b/kafka-ui-react-app/src/lib/hooks/api/topics.ts
@@ -314,9 +314,6 @@ export function useAnalyzeTopic(props: GetTopicDetailsRequest) {
const client = useQueryClient();
return useMutation(() => api.analyzeTopic(props), {
onSuccess: () => {
- showSuccessAlert({
- message: `Topic analysis successfully started`,
- });
client.invalidateQueries(topicKeys.statistics(props));
},
});
| null | train | test | 2023-04-04T05:56:02 | "2022-09-29T17:22:23Z" | Haarolean | train |
provectus/kafka-ui/3164_3633 | provectus/kafka-ui | provectus/kafka-ui/3164 | provectus/kafka-ui/3633 | [
"connected"
] | 8783da313fb342c883a7c54ea98c3a5240650773 | 734d4ccdf71ee9b5c85570faa0c4767db3624509 | [
"Sorting by \"Num of Topics\" and by \"messages Behind\" was added and merged to the master. \r\nI am adding references to the relevant PRs:\r\n[https://github.com/provectus/kafka-ui/pull/3633](url)\r\n[https://github.com/provectus/kafka-ui/pull/3527](url)\r\n\r\nI think this issue can be closed\r\n"
] | [
"Please use `.distinct().count()` instead",
"please format code using `/etc/checkstyle.xml`",
"Done. \r\nIn order to have compatibility to the topic field type in consumerGroupDTO I casted the resulted long to Integer. is it fine ?",
"The build finishes successfully without any errors or warnings (from the checkstyle plugin) regarding style issues here. \r\nWhat exactly is the issue ? what should I do ?",
" return (int) Stream.concat(\r\n offsets.keySet().stream().map(TopicPartition::topic),\r\n members.stream().flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))\r\n )\r\n .distinct()\r\n .count();",
"Fixed"
] | "2023-04-07T10:58:40Z" | [
"type/enhancement",
"good first issue",
"scope/backend",
"scope/frontend",
"status/accepted"
] | Consumers: Add sorting by additional columns | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
Sorting is only possible by the Num of messages and State; it is not possible by the Num of topics or Messages behind.
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
Sorting by the "Num of topics" and "Messages behind" should be possible as well.
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
[f4e6afe](https://github.com/provectus/kafka-ui/commit/f4e6afe)
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Log in to Kafka UI and navigate to the Consumers page.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
-->

**Additional context**
<!--
(Add any other context about the problem here)
-->
@Haarolean requested to create a separate issue from the #2651 "Please raise an issue for sorting by num of topics and messages behind." | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml",
"kafka-ui-react-app/src/components/ConsumerGroups/List.tsx"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
index 21d9efda9ca..4ebbf4c70f5 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ConsumerGroupMapper.java
@@ -11,8 +11,6 @@
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
@@ -82,15 +80,8 @@ private static <T extends ConsumerGroupDTO> T convertToConsumerGroup(
InternalConsumerGroup c, T consumerGroup) {
consumerGroup.setGroupId(c.getGroupId());
consumerGroup.setMembers(c.getMembers().size());
-
- int numTopics = Stream.concat(
- c.getOffsets().keySet().stream().map(TopicPartition::topic),
- c.getMembers().stream()
- .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
- ).collect(Collectors.toSet()).size();
-
consumerGroup.setMessagesBehind(c.getMessagesBehind());
- consumerGroup.setTopics(numTopics);
+ consumerGroup.setTopics(c.getTopicNum());
consumerGroup.setSimple(c.isSimple());
Optional.ofNullable(c.getState())
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
index e8199fa8ef5..06de3cb7d68 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalConsumerGroup.java
@@ -5,6 +5,7 @@
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
+import java.util.stream.Stream;
import lombok.Builder;
import lombok.Data;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
@@ -21,6 +22,7 @@ public class InternalConsumerGroup {
private final Map<TopicPartition, Long> offsets;
private final Map<TopicPartition, Long> endOffsets;
private final Long messagesBehind;
+ private final Integer topicNum;
private final String partitionAssignor;
private final ConsumerGroupState state;
private final Node coordinator;
@@ -44,22 +46,12 @@ public static InternalConsumerGroup create(
builder.simple(description.isSimpleConsumerGroup());
builder.state(description.state());
builder.partitionAssignor(description.partitionAssignor());
- builder.members(
- description.members().stream()
- .map(m ->
- InternalConsumerGroup.InternalMember.builder()
- .assignment(m.assignment().topicPartitions())
- .clientId(m.clientId())
- .groupInstanceId(m.groupInstanceId().orElse(""))
- .consumerId(m.consumerId())
- .clientId(m.clientId())
- .host(m.host())
- .build()
- ).collect(Collectors.toList())
- );
+ Collection<InternalMember> internalMembers = initInternalMembers(description);
+ builder.members(internalMembers);
builder.offsets(groupOffsets);
builder.endOffsets(topicEndOffsets);
builder.messagesBehind(calculateMessagesBehind(groupOffsets, topicEndOffsets));
+ builder.topicNum(calculateTopicNum(groupOffsets, internalMembers));
Optional.ofNullable(description.coordinator()).ifPresent(builder::coordinator);
return builder.build();
}
@@ -80,4 +72,31 @@ private static Long calculateMessagesBehind(Map<TopicPartition, Long> offsets, M
return messagesBehind;
}
+ private static Integer calculateTopicNum(Map<TopicPartition, Long> offsets, Collection<InternalMember> members) {
+
+ long topicNum = Stream.concat(
+ offsets.keySet().stream().map(TopicPartition::topic),
+ members.stream()
+ .flatMap(m -> m.getAssignment().stream().map(TopicPartition::topic))
+ ).distinct().count();
+
+ return Integer.valueOf((int) topicNum);
+
+ }
+
+ private static Collection<InternalMember> initInternalMembers(ConsumerGroupDescription description) {
+ return description.members().stream()
+ .map(m ->
+ InternalConsumerGroup.InternalMember.builder()
+ .assignment(m.assignment().topicPartitions())
+ .clientId(m.clientId())
+ .groupInstanceId(m.groupInstanceId().orElse(""))
+ .consumerId(m.consumerId())
+ .clientId(m.clientId())
+ .host(m.host())
+ .build()
+ ).collect(Collectors.toList());
+ }
+
+
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
index e848146881d..815fdbef6a2 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumerGroupService.java
@@ -101,6 +101,9 @@ private boolean isConsumerGroupRelatesToTopic(String topic,
public record ConsumerGroupsPage(List<InternalConsumerGroup> consumerGroups, int totalPages) {
}
+ private record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) {
+ }
+
public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
KafkaCluster cluster,
int pageNum,
@@ -159,22 +162,19 @@ private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdmi
sortAndPaginate(descriptions.values(), comparator, pageNum, perPage, sortOrderDto).toList());
}
case MESSAGES_BEHIND -> {
- record GroupWithDescr(InternalConsumerGroup icg, ConsumerGroupDescription cgd) { }
Comparator<GroupWithDescr> comparator = Comparator.comparingLong(gwd ->
gwd.icg.getMessagesBehind() == null ? 0L : gwd.icg.getMessagesBehind());
- var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+ yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
+ }
+
+ case TOPIC_NUM -> {
+
+ Comparator<GroupWithDescr> comparator = Comparator.comparingInt(gwd -> gwd.icg.getTopicNum());
+
+ yield loadDescriptionsByInternalConsumerGroups(ac, groups, comparator, pageNum, perPage, sortOrderDto);
- yield ac.describeConsumerGroups(groupNames)
- .flatMap(descriptionsMap -> {
- List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
- return getConsumerGroups(ac, descriptions)
- .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
- .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
- .map(GroupWithDescr::cgd).toList());
- }
- );
}
};
}
@@ -209,6 +209,27 @@ private Mono<List<ConsumerGroupDescription>> describeConsumerGroups(ReactiveAdmi
.map(cgs -> new ArrayList<>(cgs.values()));
}
+
+ private Mono<List<ConsumerGroupDescription>> loadDescriptionsByInternalConsumerGroups(ReactiveAdminClient ac,
+ List<ConsumerGroupListing> groups,
+ Comparator<GroupWithDescr> comparator,
+ int pageNum,
+ int perPage,
+ SortOrderDTO sortOrderDto) {
+ var groupNames = groups.stream().map(ConsumerGroupListing::groupId).toList();
+
+ return ac.describeConsumerGroups(groupNames)
+ .flatMap(descriptionsMap -> {
+ List<ConsumerGroupDescription> descriptions = descriptionsMap.values().stream().toList();
+ return getConsumerGroups(ac, descriptions)
+ .map(icg -> Streams.zip(icg.stream(), descriptions.stream(), GroupWithDescr::new).toList())
+ .map(gwd -> sortAndPaginate(gwd, comparator, pageNum, perPage, sortOrderDto)
+ .map(GroupWithDescr::cgd).toList());
+ }
+ );
+
+ }
+
public Mono<InternalConsumerGroup> getConsumerGroupDetail(KafkaCluster cluster,
String consumerGroupId) {
return adminClientService.get(cluster)
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index aef72444662..0d54fa7e79a 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -2441,6 +2441,7 @@ components:
- MEMBERS
- STATE
- MESSAGES_BEHIND
+ - TOPIC_NUM
ConsumerGroupsPageResponse:
type: object
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
index ef8d73f5156..795ac75a5e9 100644
--- a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
+++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx
@@ -51,9 +51,9 @@ const List = () => {
accessorKey: 'members',
},
{
+ id: ConsumerGroupOrdering.TOPIC_NUM,
header: 'Num Of Topics',
accessorKey: 'topics',
- enableSorting: false,
},
{
id: ConsumerGroupOrdering.MESSAGES_BEHIND,
| null | val | test | 2023-04-20T13:18:34 | "2022-12-28T12:30:46Z" | BulatKha | train |
provectus/kafka-ui/2686_3634 | provectus/kafka-ui | provectus/kafka-ui/2686 | provectus/kafka-ui/3634 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | db86942e47621cf2a2ded26d2c7cdbd2b0ee202a | [
"Can I work on this issue? can you please assign me this?",
"> Can I work on this issue? can you please assign me this?\r\n\r\nsure :)",
"Hi @Haarolean I am very new to apache kafka can you please guide me hou to setup a local cluster so that I can test this UI and can work on this issue? I really want to contribue to this project but without setting up the cluster It is impossible to work so I need your help.",
"Hi @Haarolean could you please help me?",
"@detronetdip please refer to:\r\n[contributing guide](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md)\r\n[frontend build guide](https://github.com/provectus/kafka-ui/blob/master/kafka-ui-react-app/README.md)\r\nYou'll have to run the compose: `docker-compose -f ./documentation/compose/kafka-ui.yaml up` and set `VITE_DEV_PROXY` to `http://localhost:8080`.",
"@detronetdip any luck?",
"@Haarolean I am working",
"@Haarolean I followed you instruction, and on opening `localhost:3000` it is showing a blank screen. I think I need your help again\r\n\r\n\r\n",
"> @Haarolean I followed you instruction, and on opening `localhost:3000` it is showing a blank screen. I think I need your help again\r\n> \r\n> \r\n\r\nI suppose you could check the red error in your browser console as a start.",
"@workshur by saying search input should be clearable, did you mean there will be a button to clear the input field? and please guide me which search input you are taking about.",
"> @workshur by saying search input should be clearable, did you mean there will be a button to clear the input field? and please guide me which search input you are taking about.\r\n\r\nYes it's about having a button to clear the inout.\r\nWe're talking about all the search inputs in the app, there's a component class mentioned in the issue.",
"@Haarolean I have run `docker-compose -f ./documentation/compose/kafka-ui.yaml up` command and have set `VITE_DEV_PROXY` to `http://localhost:8080` in the `.env` file inside `kafka-ui-react-app` folder and then I visit `localhost:8080` in my browser and the finally I can see the UI but. Is this the correct way? If it is then should I stop the docker containers and restart it again to view changes? Actually I am unable to find how to properly setup the development environment and how to view my changes. Please guide me.",
"@detronetdip as it is stated in [frontend module readme](https://github.com/provectus/kafka-ui/tree/master/kafka-ui-react-app) if you develop the frontend part, you should run it via `pnpm start` and visit the link produced by this command. The running containers are used as a backend which you don't have to rebuild unless you do modifications to the backend side as well.",
"\r\n@Haarolean It is showing this error.",
"Itβs your ad blocker.Β On 31 Oct 2022, at 00:44, Ayondip Jana ***@***.***> wrote:ο»Ώ\n\n@Haarolean It is showing this error.\n\nβReply to this email directly, view it on GitHub, or unsubscribe.You are receiving this because you were mentioned.Message ID: ***@***.***>",
"@Haarolean I have disabled the ad-blocker and now it is throwing a branch of errors\r\n\r\n\r\n",
"> @Haarolean I have disabled the ad-blocker and now it is throwing a branch of errors\r\n> \r\n> \r\n\r\nI honestly have no foggiest what you're doing wrong from this obscure screenshots. There's a decent guide which does work without any problems in 90% of cases. Please ensure that backend is accessible via localhost:3000/api/xxx, the proxy is set properly, etc.",
"Ok let try again from the very beginning.",
"@detronetdip any luck?",
"Hi, I am looking forward to diving deep into the project, but I went into the same problem as @detronetdip did. After I followed the instruction on the readme page (stated below) and set the .env file as empty\r\n<img width=\"811\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/226153059-4b62cfd7-af2a-41be-aa83-f171a498ff0f.png\">\r\nand I got a blank screen and the error in the browser console showed\r\n<img width=\"423\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/226153082-27b368a3-d0db-4791-9e05-0af0fec6ab27.png\">\r\n<img width=\"507\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/226153100-acbe80d6-1b7d-4165-beb3-b298226023a5.png\">\r\nIt seems that there are some cache problems with the front end when starting the project using pnpm. The backend works totally fine since I can access UI through the 8080 port. Do we actually need to configure the dev proxy even when using docker as the backend? If so, how to set it? The readme is not very clear with the API server. What is that and how to set it as a development environment? \r\nBTW, I am using WSL2 as my dev environment, hope this is not the cause of the problem",
"@Mano-Liaoyan hey, wanna jump into discord so we don't spam over here? We can figure out what's happening. The link is available at top of the readme, but here's another one: https://discord.gg/4DWzD7pGE5\r\n\r\nEDIT: Solved: missing dev proxy in .env.local"
] | [
"i don't think passing it as `children` here is very intuitive , we can pass it as a prop , because i think it will be hard to understand what is a `children` of a `Input` component.",
"ok i will try to change ",
"why we are using `createRef` instead of `useRef` ?",
"yeh i will replace with useRef it's more effectively"
] | "2023-04-07T12:05:09Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | Add clear button to the search component | ### Describe the solution you'd like
Search input should be clearable
### Additional context
Component: src/components/common/Search/Search.tsx
| [
"kafka-ui-react-app/src/components/common/Input/Input.styled.ts",
"kafka-ui-react-app/src/components/common/Input/Input.tsx",
"kafka-ui-react-app/src/components/common/Search/Search.tsx",
"kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx"
] | [
"kafka-ui-react-app/src/components/common/Input/Input.styled.ts",
"kafka-ui-react-app/src/components/common/Input/Input.tsx",
"kafka-ui-react-app/src/components/common/Search/Search.tsx",
"kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Input/Input.styled.ts b/kafka-ui-react-app/src/components/common/Input/Input.styled.ts
index 9495aaecbeb..f21962fe6b6 100644
--- a/kafka-ui-react-app/src/components/common/Input/Input.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Input/Input.styled.ts
@@ -29,6 +29,16 @@ export const Wrapper = styled.div`
width: 16px;
fill: ${({ theme }) => theme.input.icon.color};
}
+ svg:last-child {
+ position: absolute;
+ top: 8px;
+ line-height: 0;
+ z-index: 1;
+ left: unset;
+ right: 12px;
+ height: 16px;
+ width: 16px;
+ }
`;
export const Input = styled.input<InputProps>(
diff --git a/kafka-ui-react-app/src/components/common/Input/Input.tsx b/kafka-ui-react-app/src/components/common/Input/Input.tsx
index ae76bc47178..4d04b730e51 100644
--- a/kafka-ui-react-app/src/components/common/Input/Input.tsx
+++ b/kafka-ui-react-app/src/components/common/Input/Input.tsx
@@ -16,6 +16,7 @@ export interface InputProps
withError?: boolean;
label?: React.ReactNode;
hint?: React.ReactNode;
+ clearIcon?: React.ReactNode;
// Some may only accept integer, like `Number of Partitions`
// some may accept decimal
@@ -99,19 +100,22 @@ function pasteNumberCheck(
return value;
}
-const Input: React.FC<InputProps> = ({
- name,
- hookFormOptions,
- search,
- inputSize = 'L',
- type,
- positiveOnly,
- integerOnly,
- withError = false,
- label,
- hint,
- ...rest
-}) => {
+const Input = React.forwardRef<HTMLInputElement, InputProps>((props, ref) => {
+ const {
+ name,
+ hookFormOptions,
+ search,
+ inputSize = 'L',
+ type,
+ positiveOnly,
+ integerOnly,
+ withError = false,
+ label,
+ hint,
+ clearIcon,
+ ...rest
+ } = props;
+
const methods = useFormContext();
const fieldId = React.useId();
@@ -168,7 +172,6 @@ const Input: React.FC<InputProps> = ({
// if the field is a part of react-hook-form form
inputOptions = { ...rest, ...methods.register(name, hookFormOptions) };
}
-
return (
<div>
{label && <InputLabel htmlFor={rest.id || fieldId}>{label}</InputLabel>}
@@ -181,8 +184,11 @@ const Input: React.FC<InputProps> = ({
type={type}
onKeyPress={keyPressEventHandler}
onPaste={pasteEventHandler}
+ ref={ref}
{...inputOptions}
/>
+ {clearIcon}
+
{withError && isHookFormField && (
<S.FormError>
<ErrorMessage name={name} />
@@ -192,6 +198,6 @@ const Input: React.FC<InputProps> = ({
</S.Wrapper>
</div>
);
-};
+});
export default Input;
diff --git a/kafka-ui-react-app/src/components/common/Search/Search.tsx b/kafka-ui-react-app/src/components/common/Search/Search.tsx
index 66c0e95030b..65116d645a6 100644
--- a/kafka-ui-react-app/src/components/common/Search/Search.tsx
+++ b/kafka-ui-react-app/src/components/common/Search/Search.tsx
@@ -1,7 +1,9 @@
-import React from 'react';
+import React, { useRef } from 'react';
import { useDebouncedCallback } from 'use-debounce';
import Input from 'components/common/Input/Input';
import { useSearchParams } from 'react-router-dom';
+import CloseIcon from 'components/common/Icons/CloseIcon';
+import styled from 'styled-components';
interface SearchProps {
placeholder?: string;
@@ -10,6 +12,16 @@ interface SearchProps {
value?: string;
}
+const IconButtonWrapper = styled.span.attrs(() => ({
+ role: 'button',
+ tabIndex: '0',
+}))`
+ height: 16px !important;
+ display: inline-block;
+ &:hover {
+ cursor: pointer;
+ }
+`;
const Search: React.FC<SearchProps> = ({
placeholder = 'Search',
disabled = false,
@@ -17,7 +29,11 @@ const Search: React.FC<SearchProps> = ({
onChange,
}) => {
const [searchParams, setSearchParams] = useSearchParams();
+ const ref = useRef<HTMLInputElement>(null);
const handleChange = useDebouncedCallback((e) => {
+ if (ref.current != null) {
+ ref.current.value = e.target.value;
+ }
if (onChange) {
onChange(e.target.value);
} else {
@@ -28,6 +44,15 @@ const Search: React.FC<SearchProps> = ({
setSearchParams(searchParams);
}
}, 500);
+ const clearSearchValue = () => {
+ if (searchParams.get('q')) {
+ searchParams.set('q', '');
+ setSearchParams(searchParams);
+ }
+ if (ref.current != null) {
+ ref.current.value = '';
+ }
+ };
return (
<Input
@@ -37,7 +62,13 @@ const Search: React.FC<SearchProps> = ({
defaultValue={value || searchParams.get('q') || ''}
inputSize="M"
disabled={disabled}
+ ref={ref}
search
+ clearIcon={
+ <IconButtonWrapper onClick={clearSearchValue}>
+ <CloseIcon />
+ </IconButtonWrapper>
+ }
/>
);
};
diff --git a/kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx b/kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx
index 808f229317e..2103d223367 100644
--- a/kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx
+++ b/kafka-ui-react-app/src/components/common/Search/__tests__/Search.spec.tsx
@@ -41,4 +41,24 @@ describe('Search', () => {
render(<Search />);
expect(screen.queryByPlaceholderText('Search')).toBeInTheDocument();
});
+
+ it('Clear button is visible', () => {
+ render(<Search placeholder={placeholder} />);
+
+ const clearButton = screen.getByRole('button');
+ expect(clearButton).toBeInTheDocument();
+ });
+
+ it('Clear button should clear text from input', async () => {
+ render(<Search placeholder={placeholder} />);
+
+ const searchField = screen.getAllByRole('textbox')[0];
+ await userEvent.type(searchField, 'some text');
+ expect(searchField).toHaveValue('some text');
+
+ const clearButton = screen.getByRole('button');
+ await userEvent.click(clearButton);
+
+ expect(searchField).toHaveValue('');
+ });
});
| null | test | test | 2023-05-02T14:34:57 | "2022-09-30T11:13:06Z" | workshur | train |
provectus/kafka-ui/3430_3655 | provectus/kafka-ui | provectus/kafka-ui/3430 | provectus/kafka-ui/3655 | [
"connected"
] | 5771c11316d9e4ac67cf6b1ed2b015c7f2d12d4a | 20bb274f0ed15f15029960a6373c61592ab1aeeb | [] | [
"can we clear the `mockedNavigate` , with the `beforeEach` or `afterEach` hook so that if it gets called once it won't interfere with the other ones.",
"we checking mockedNavigate call 3 times with different data render component so we cant use beforeEach because it different every time "
] | "2023-04-11T10:18:21Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted",
"area/wizard"
] | Redirect the user to the wizard page if no clusters present upon app startup | If:
1. there are no clusters configured
2. dynamic config is ON
then redirect the user to the wizard page initially | [
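A condensed sketch of that startup check, using the same hooks and route constant as the patch below (`useClusters`, the `hasDynamicConfig` flag from `GlobalSettingsContext`, and `clusterNewConfigPath`); the full change additionally gates the redirect on application-config permissions when RBAC is enabled:

```tsx
// Sketch: redirect to the wizard when dynamic config is on and no clusters are configured.
import { useContext, useEffect } from 'react';
import { useNavigate } from 'react-router-dom';
import { useClusters } from 'lib/hooks/api/clusters';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { clusterNewConfigPath } from 'lib/paths';

const useWizardRedirect = () => {
  const clusters = useClusters();
  const { hasDynamicConfig } = useContext(GlobalSettingsContext);
  const navigate = useNavigate();

  useEffect(() => {
    // only redirect when dynamic config is enabled and the cluster list is known to be empty
    if (!hasDynamicConfig) return;
    if (clusters.data?.length !== 0) return;
    navigate(clusterNewConfigPath);
  }, [clusters.data, hasDynamicConfig, navigate]);
};
```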
"kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx",
"kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx"
] | [
"kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx",
"kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx"
] | [
"kafka-ui-react-app/src/lib/testHelpers.tsx"
] | diff --git a/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx b/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx
index a5697e89abd..fd52331f921 100644
--- a/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx
+++ b/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx
@@ -1,9 +1,14 @@
-import React, { PropsWithChildren } from 'react';
-import { useLocation } from 'react-router-dom';
+import React, { PropsWithChildren, useEffect, useMemo } from 'react';
+import { useLocation, useNavigate } from 'react-router-dom';
import NavBar from 'components/NavBar/NavBar';
import * as S from 'components/PageContainer/PageContainer.styled';
import Nav from 'components/Nav/Nav';
import useBoolean from 'lib/hooks/useBoolean';
+import { clusterNewConfigPath } from 'lib/paths';
+import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
+import { useClusters } from 'lib/hooks/api/clusters';
+import { ResourceType } from 'generated-sources';
+import { useGetUserInfo } from 'lib/hooks/api/roles';
const PageContainer: React.FC<
PropsWithChildren<{ setDarkMode: (value: boolean) => void }>
@@ -13,12 +18,30 @@ const PageContainer: React.FC<
toggle,
setFalse: closeSidebar,
} = useBoolean(false);
+ const clusters = useClusters();
+ const appInfo = React.useContext(GlobalSettingsContext);
const location = useLocation();
+ const navigate = useNavigate();
+ const { data: authInfo } = useGetUserInfo();
React.useEffect(() => {
closeSidebar();
}, [location, closeSidebar]);
+ const hasApplicationPermissions = useMemo(() => {
+ if (!authInfo?.rbacEnabled) return true;
+ return !!authInfo?.userInfo?.permissions.some(
+ (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
+ );
+ }, [authInfo]);
+
+ useEffect(() => {
+ if (!appInfo.hasDynamicConfig) return;
+ if (clusters?.data?.length !== 0) return;
+ if (!hasApplicationPermissions) return;
+ navigate(clusterNewConfigPath);
+ }, [clusters?.data, appInfo.hasDynamicConfig]);
+
return (
<>
<NavBar onBurgerClick={toggle} setDarkMode={setDarkMode} />
diff --git a/kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx b/kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx
index 25f8fe0a2ab..ca91cd20783 100644
--- a/kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx
+++ b/kafka-ui-react-app/src/components/PageContainer/__tests__/PageContainer.spec.tsx
@@ -4,21 +4,24 @@ import userEvent from '@testing-library/user-event';
import { render } from 'lib/testHelpers';
import PageContainer from 'components/PageContainer/PageContainer';
import { useClusters } from 'lib/hooks/api/clusters';
+import { Cluster, ServerStatus } from 'generated-sources';
const burgerButtonOptions = { name: 'burger' };
-jest.mock('lib/hooks/api/clusters', () => ({
- ...jest.requireActual('lib/hooks/api/roles'),
- useClusters: jest.fn(),
-}));
-
jest.mock('components/Version/Version', () => () => <div>Version</div>);
-
+interface DataType {
+ data: Cluster[] | undefined;
+}
+jest.mock('lib/hooks/api/clusters');
+const mockedNavigate = jest.fn();
+jest.mock('react-router-dom', () => ({
+ ...jest.requireActual('react-router-dom'),
+ useNavigate: () => mockedNavigate,
+}));
describe('Page Container', () => {
- beforeEach(() => {
- (useClusters as jest.Mock).mockImplementation(() => ({
- isSuccess: false,
- }));
+ const renderComponent = (hasDynamicConfig: boolean, data: DataType) => {
+ const useClustersMock = useClusters as jest.Mock;
+ useClustersMock.mockReturnValue(data);
Object.defineProperty(window, 'matchMedia', {
writable: true,
value: jest.fn().mockImplementation(() => ({
@@ -26,15 +29,18 @@ describe('Page Container', () => {
addListener: jest.fn(),
})),
});
-
render(
<PageContainer setDarkMode={jest.fn()}>
<div>child</div>
- </PageContainer>
+ </PageContainer>,
+ {
+ globalSettings: { hasDynamicConfig },
+ }
);
- });
+ };
it('handle burger click correctly', async () => {
+ renderComponent(false, { data: undefined });
const burger = within(screen.getByLabelText('Page Header')).getByRole(
'button',
burgerButtonOptions
@@ -49,6 +55,31 @@ describe('Page Container', () => {
});
it('render the inner container', async () => {
+ renderComponent(false, { data: undefined });
expect(screen.getByText('child')).toBeInTheDocument();
});
+
+ describe('Redirect to the Wizard page', () => {
+ it('redirects to new cluster configuration page if there are no clusters and dynamic config is enabled', async () => {
+ await renderComponent(true, { data: [] });
+
+ expect(mockedNavigate).toHaveBeenCalled();
+ });
+
+ it('should not navigate to new cluster config page when there are clusters', async () => {
+ await renderComponent(true, {
+ data: [{ name: 'Cluster 1', status: ServerStatus.ONLINE }],
+ });
+
+ expect(mockedNavigate).not.toHaveBeenCalled();
+ });
+
+ it('should not navigate to new cluster config page when there are no clusters and hasDynamicConfig is false', async () => {
+ await renderComponent(false, {
+ data: [],
+ });
+
+ expect(mockedNavigate).not.toHaveBeenCalled();
+ });
+ });
});
| diff --git a/kafka-ui-react-app/src/lib/testHelpers.tsx b/kafka-ui-react-app/src/lib/testHelpers.tsx
index 508904d1466..06fcfcad495 100644
--- a/kafka-ui-react-app/src/lib/testHelpers.tsx
+++ b/kafka-ui-react-app/src/lib/testHelpers.tsx
@@ -39,6 +39,9 @@ interface CustomRenderOptions extends Omit<RenderOptions, 'wrapper'> {
roles?: RolesType;
rbacFlag: boolean;
};
+ globalSettings?: {
+ hasDynamicConfig: boolean;
+ };
}
interface WithRouteProps {
@@ -111,6 +114,7 @@ const customRender = (
}),
initialEntries,
userInfo,
+ globalSettings,
...renderOptions
}: CustomRenderOptions = {}
) => {
@@ -119,7 +123,9 @@ const customRender = (
children,
}) => (
<TestQueryClientProvider>
- <GlobalSettingsContext.Provider value={{ hasDynamicConfig: false }}>
+ <GlobalSettingsContext.Provider
+ value={globalSettings || { hasDynamicConfig: false }}
+ >
<ThemeProvider theme={theme}>
<TestUserInfoProvider data={userInfo}>
<ConfirmContextProvider>
| test | test | 2023-06-02T14:27:11 | "2023-03-02T08:52:28Z" | Haarolean | train |
provectus/kafka-ui/3379_3656 | provectus/kafka-ui | provectus/kafka-ui/3379 | provectus/kafka-ui/3656 | [
"connected"
] | c743067ffa082a24fba83e81b4a1007c347dbe8c | 328d91de8b84f5c808d2f11a0fe75eec44453894 | [] | [] | "2023-04-11T12:54:30Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/accepted",
"status/pending-frontend",
"area/ux",
"area/messages"
] | Implement a filter evaluator | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java
index 4311fbd8d2c..00eae8a7c71 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/MessagesController.java
@@ -15,6 +15,8 @@
import com.provectus.kafka.ui.model.SeekDirectionDTO;
import com.provectus.kafka.ui.model.SeekTypeDTO;
import com.provectus.kafka.ui.model.SerdeUsageDTO;
+import com.provectus.kafka.ui.model.SmartFilterTestExecutionDTO;
+import com.provectus.kafka.ui.model.SmartFilterTestExecutionResultDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.model.TopicSerdeSuggestionDTO;
import com.provectus.kafka.ui.model.rbac.AccessContext;
@@ -70,6 +72,14 @@ public Mono<ResponseEntity<Void>> deleteTopicMessages(
).doOnEach(sig -> auditService.audit(context, sig));
}
+ @Override
+ public Mono<ResponseEntity<SmartFilterTestExecutionResultDTO>> executeSmartFilterTest(
+ Mono<SmartFilterTestExecutionDTO> smartFilterTestExecutionDto, ServerWebExchange exchange) {
+ return smartFilterTestExecutionDto
+ .map(MessagesService::execSmartFilterTest)
+ .map(ResponseEntity::ok);
+ }
+
@Override
public Mono<ResponseEntity<Flux<TopicMessageEventDTO>>> getTopicMessages(String clusterName,
String topicName,
@@ -188,4 +198,8 @@ public Mono<ResponseEntity<TopicSerdeSuggestionDTO>> getSerdes(String clusterNam
.map(ResponseEntity::ok)
);
}
+
+
+
+
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
index 625784f8ee8..dcc122ba282 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java
@@ -14,11 +14,16 @@
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.MessageFilterTypeDTO;
import com.provectus.kafka.ui.model.SeekDirectionDTO;
+import com.provectus.kafka.ui.model.SmartFilterTestExecutionDTO;
+import com.provectus.kafka.ui.model.SmartFilterTestExecutionResultDTO;
import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.ProducerRecordCreator;
import com.provectus.kafka.ui.util.SslPropertiesUtil;
+import java.time.Instant;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -81,6 +86,40 @@ private Mono<TopicDescription> withExistingTopic(KafkaCluster cluster, String to
.switchIfEmpty(Mono.error(new TopicNotFoundException()));
}
+ public static SmartFilterTestExecutionResultDTO execSmartFilterTest(SmartFilterTestExecutionDTO execData) {
+ Predicate<TopicMessageDTO> predicate;
+ try {
+ predicate = MessageFilters.createMsgFilter(
+ execData.getFilterCode(),
+ MessageFilterTypeDTO.GROOVY_SCRIPT
+ );
+ } catch (Exception e) {
+ log.info("Smart filter '{}' compilation error", execData.getFilterCode(), e);
+ return new SmartFilterTestExecutionResultDTO()
+ .error("Compilation error : " + e.getMessage());
+ }
+ try {
+ var result = predicate.test(
+ new TopicMessageDTO()
+ .key(execData.getKey())
+ .content(execData.getValue())
+ .headers(execData.getHeaders())
+ .offset(execData.getOffset())
+ .partition(execData.getPartition())
+ .timestamp(
+ Optional.ofNullable(execData.getTimestampMs())
+ .map(ts -> OffsetDateTime.ofInstant(Instant.ofEpochMilli(ts), ZoneOffset.UTC))
+ .orElse(null))
+ );
+ return new SmartFilterTestExecutionResultDTO()
+ .result(result);
+ } catch (Exception e) {
+ log.info("Smart filter {} execution error", execData, e);
+ return new SmartFilterTestExecutionResultDTO()
+ .error("Execution error : " + e.getMessage());
+ }
+ }
+
public Mono<Void> deleteTopicMessages(KafkaCluster cluster, String topicName,
List<Integer> partitionsToInclude) {
return withExistingTopic(cluster, topicName)
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 3774a631f79..e15425a45ec 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -625,6 +625,25 @@ paths:
schema:
$ref: '#/components/schemas/TopicSerdeSuggestion'
+ /api/smartfilters/testexecutions:
+ put:
+ tags:
+ - Messages
+ summary: executeSmartFilterTest
+ operationId: executeSmartFilterTest
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SmartFilterTestExecution'
+ responses:
+ 200:
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/SmartFilterTestExecutionResult'
+
/api/clusters/{clusterName}/topics/{topicName}/messages:
get:
@@ -2584,6 +2603,37 @@ components:
items:
$ref: '#/components/schemas/ConsumerGroup'
+ SmartFilterTestExecution:
+ type: object
+ required: [filterCode]
+ properties:
+ filterCode:
+ type: string
+ key:
+ type: string
+ value:
+ type: string
+ headers:
+ type: object
+ additionalProperties:
+ type: string
+ partition:
+ type: integer
+ offset:
+ type: integer
+ format: int64
+ timestampMs:
+ type: integer
+ format: int64
+
+ SmartFilterTestExecutionResult:
+ type: object
+ properties:
+ result:
+ type: boolean
+ error:
+ type: string
+
CreateTopicMessage:
type: object
properties:
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java
index 75a69adec71..cb50c0eb818 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/MessagesServiceTest.java
@@ -1,5 +1,8 @@
package com.provectus.kafka.ui.service;
+import static com.provectus.kafka.ui.service.MessagesService.execSmartFilterTest;
+import static org.assertj.core.api.Assertions.assertThat;
+
import com.provectus.kafka.ui.AbstractIntegrationTest;
import com.provectus.kafka.ui.exception.TopicNotFoundException;
import com.provectus.kafka.ui.model.ConsumerPosition;
@@ -7,11 +10,13 @@
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.model.SeekDirectionDTO;
import com.provectus.kafka.ui.model.SeekTypeDTO;
+import com.provectus.kafka.ui.model.SmartFilterTestExecutionDTO;
import com.provectus.kafka.ui.model.TopicMessageDTO;
import com.provectus.kafka.ui.model.TopicMessageEventDTO;
import com.provectus.kafka.ui.producer.KafkaTestProducer;
import com.provectus.kafka.ui.serdes.builtin.StringSerde;
import java.util.List;
+import java.util.Map;
import java.util.UUID;
import org.apache.kafka.clients.admin.NewTopic;
import org.junit.jupiter.api.BeforeEach;
@@ -91,4 +96,40 @@ void maskingAppliedOnConfiguredClusters() throws Exception {
}
}
+ @Test
+ void execSmartFilterTestReturnsExecutionResult() {
+ var params = new SmartFilterTestExecutionDTO()
+ .filterCode("key != null && value != null && headers != null && timestampMs != null && offset != null")
+ .key("1234")
+ .value("{ \"some\" : \"value\" } ")
+ .headers(Map.of("h1", "hv1"))
+ .offset(12345L)
+ .timestampMs(System.currentTimeMillis())
+ .partition(1);
+ assertThat(execSmartFilterTest(params).getResult()).isTrue();
+
+ params.setFilterCode("return false");
+ assertThat(execSmartFilterTest(params).getResult()).isFalse();
+ }
+
+ @Test
+ void execSmartFilterTestReturnsErrorOnFilterApplyError() {
+ var result = execSmartFilterTest(
+ new SmartFilterTestExecutionDTO()
+ .filterCode("return 1/0")
+ );
+ assertThat(result.getResult()).isNull();
+ assertThat(result.getError()).containsIgnoringCase("execution error");
+ }
+
+ @Test
+ void execSmartFilterTestReturnsErrorOnFilterCompilationError() {
+ var result = execSmartFilterTest(
+ new SmartFilterTestExecutionDTO()
+ .filterCode("this is invalid groovy syntax = 1")
+ );
+ assertThat(result.getResult()).isNull();
+ assertThat(result.getError()).containsIgnoringCase("Compilation error");
+ }
+
}
| val | test | 2023-06-21T10:02:27 | "2023-02-21T08:14:46Z" | Haarolean | train |
|
provectus/kafka-ui/2722_3660 | provectus/kafka-ui | provectus/kafka-ui/2722 | provectus/kafka-ui/3660 | [
"connected"
] | fb515871cba27686037d06e5a8dbaa417c681732 | 1b2827fb2ffd8b0890960845dfaab2ab1f7ebf2e | [
"@Haarolean, can u assign on me?",
"> @Haarolean, can u assign on me?\r\n\r\nsure, go ahead :)",
"@corners2wall any luck?",
"@Haarolean not started",
"@corners2wall are you planning to? We have a lot of people wishing to participate in hacktoberfest and we don't have that many frontend tasks available :)",
"@Haarolean yeap, I planning resolve this issue"
] | [
"do we need this `useMemo` ?",
"you are right we do not need any useMemo ",
"this will invalidate everything and everything , if we can do more specific we need to specify the query keys.",
"removed invalidateQueries from useConnectors and set it in useUpdateConnectorState with query keys "
] | "2023-04-12T16:19:31Z" | [
"type/enhancement",
"good first issue",
"scope/frontend",
"status/accepted"
] | Add KC quick actions into the sandwich menu | Add the following actions into this menu:
<img width="303" alt="image" src="https://user-images.githubusercontent.com/1494347/195043659-57b84a7f-c139-4ba0-bc34-8adefd149bf9.png">
1. restart connector
2. restart all tasks
3. restart failed tasks
All the actions are available in a connector overview, we can copy them | [
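A trimmed-down sketch of such a cell, reusing the connector-overview hook and actions (`useUpdateConnectorState`, `ConnectorAction.RESTART`, `RESTART_ALL_TASKS`, `RESTART_FAILED_TASKS`); the actual patch below also adds a Resume item for paused connectors, permission-aware dropdown items, and disables the items while a mutation is in flight:

```tsx
// Sketch: dropdown items for the connectors list that reuse the overview's state actions.
import React from 'react';
import { ConnectorAction } from 'generated-sources';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
import { useUpdateConnectorState } from 'lib/hooks/api/kafkaConnect';

interface Props {
  clusterName: string;
  connectName: string;
  connectorName: string;
}

const QuickActions: React.FC<Props> = ({ clusterName, connectName, connectorName }) => {
  const stateMutation = useUpdateConnectorState({ clusterName, connectName, connectorName });

  return (
    <Dropdown>
      <DropdownItem onClick={() => stateMutation.mutateAsync(ConnectorAction.RESTART)}>
        Restart Connector
      </DropdownItem>
      <DropdownItem onClick={() => stateMutation.mutateAsync(ConnectorAction.RESTART_ALL_TASKS)}>
        Restart All Tasks
      </DropdownItem>
      <DropdownItem onClick={() => stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS)}>
        Restart Failed Tasks
      </DropdownItem>
    </Dropdown>
  );
};

export default QuickActions;
```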
"kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx",
"kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx",
"kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts",
"kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts"
] | [
"kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx",
"kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx",
"kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts",
"kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx b/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx
index 30b3df8a56d..5b3a24cdb70 100644
--- a/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx
+++ b/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx
@@ -1,26 +1,41 @@
import React from 'react';
-import { FullConnectorInfo } from 'generated-sources';
+import {
+ Action,
+ ConnectorAction,
+ ConnectorState,
+ FullConnectorInfo,
+ ResourceType,
+} from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
import { ClusterNameRoute } from 'lib/paths';
import useAppParams from 'lib/hooks/useAppParams';
import { Dropdown, DropdownItem } from 'components/common/Dropdown';
-import { useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
+import {
+ useDeleteConnector,
+ useUpdateConnectorState,
+} from 'lib/hooks/api/kafkaConnect';
import { useConfirm } from 'lib/hooks/useConfirm';
+import { useIsMutating } from '@tanstack/react-query';
+import { ActionDropdownItem } from 'components/common/ActionComponent';
const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
row,
}) => {
- const { connect, name } = row.original;
-
+ const { connect, name, status } = row.original;
const { clusterName } = useAppParams<ClusterNameRoute>();
-
+ const mutationsNumber = useIsMutating();
+ const isMutating = mutationsNumber > 0;
const confirm = useConfirm();
const deleteMutation = useDeleteConnector({
clusterName,
connectName: connect,
connectorName: name,
});
-
+ const stateMutation = useUpdateConnectorState({
+ clusterName,
+ connectName: connect,
+ connectorName: name,
+ });
const handleDelete = () => {
confirm(
<>
@@ -31,8 +46,66 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
}
);
};
+ // const stateMutation = useUpdateConnectorState(routerProps);
+ const resumeConnectorHandler = () =>
+ stateMutation.mutateAsync(ConnectorAction.RESUME);
+ const restartConnectorHandler = () =>
+ stateMutation.mutateAsync(ConnectorAction.RESTART);
+
+ const restartAllTasksHandler = () =>
+ stateMutation.mutateAsync(ConnectorAction.RESTART_ALL_TASKS);
+
+ const restartFailedTasksHandler = () =>
+ stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);
+
return (
<Dropdown>
+ {status.state === ConnectorState.PAUSED && (
+ <ActionDropdownItem
+ onClick={resumeConnectorHandler}
+ disabled={isMutating}
+ permission={{
+ resource: ResourceType.CONNECT,
+ action: Action.EDIT,
+ value: name,
+ }}
+ >
+ Resume
+ </ActionDropdownItem>
+ )}
+ <ActionDropdownItem
+ onClick={restartConnectorHandler}
+ disabled={isMutating}
+ permission={{
+ resource: ResourceType.CONNECT,
+ action: Action.EDIT,
+ value: name,
+ }}
+ >
+ Restart Connector
+ </ActionDropdownItem>
+ <ActionDropdownItem
+ onClick={restartAllTasksHandler}
+ disabled={isMutating}
+ permission={{
+ resource: ResourceType.CONNECT,
+ action: Action.EDIT,
+ value: name,
+ }}
+ >
+ Restart All Tasks
+ </ActionDropdownItem>
+ <ActionDropdownItem
+ onClick={restartFailedTasksHandler}
+ disabled={isMutating}
+ permission={{
+ resource: ResourceType.CONNECT,
+ action: Action.EDIT,
+ value: name,
+ }}
+ >
+ Restart Failed Tasks
+ </ActionDropdownItem>
<DropdownItem onClick={handleDelete} danger>
Remove Connector
</DropdownItem>
diff --git a/kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx
index 9de28f38ffd..82b4aab2126 100644
--- a/kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx
+++ b/kafka-ui-react-app/src/components/Connect/List/__tests__/List.spec.tsx
@@ -9,7 +9,11 @@ import { screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { render, WithRoute } from 'lib/testHelpers';
import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths';
-import { useConnectors, useDeleteConnector } from 'lib/hooks/api/kafkaConnect';
+import {
+ useConnectors,
+ useDeleteConnector,
+ useUpdateConnectorState,
+} from 'lib/hooks/api/kafkaConnect';
const mockedUsedNavigate = jest.fn();
const mockDelete = jest.fn();
@@ -22,6 +26,7 @@ jest.mock('react-router-dom', () => ({
jest.mock('lib/hooks/api/kafkaConnect', () => ({
useConnectors: jest.fn(),
useDeleteConnector: jest.fn(),
+ useUpdateConnectorState: jest.fn(),
}));
const clusterName = 'local';
@@ -42,6 +47,10 @@ describe('Connectors List', () => {
(useConnectors as jest.Mock).mockImplementation(() => ({
data: connectors,
}));
+ const restartConnector = jest.fn();
+ (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
+ mutateAsync: restartConnector,
+ }));
});
it('renders', async () => {
diff --git a/kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts b/kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts
index f63fc5fe2a8..d7db888a096 100644
--- a/kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Dropdown/Dropdown.styled.ts
@@ -70,7 +70,7 @@ export const DropdownButton = styled.button`
`;
export const DangerItem = styled.div`
- color: ${({ theme: { dropdown } }) => dropdown.item.color.normal};
+ color: ${({ theme: { dropdown } }) => dropdown.item.color.danger};
`;
export const DropdownItemHint = styled.div`
diff --git a/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts b/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts
index b8a17c558d3..1d01d491954 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts
+++ b/kafka-ui-react-app/src/lib/hooks/api/kafkaConnect.ts
@@ -76,7 +76,8 @@ export function useUpdateConnectorState(props: UseConnectorProps) {
return useMutation(
(action: ConnectorAction) => api.updateConnectorState({ ...props, action }),
{
- onSuccess: () => client.invalidateQueries(connectorKey(props)),
+ onSuccess: () =>
+ client.invalidateQueries(['clusters', props.clusterName, 'connectors']),
}
);
}
| null | train | test | 2023-04-24T11:56:01 | "2022-10-11T08:49:01Z" | Haarolean | train |
provectus/kafka-ui/3554_3661 | provectus/kafka-ui | provectus/kafka-ui/3554 | provectus/kafka-ui/3661 | [
"keyword_pr_to_issue"
] | 005e74f2480f3ea961ec0c3d9b4a633b666df552 | c148f112a404815d6645fa97209199eced054728 | [
"Hello there nadig-aditya! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hi,\r\n\r\nI'm not able to reproduce the issue, vid attached.\r\nPlease attach the similar one with steps to reproduce.\r\n\r\nhttps://user-images.githubusercontent.com/1494347/228045696-0238d4f0-9019-415c-9ba5-1823fb39e756.mov\r\n\r\n",
"i observed issue us happening only for Kafka version 2.2.1 (MSK cluster) and not for later versions",
"This issue has been automatically marked as stale because no requested feedback has been provided. It will be closed if no further activity occurs. Thank you for your contributions.",
"@nadig-aditya can you do a screen record?",
"> @nadig-aditya can you do a screen record?\r\n\r\nHi,\r\nCant provider recording at this moment, but have same issue with Kafka cluster 2.1, returns 404 on topic edit settings\r\nNo errors on service side, yet UI complains:\r\n\r\n\r\n\r\n\r\nKafka cluster 3.2-IV0 works fine, downgrading kafka-ui to 0.4.0 works for both kafka clusters versions",
"It looks like its because of message format version: https://github.com/provectus/kafka-ui/blob/master/kafka-ui-react-app/src/lib/constants.ts#L31\r\n\r\nMy kafka 2.1 cluster uses **message.format.version**: 2.1-IV2\r\nwhich does not have **max.compaction.lag.ms** described in: https://github.com/provectus/kafka-ui/blob/master/kafka-ui-react-app/src/lib/constants.ts#L33\r\n\r\n",
"Thanks for the clarification. To be resolved by #3661."
] | [] | "2023-04-12T17:05:30Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed",
"severity/medium"
] | Unable to Edit Setting of a topic from kafka ui getting 404 | <!--
We will close the issue without further explanation if you don't follow this template and don't provide the information requested within this template.
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
<!--
Please follow the naming conventions for bugs:
<Feature/Area/Scope> : <Compact, but specific problem summary>
Avoid generic titles, like βTopics: incorrect layout of message sorting drop-down listβ. Better use something like: βTopics: Message sorting drop-down list overlaps the "Submit" buttonβ.
-->
**Describe the bug** (Actual behavior)
<!--(A clear and concise description of what the bug is.Use a list, if there is more than one problem)-->
Getting 404 Error page when i click on Edit setting button for a topic.
**Expected behavior**
<!--(A clear and concise description of what you expected to happen.)-->
We should be allowed to update setting for the topic.
I tried to CLI was able to update the setting for the topic.
**Set up**
<!--
WE MIGHT CLOSE THE ISSUE without further explanation IF YOU DON'T PROVIDE THIS INFORMATION.
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
-->
1. App version - V0.6.0
2. Helm chart version - 0.6.0
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
bring up kafka ui and crate a topic and try to edit setting for that topic from ui
1.
**Screenshots**
<!--
(If applicable, add screenshots to help explain your problem)
<img width="1350" alt="image" src="https://user-images.githubusercontent.com/103026073/227729847-bc0f05f5-2aff-4673-9c12-de1d7cfabf68.png">
-->
**Additional context**
<!--
Add any other context about the problem here. E.g.:
1. Are there any alternative scenarios (different data/methods/configuration/setup) you have tried?
Were they successfull or same issue occured? Please provide steps as well.
3. Related issues (if there are any).
4. Logs (if available)
5. Is there any serious impact or behaviour on the end-user because of this issue, that can be overlooked?
-->
| [
"kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx
index 7edbc5426dd..5ba51ad2890 100644
--- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx
+++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx
@@ -49,7 +49,7 @@ const CustomParamField: React.FC<Props> = ({
label: option,
disabled:
(config &&
- config[option].source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
+ config[option]?.source !== ConfigSource.DYNAMIC_TOPIC_CONFIG) ||
existingFields.includes(option),
}));
| null | train | test | 2023-04-10T17:35:14 | "2023-03-25T16:31:42Z" | nadig-aditya | train |
provectus/kafka-ui/2962_3664 | provectus/kafka-ui | provectus/kafka-ui/2962 | provectus/kafka-ui/3664 | [
"connected"
] | 744bdb32a310306eefe8641923d712db697b1c70 | c7a7921b8242b819ebb80769eb3966fd69c26329 | [
"This would be a nice feature. In a wild cluster you can't specify the exact field name you could want to mask or replace. Having just a kind of blacklisted words list that are searched in field name could be a great feature and easy to use.",
"It would be great if this feature can also be used to provide a regex that says \"exclude all fields except certain ones I care about\". That way, it would function more so as an allowlist. "
] | [] | "2023-04-13T15:22:24Z" | [
"type/enhancement",
"good first issue",
"scope/backend",
"status/accepted"
] | Topics - Data masking: Support regexp for fields | <!--
Don't forget to check for existing issues/discussions regarding your proposal. We might already have it.
https://github.com/provectus/kafka-ui/issues
https://github.com/provectus/kafka-ui/discussions
-->
**Describe the bug**
<!--(A clear and concise description of what the bug is.)-->
Masking (REMOVE | REPLACE | MASK) configuration for the field names in message key/value does not support regular expressions, e.g.:
```
KAFKA_CLUSTERS_0_MASKING_0_FIELDS_0: "personal.data.*"
KAFKA_CLUSTERS_0_MASKING_0_FIELDS_1: ".*password.*"
```
So the topic key and value patterns could be set as below and applied to all the topics within a cluster
```
KAFKA_CLUSTERS_0_MASKING_0_TOPICKEYSPATTERN: ".*"
KAFKA_CLUSTERS_0_MASKING_0_TOPICVALUESPATTERN: ".*"
```
**Set up**
<!--
How do you run the app? Please provide as much info as possible:
1. App version (docker image version or check commit hash in the top left corner in UI)
2. Helm chart version, if you use one
3. Any IAAC configs
We might close the issue without further explanation if you don't provide such information.
-->
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Add an environment variable to set up the field value for masking, using the RegEx.
2. Install the Kafka UI
**Actual behavior**
Masking is applied only to field names that exactly match the configured value, with the '.' and '*' characters treated as literals rather than as regex.
**Expected behavior**
<!--
(A clear and concise description of what you expected to happen)
-->
Ability to use regular expressions to the field names as well, for something obviously confidential, or fields with the same prefix or suffix
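For illustration, a masking rule selecting fields by regex could then be configured roughly like this (the `fieldsNamePattern` property is the one the patch below introduces; the flattened environment-variable spelling is an assumption based on Spring's relaxed binding):

```yaml
kafka:
  clusters:
    - name: local
      masking:
        - type: MASK
          # regex over field names instead of an explicit field list
          fieldsNamePattern: 'personal\.data\..*|.*password.*'
          topicKeysPattern: '.*'
          topicValuesPattern: '.*'
# assumed env-var equivalent:
# KAFKA_CLUSTERS_0_MASKING_0_FIELDSNAMEPATTERN: 'personal\.data\..*|.*password.*'
```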
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelector.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelectorTest.java",
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/MaskTest.java",
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/RemoveTest.java",
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/ReplaceTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
index 1d5cc5393c2..64ec894cd55 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java
@@ -131,8 +131,9 @@ public static class KeystoreConfig {
@Data
public static class Masking {
Type type;
- List<String> fields; //if null or empty list - policy will be applied to all fields
- List<String> pattern; //used when type=MASK
+ List<String> fields;
+ String fieldsNamePattern;
+ List<String> maskingCharsReplacement; //used when type=MASK
String replacement; //used when type=REPLACE
String topicKeysPattern;
String topicValuesPattern;
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
index 78e74f33321..ad1c2da31e5 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/DataMasking.java
@@ -44,7 +44,7 @@ boolean shouldBeApplied(String topic, Serde.Target target) {
public static DataMasking create(@Nullable List<ClustersProperties.Masking> config) {
return new DataMasking(
Optional.ofNullable(config).orElse(List.of()).stream().map(property -> {
- Preconditions.checkNotNull(property.getType(), "masking type not specifed");
+ Preconditions.checkNotNull(property.getType(), "masking type not specified");
Preconditions.checkArgument(
StringUtils.isNotEmpty(property.getTopicKeysPattern())
|| StringUtils.isNotEmpty(property.getTopicValuesPattern()),
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelector.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelector.java
new file mode 100644
index 00000000000..99563943984
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelector.java
@@ -0,0 +1,28 @@
+package com.provectus.kafka.ui.service.masking.policies;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.util.regex.Pattern;
+import org.springframework.util.CollectionUtils;
+import org.springframework.util.StringUtils;
+
+interface FieldsSelector {
+
+ static FieldsSelector create(ClustersProperties.Masking property) {
+ if (StringUtils.hasText(property.getFieldsNamePattern()) && !CollectionUtils.isEmpty(property.getFields())) {
+ throw new ValidationException("You can't provide both fieldNames & fieldsNamePattern for masking");
+ }
+ if (StringUtils.hasText(property.getFieldsNamePattern())) {
+ Pattern pattern = Pattern.compile(property.getFieldsNamePattern());
+ return f -> pattern.matcher(f).matches();
+ }
+ if (!CollectionUtils.isEmpty(property.getFields())) {
+ return f -> property.getFields().contains(f);
+ }
+ //no pattern, no field names - mean all fields should be masked
+ return fieldName -> true;
+ }
+
+ boolean shouldBeMasked(String fieldName);
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
index dbbc5d131a7..e6a469f2c03 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Mask.java
@@ -15,8 +15,8 @@ class Mask extends MaskingPolicy {
private final UnaryOperator<String> masker;
- Mask(List<String> fieldNames, List<String> maskingChars) {
- super(fieldNames);
+ Mask(FieldsSelector fieldsSelector, List<String> maskingChars) {
+ super(fieldsSelector);
this.masker = createMasker(maskingChars);
}
@@ -38,22 +38,13 @@ private static UnaryOperator<String> createMasker(List<String> maskingChars) {
for (int i = 0; i < input.length(); i++) {
int cp = input.codePointAt(i);
switch (Character.getType(cp)) {
- case Character.SPACE_SEPARATOR:
- case Character.LINE_SEPARATOR:
- case Character.PARAGRAPH_SEPARATOR:
- sb.appendCodePoint(cp); // keeping separators as-is
- break;
- case Character.UPPERCASE_LETTER:
- sb.append(maskingChars.get(0));
- break;
- case Character.LOWERCASE_LETTER:
- sb.append(maskingChars.get(1));
- break;
- case Character.DECIMAL_DIGIT_NUMBER:
- sb.append(maskingChars.get(2));
- break;
- default:
- sb.append(maskingChars.get(3));
+ case Character.SPACE_SEPARATOR,
+ Character.LINE_SEPARATOR,
+ Character.PARAGRAPH_SEPARATOR -> sb.appendCodePoint(cp); // keeping separators as-is
+ case Character.UPPERCASE_LETTER -> sb.append(maskingChars.get(0));
+ case Character.LOWERCASE_LETTER -> sb.append(maskingChars.get(1));
+ case Character.DECIMAL_DIGIT_NUMBER -> sb.append(maskingChars.get(2));
+ default -> sb.append(maskingChars.get(3));
}
}
return sb.toString();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
index 7a753382102..9b80da0cb18 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/MaskingPolicy.java
@@ -2,46 +2,36 @@
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.provectus.kafka.ui.config.ClustersProperties;
-import java.util.List;
import lombok.RequiredArgsConstructor;
@RequiredArgsConstructor
public abstract class MaskingPolicy {
-
public static MaskingPolicy create(ClustersProperties.Masking property) {
- List<String> fields = property.getFields() == null
- ? List.of() // empty list means that policy will be applied to all fields
- : property.getFields();
- switch (property.getType()) {
- case REMOVE:
- return new Remove(fields);
- case REPLACE:
- return new Replace(
- fields,
- property.getReplacement() == null
- ? Replace.DEFAULT_REPLACEMENT
- : property.getReplacement()
- );
- case MASK:
- return new Mask(
- fields,
- property.getPattern() == null
- ? Mask.DEFAULT_PATTERN
- : property.getPattern()
- );
- default:
- throw new IllegalStateException("Unknown policy type: " + property.getType());
- }
+ FieldsSelector fieldsSelector = FieldsSelector.create(property);
+ return switch (property.getType()) {
+ case REMOVE -> new Remove(fieldsSelector);
+ case REPLACE -> new Replace(
+ fieldsSelector,
+ property.getReplacement() == null
+ ? Replace.DEFAULT_REPLACEMENT
+ : property.getReplacement()
+ );
+ case MASK -> new Mask(
+ fieldsSelector,
+ property.getMaskingCharsReplacement() == null
+ ? Mask.DEFAULT_PATTERN
+ : property.getMaskingCharsReplacement()
+ );
+ };
}
//----------------------------------------------------------------
- // empty list means policy will be applied to all fields
- private final List<String> fieldNames;
+ private final FieldsSelector fieldsSelector;
protected boolean fieldShouldBeMasked(String fieldName) {
- return fieldNames.isEmpty() || fieldNames.contains(fieldName);
+ return fieldsSelector.shouldBeMasked(fieldName);
}
public abstract ContainerNode<?> applyToJsonContainer(ContainerNode<?> node);
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java
index eb38b0d3e3a..cc5cdd14159 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Remove.java
@@ -4,12 +4,12 @@
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ContainerNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
-import java.util.List;
+
class Remove extends MaskingPolicy {
- Remove(List<String> fieldNames) {
- super(fieldNames);
+ Remove(FieldsSelector fieldsSelector) {
+ super(fieldsSelector);
}
@Override
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
index 3af645cb111..1cf91793d22 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/masking/policies/Replace.java
@@ -6,7 +6,6 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.fasterxml.jackson.databind.node.TextNode;
import com.google.common.base.Preconditions;
-import java.util.List;
class Replace extends MaskingPolicy {
@@ -14,8 +13,8 @@ class Replace extends MaskingPolicy {
private final String replacement;
- Replace(List<String> fieldNames, String replacementString) {
- super(fieldNames);
+ Replace(FieldsSelector fieldsSelector, String replacementString) {
+ super(fieldsSelector);
this.replacement = Preconditions.checkNotNull(replacementString);
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 78c7cf3bf52..2bafb05faaa 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -3632,7 +3632,9 @@ components:
type: array
items:
type: string
- pattern:
+ fieldsNamePattern:
+ type: string
+ maskingCharsReplacement:
type: array
items:
type: string
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelectorTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelectorTest.java
new file mode 100644
index 00000000000..497a9365d75
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/FieldsSelectorTest.java
@@ -0,0 +1,53 @@
+package com.provectus.kafka.ui.service.masking.policies;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+
+import com.provectus.kafka.ui.config.ClustersProperties;
+import com.provectus.kafka.ui.exception.ValidationException;
+import java.util.List;
+import org.junit.jupiter.api.Test;
+
+class FieldsSelectorTest {
+
+ @Test
+ void selectsFieldsDueToProvidedPattern() {
+ var properties = new ClustersProperties.Masking();
+ properties.setFieldsNamePattern("f1|f2");
+
+ var selector = FieldsSelector.create(properties);
+ assertThat(selector.shouldBeMasked("f1")).isTrue();
+ assertThat(selector.shouldBeMasked("f2")).isTrue();
+ assertThat(selector.shouldBeMasked("doesNotMatchPattern")).isFalse();
+ }
+
+ @Test
+ void selectsFieldsDueToProvidedFieldNames() {
+ var properties = new ClustersProperties.Masking();
+ properties.setFields(List.of("f1", "f2"));
+
+ var selector = FieldsSelector.create(properties);
+ assertThat(selector.shouldBeMasked("f1")).isTrue();
+ assertThat(selector.shouldBeMasked("f2")).isTrue();
+ assertThat(selector.shouldBeMasked("notInAList")).isFalse();
+ }
+
+ @Test
+ void selectAllFieldsIfNoPatternAndNoNamesProvided() {
+ var properties = new ClustersProperties.Masking();
+
+ var selector = FieldsSelector.create(properties);
+ assertThat(selector.shouldBeMasked("anyPropertyName")).isTrue();
+ }
+
+ @Test
+ void throwsExceptionIfBothFieldListAndPatternProvided() {
+ var properties = new ClustersProperties.Masking();
+ properties.setFieldsNamePattern("f1|f2");
+ properties.setFields(List.of("f3", "f4"));
+
+ assertThatThrownBy(() -> FieldsSelector.create(properties))
+ .isInstanceOf(ValidationException.class);
+ }
+
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/MaskTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/MaskTest.java
index 9cb97012458..b33a26f3000 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/MaskTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/MaskTest.java
@@ -15,35 +15,35 @@
class MaskTest {
- private static final List<String> TARGET_FIELDS = List.of("id", "name");
+ private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final List<String> PATTERN = List.of("X", "x", "n", "-");
@ParameterizedTest
@MethodSource
- void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
- Mask policy = new Mask(fields, PATTERN);
+ void testApplyToJsonContainer(FieldsSelector selector, ContainerNode<?> original, ContainerNode<?> expected) {
+ Mask policy = new Mask(selector, PATTERN);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"nnn\", \"name\": { \"first\": \"Xxxxx\", \"surname\": \"Xxxxnnn-\"}}")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"nnn\", \"f2\": 234}, { \"name\": \"n-n\", \"f2\": 345} ]")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Xxxxnnn-\"}}")
),
Arguments.of(
- List.of(),
+ (FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"Xxxxx\", \"name\": \"Xxxxnnn-\"}}")
)
@@ -57,7 +57,7 @@ private static Stream<Arguments> testApplyToJsonContainer() {
"null, xxxx"
})
void testApplyToString(String original, String expected) {
- Mask policy = new Mask(List.of(), PATTERN);
+ Mask policy = new Mask(fieldName -> true, PATTERN);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/RemoveTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/RemoveTest.java
index 31ef4eb3c35..9393ea1c626 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/RemoveTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/RemoveTest.java
@@ -15,39 +15,39 @@
class RemoveTest {
- private static final List<String> TARGET_FIELDS = List.of("id", "name");
+ private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
@ParameterizedTest
@MethodSource
- void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
- var policy = new Remove(fields);
+ void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
+ var policy = new Remove(fieldsSelector);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{}")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"f2\": 234}, { \"f2\": 345} ]")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\"}}")
),
Arguments.of(
- List.of(),
+ (FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{}")
),
Arguments.of(
- List.of(),
+ (FieldsSelector) (fieldName -> true),
parse("[{ \"f1\": 123}, { \"f2\": \"1.2\"} ]"),
parse("[{}, {}]")
)
@@ -66,7 +66,7 @@ private static JsonNode parse(String str) {
"null, null"
})
void testApplyToString(String original, String expected) {
- var policy = new Remove(List.of());
+ var policy = new Remove(fieldName -> true);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
-}
\ No newline at end of file
+}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/ReplaceTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/ReplaceTest.java
index f3ac69747b8..9f2fcd90c4c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/ReplaceTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/masking/policies/ReplaceTest.java
@@ -15,35 +15,35 @@
class ReplaceTest {
- private static final List<String> TARGET_FIELDS = List.of("id", "name");
+ private static final FieldsSelector FIELDS_SELECTOR = fieldName -> List.of("id", "name").contains(fieldName);
private static final String REPLACEMENT_STRING = "***";
@ParameterizedTest
@MethodSource
- void testApplyToJsonContainer(List<String> fields, ContainerNode<?> original, ContainerNode<?> expected) {
- var policy = new Replace(fields, REPLACEMENT_STRING);
+ void testApplyToJsonContainer(FieldsSelector fieldsSelector, ContainerNode<?> original, ContainerNode<?> expected) {
+ var policy = new Replace(fieldsSelector, REPLACEMENT_STRING);
assertThat(policy.applyToJsonContainer(original)).isEqualTo(expected);
}
private static Stream<Arguments> testApplyToJsonContainer() {
return Stream.of(
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"id\": 123, \"name\": { \"first\": \"James\", \"surname\": \"Bond777!\"}}"),
parse("{ \"id\": \"***\", \"name\": { \"first\": \"***\", \"surname\": \"***\"}}")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("[{ \"id\": 123, \"f2\": 234}, { \"name\": \"1.2\", \"f2\": 345} ]"),
parse("[{ \"id\": \"***\", \"f2\": 234}, { \"name\": \"***\", \"f2\": 345} ]")
),
Arguments.of(
- TARGET_FIELDS,
+ FIELDS_SELECTOR,
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"Bond777!\"}}"),
parse("{ \"outer\": { \"f1\": \"James\", \"name\": \"***\"}}")
),
Arguments.of(
- List.of(),
+ (FieldsSelector) (fieldName -> true),
parse("{ \"outer\": { \"f1\": \"v1\", \"f2\": \"v2\", \"inner\" : {\"if1\": \"iv1\"}}}"),
parse("{ \"outer\": { \"f1\": \"***\", \"f2\": \"***\", \"inner\" : {\"if1\": \"***\"}}}}")
)
@@ -62,7 +62,7 @@ private static JsonNode parse(String str) {
"null, ***"
})
void testApplyToString(String original, String expected) {
- var policy = new Replace(List.of(), REPLACEMENT_STRING);
+ var policy = new Replace(fieldName -> true, REPLACEMENT_STRING);
assertThat(policy.applyToString(original)).isEqualTo(expected);
}
-}
\ No newline at end of file
+}
| train | test | 2023-05-01T01:56:28 | "2022-11-21T18:55:44Z" | BulatKha | train |
provectus/kafka-ui/3652_3667 | provectus/kafka-ui | provectus/kafka-ui/3652 | provectus/kafka-ui/3667 | [
"connected"
] | 696cde7dccd655e656c19c613bf54240e47ffded | 39aca05fe3a754dae09289aa57a20f4bef5a9811 | [] | [] | "2023-04-14T06:20:21Z" | [
"scope/QA",
"scope/AQA"
] | [e2e] Clear entered queue check | Autotest implementation for:
https://app.qase.io/project/KAFKAUI?case=276&previewMode=side&suite=8 | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java",
"kafka-ui-e2e-checks/src/test/resources/regression.xml",
"kafka-ui-e2e-checks/src/test/resources/sanity.xml",
"kafka-ui-e2e-checks/src/test/resources/smoke.xml"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
index fb2e0877e22..8bd7901a635 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java
@@ -37,9 +37,13 @@ public abstract class BasePage extends WebUtils {
protected String pageTitleFromHeader = "//h1[text()='%s']";
protected String pagePathFromHeader = "//a[text()='%s']/../h1";
+ protected boolean isSpinnerVisible(int... timeoutInSeconds) {
+ return isVisible(loadingSpinner, timeoutInSeconds);
+ }
+
protected void waitUntilSpinnerDisappear(int... timeoutInSeconds) {
log.debug("\nwaitUntilSpinnerDisappear");
- if (isVisible(loadingSpinner, timeoutInSeconds)) {
+ if (isSpinnerVisible(timeoutInSeconds)) {
loadingSpinner.shouldBe(Condition.disappear, Duration.ofSeconds(60));
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
index 7eb35d52f33..98980cef4d1 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlDbList.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.pages.ksqldb;
+import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$;
import static com.codeborne.selenide.Selenide.$x;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
@@ -10,12 +11,12 @@
import com.provectus.kafka.ui.pages.BasePage;
import com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs;
import io.qameta.allure.Step;
+import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import org.openqa.selenium.By;
public class KsqlDbList extends BasePage {
-
protected SelenideElement executeKsqlBtn = $x("//button[text()='Execute KSQL Request']");
protected SelenideElement tablesTab = $x("//nav[@role='navigation']/a[text()='Tables']");
protected SelenideElement streamsTab = $x("//nav[@role='navigation']/a[text()='Streams']");
@@ -76,9 +77,24 @@ public KsqlTablesGridItem(SelenideElement element) {
this.element = element;
}
+ private SelenideElement getNameElm() {
+ return element.$x("./td[1]");
+ }
+
@Step
public String getTableName() {
- return element.$x("./td[1]").getText().trim();
+ return getNameElm().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ getNameElm().shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
}
@Step
@@ -110,9 +126,24 @@ public KsqlStreamsGridItem(SelenideElement element) {
this.element = element;
}
+ private SelenideElement getNameElm() {
+ return element.$x("./td[1]");
+ }
+
@Step
public String getStreamName() {
- return element.$x("./td[1]").getText().trim();
+ return getNameElm().getText().trim();
+ }
+
+ @Step
+ public boolean isVisible() {
+ boolean isVisible = false;
+ try {
+ getNameElm().shouldBe(visible, Duration.ofMillis(500));
+ isVisible = true;
+ } catch (Throwable ignored) {
+ }
+ return isVisible;
}
@Step
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
index ab24cbe9abb..4ce282b6cc5 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
@@ -40,9 +40,14 @@ public KsqlQueryForm clickClearBtn() {
}
@Step
- public KsqlQueryForm clickExecuteBtn() {
+ public String getEnteredQuery() {
+ return queryAreaValue.getText().trim();
+ }
+
+ @Step
+ public KsqlQueryForm clickExecuteBtn(String query) {
clickByActions(executeBtn);
- if (queryAreaValue.getText().contains("EMIT CHANGES;")) {
+ if (query.contains("EMIT CHANGES")) {
loadingSpinner.shouldBe(Condition.visible);
} else {
waitUntilSpinnerDisappear();
@@ -66,19 +71,19 @@ public KsqlQueryForm clickClearResultsBtn() {
@Step
public KsqlQueryForm clickAddStreamProperty() {
- clickByJavaScript(addStreamPropertyBtn);
+ clickByActions(addStreamPropertyBtn);
return this;
}
@Step
public KsqlQueryForm setQuery(String query) {
queryAreaValue.shouldBe(Condition.visible).click();
- queryArea.setValue(query);
+ sendKeysByActions(queryArea, query);
return this;
}
@Step
- public KsqlQueryForm.KsqlResponseGridItem getTableByName(String name) {
+ public KsqlQueryForm.KsqlResponseGridItem getItemByName(String name) {
return initItems().stream()
.filter(e -> e.getName().equalsIgnoreCase(name))
.findFirst().orElseThrow();
@@ -114,16 +119,20 @@ public String getType() {
return element.$x("./td[1]").getText().trim();
}
+ private SelenideElement getNameElm() {
+ return element.$x("./td[2]");
+ }
+
@Step
public String getName() {
- return element.$x("./td[2]").scrollTo().getText().trim();
+ return getNameElm().scrollTo().getText().trim();
}
@Step
public boolean isVisible() {
boolean isVisible = false;
try {
- element.$x("./td[2]").shouldBe(visible, Duration.ofMillis(500));
+ getNameElm().shouldBe(visible, Duration.ofMillis(500));
isVisible = true;
} catch (Throwable ignored) {
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
index fef5ef654a0..a1b1523aa51 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/WebUtils.java
@@ -95,7 +95,7 @@ public static boolean isSelected(SelenideElement element, int... timeoutInSecond
return isSelected;
}
- public static boolean selectElement(SelenideElement element, boolean select) {
+ public static void selectElement(SelenideElement element, boolean select) {
if (select) {
if (!element.isSelected()) {
clickByJavaScript(element);
@@ -105,6 +105,5 @@ public static boolean selectElement(SelenideElement element, boolean select) {
clickByJavaScript(element);
}
}
- return true;
}
}
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
index b89a1d0cf76..d96bbb7f3a2 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
@@ -20,59 +20,52 @@ public class SmokeBacklog extends BaseManualTest {
public void testCaseA() {
}
- @Automation(state = TO_BE_AUTOMATED)
- @Suite(id = KSQL_DB_SUITE_ID)
- @QaseId(276)
- @Test
- public void testCaseB() {
- }
-
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(277)
@Test
- public void testCaseC() {
+ public void testCaseB() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(278)
@Test
- public void testCaseD() {
+ public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = KSQL_DB_SUITE_ID)
@QaseId(284)
@Test
- public void testCaseE() {
+ public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(331)
@Test
- public void testCaseF() {
+ public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
@QaseId(332)
@Test
- public void testCaseG() {
+ public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(335)
@Test
- public void testCaseH() {
+ public void testCaseG() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
@QaseId(336)
@Test
- public void testCaseI() {
+ public void testCaseH() {
}
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
index d8bda606dcd..c4bbe0def4c 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
@@ -1,14 +1,17 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
+import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.pages.ksqldb.models.Stream;
import com.provectus.kafka.ui.pages.ksqldb.models.Table;
+import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
import java.util.ArrayList;
import java.util.List;
+import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
@@ -16,60 +19,71 @@
public class KsqlDbTest extends BaseTest {
- private static final Stream STREAM_FOR_CHECK_TABLES = new Stream()
- .setName("STREAM_FOR_CHECK_TABLES_" + randomAlphabetic(4).toUpperCase())
- .setTopicName("TOPIC_FOR_STREAM_" + randomAlphabetic(4).toUpperCase());
+ private static final Stream DEFAULT_STREAM = new Stream()
+ .setName("DEFAULT_STREAM_" + randomAlphabetic(4).toUpperCase())
+ .setTopicName("DEFAULT_TOPIC_" + randomAlphabetic(4).toUpperCase());
private static final Table FIRST_TABLE = new Table()
- .setName("FIRST_TABLE" + randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
+ .setName("FIRST_TABLE_" + randomAlphabetic(4).toUpperCase())
+ .setStreamName(DEFAULT_STREAM.getName());
private static final Table SECOND_TABLE = new Table()
- .setName("SECOND_TABLE" + randomAlphabetic(4).toUpperCase())
- .setStreamName(STREAM_FOR_CHECK_TABLES.getName());
+ .setName("SECOND_TABLE_" + randomAlphabetic(4).toUpperCase())
+ .setStreamName(DEFAULT_STREAM.getName());
private static final List<String> TOPIC_NAMES_LIST = new ArrayList<>();
@BeforeClass(alwaysRun = true)
public void beforeClass() {
apiService
- .createStream(STREAM_FOR_CHECK_TABLES)
+ .createStream(DEFAULT_STREAM)
.createTables(FIRST_TABLE, SECOND_TABLE);
- TOPIC_NAMES_LIST.addAll(List.of(STREAM_FOR_CHECK_TABLES.getTopicName(),
+ TOPIC_NAMES_LIST.addAll(List.of(DEFAULT_STREAM.getTopicName(),
FIRST_TABLE.getName(), SECOND_TABLE.getName()));
}
- @QaseId(41)
+ @QaseId(86)
@Test(priority = 1)
- public void checkShowTablesRequestExecution() {
- navigateToKsqlDb();
- ksqlDbList
- .clickExecuteKsqlRequestBtn();
- ksqlQueryForm
- .waitUntilScreenReady()
- .setQuery(SHOW_TABLES.getQuery())
- .clickExecuteBtn();
+ public void clearResultsForExecutedRequest() {
+ navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
- softly.assertTrue(ksqlQueryForm.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableName()");
- softly.assertTrue(ksqlQueryForm.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableName()");
+ softly.assertAll();
+ ksqlQueryForm
+ .clickClearResultsBtn();
+ softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertAll();
}
- @QaseId(86)
+ @QaseId(276)
@Test(priority = 2)
- public void clearResultsForExecutedRequest() {
- navigateToKsqlDb();
- ksqlDbList
- .clickExecuteKsqlRequestBtn();
+ public void clearEnteredQueryCheck() {
+ navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+ Assert.assertFalse(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
ksqlQueryForm
- .waitUntilScreenReady()
- .setQuery(SHOW_TABLES.getQuery())
- .clickExecuteBtn();
+ .clickClearBtn();
+ Assert.assertTrue(ksqlQueryForm.getEnteredQuery().isEmpty(), "getEnteredQuery()");
+ }
+
+ @QaseId(41)
+ @Test(priority = 3)
+ public void checkShowTablesRequestExecution() {
+ navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
+ softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
softly.assertAll();
+ }
+
+ @Step
+ private void navigateToKsqlDbAndExecuteRequest(String query) {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady()
+ .clickExecuteKsqlRequestBtn();
ksqlQueryForm
- .clickClearResultsBtn();
- softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
- softly.assertAll();
+ .waitUntilScreenReady()
+ .setQuery(query)
+ .clickExecuteBtn(query);
}
@AfterClass(alwaysRun = true)
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
index 3bbc7e7cd3d..508a3b95be8 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/MessagesTest.java
@@ -8,7 +8,6 @@
import com.provectus.kafka.ui.BaseTest;
import com.provectus.kafka.ui.models.Topic;
-import com.provectus.kafka.ui.pages.topics.TopicDetails;
import io.qameta.allure.Issue;
import io.qameta.allure.Step;
import io.qase.api.annotation.QaseId;
@@ -140,24 +139,22 @@ public void checkPurgeMessagePossibility() {
softly.assertAll();
}
- @Ignore
- @Issue("https://github.com/provectus/kafka-ui/issues/2394")
@QaseId(15)
@Test(priority = 6)
public void checkMessageFilteringByOffset() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES);
- TopicDetails.MessageGridItem secondMessage = topicDetails.getMessageByOffset(1);
+ int nextOffset = topicDetails
+ .openDetailsTab(MESSAGES)
+ .getAllMessages().stream()
+ .findFirst().orElseThrow().getOffset() + 1;
topicDetails
.selectSeekTypeDdlMessagesTab("Offset")
- .setSeekTypeValueFldMessagesTab(String.valueOf(secondMessage.getOffset()))
+ .setSeekTypeValueFldMessagesTab(String.valueOf(nextOffset))
.clickSubmitFiltersBtnMessagesTab();
SoftAssert softly = new SoftAssert();
topicDetails.getAllMessages().forEach(message ->
- softly.assertTrue(message.getOffset() == secondMessage.getOffset()
- || message.getOffset() > secondMessage.getOffset(),
- String.format("Expected offset is: %s, but found: %s", secondMessage.getOffset(), message.getOffset())));
+ softly.assertTrue(message.getOffset() >= nextOffset,
+ String.format("Expected offset not less: %s, but found: %s", nextOffset, message.getOffset())));
softly.assertAll();
}
@@ -168,13 +165,11 @@ public void checkMessageFilteringByOffset() {
@Test(priority = 7)
public void checkMessageFilteringByTimestamp() {
navigateToTopicsAndOpenDetails(TOPIC_FOR_CHECK_FILTERS.getName());
- topicDetails
- .openDetailsTab(MESSAGES);
- LocalDateTime firstTimestamp = topicDetails.getMessageByOffset(0).getTimestamp();
- List<TopicDetails.MessageGridItem> nextMessages = topicDetails.getAllMessages().stream()
+ LocalDateTime firstTimestamp = topicDetails
+ .openDetailsTab(MESSAGES)
+ .getMessageByOffset(0).getTimestamp();
+ LocalDateTime nextTimestamp = topicDetails.getAllMessages().stream()
.filter(message -> message.getTimestamp().getMinute() != firstTimestamp.getMinute())
- .toList();
- LocalDateTime nextTimestamp = nextMessages.stream()
.findFirst().orElseThrow().getTimestamp();
topicDetails
.selectSeekTypeDdlMessagesTab("Timestamp")
@@ -183,8 +178,7 @@ public void checkMessageFilteringByTimestamp() {
.clickSubmitFiltersBtnMessagesTab();
SoftAssert softly = new SoftAssert();
topicDetails.getAllMessages().forEach(message ->
- softly.assertTrue(message.getTimestamp().isEqual(nextTimestamp)
- || message.getTimestamp().isAfter(nextTimestamp),
+ softly.assertFalse(message.getTimestamp().isBefore(nextTimestamp),
String.format("Expected that %s is not before %s.", message.getTimestamp(), nextTimestamp)));
softly.assertAll();
}
diff --git a/kafka-ui-e2e-checks/src/test/resources/regression.xml b/kafka-ui-e2e-checks/src/test/resources/regression.xml
index fe102bae3e3..c6461ea14ca 100644
--- a/kafka-ui-e2e-checks/src/test/resources/regression.xml
+++ b/kafka-ui-e2e-checks/src/test/resources/regression.xml
@@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="RegressionSuite">
- <test name="RegressionTest" enabled="true" parallel="classes" thread-count="3">
+ <test name="RegressionTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.smokesuite.*"/>
<package name="com.provectus.kafka.ui.sanitysuite.*"/>
diff --git a/kafka-ui-e2e-checks/src/test/resources/sanity.xml b/kafka-ui-e2e-checks/src/test/resources/sanity.xml
index c6b9b060249..bb67922402c 100644
--- a/kafka-ui-e2e-checks/src/test/resources/sanity.xml
+++ b/kafka-ui-e2e-checks/src/test/resources/sanity.xml
@@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="SanitySuite">
- <test name="SanityTest" enabled="true" parallel="classes" thread-count="3">
+ <test name="SanityTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.sanitysuite.*"/>
</packages>
diff --git a/kafka-ui-e2e-checks/src/test/resources/smoke.xml b/kafka-ui-e2e-checks/src/test/resources/smoke.xml
index ab2929ff340..db93607727f 100644
--- a/kafka-ui-e2e-checks/src/test/resources/smoke.xml
+++ b/kafka-ui-e2e-checks/src/test/resources/smoke.xml
@@ -1,6 +1,6 @@
<!DOCTYPE suite SYSTEM "https://testng.org/testng-1.0.dtd">
<suite name="SmokeSuite">
- <test name="SmokeTest" enabled="true" parallel="classes" thread-count="3">
+ <test name="SmokeTest" enabled="true" parallel="classes" thread-count="2">
<packages>
<package name="com.provectus.kafka.ui.smokesuite.*"/>
</packages>
| train | test | 2023-04-13T18:03:43 | "2023-04-11T07:35:31Z" | VladSenyuta | train |
provectus/kafka-ui/2753_3672 | provectus/kafka-ui | provectus/kafka-ui/2753 | provectus/kafka-ui/3672 | [
"connected"
] | 039f50273e57ca219748606677539d101040a462 | da3932e3422e45bdb0dc27cf538b79c8bf872602 | [
"Hello there LaurentDanti! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"@LaurentDanti thank you for the issue!\r\n\r\n@Haarolean I think we already have issue created for fonts, maybe we add github call handling to that issue? ",
"hey @LaurentDanti, thanks for reaching out.\r\nI know already at least a few cases in which people successfully use the app in the \"air gap\" environment (e.g. #1390).\r\nWe already have the fonts issue in progress (#2728), we'll keep this open for considering opting out of the version check.",
"@Haarolean hello, I also have the fonts issue, UI doesn't work in isolated network, would you give workaround ?",
"@Fuou we're working on it (#2372)",
" @Haarolean what's the status here?",
"@cassanellicarlo it's up for grabs for the 3rd-party contributors since we don't have enough capacity to address this issue.",
"Hey π what is the status woth the opt out option?",
"Frontend implementation is invalid.\r\n\r\nTagged versions before:\r\n<img width=\"361\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/c44b0863-63dd-446f-a25a-a5d816843a60\">\r\nTagged versions now are missing the tag and have a commit displayed anyway:\r\n<img width=\"408\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/ed5fb888-52fa-4f8a-aa18-9d0d768d1402\">\r\n\r\nif version == versionTag display version rather than commitId\r\n<img width=\"766\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/66a9a277-d452-46df-ac17-2696fa8bba87\">\r\n\r\n@David-DB88 \r\n\r\n\r\n",
"ok\r\n"
] | [
"why did we remove this ?",
"<img width=\"612\" alt=\"Screen Shot 2023-04-20 at 19 55 05\" src=\"https://user-images.githubusercontent.com/58771979/233421166-3898ea13-58b8-44a0-b081-63734d97ea59.png\">\r\ni changed it with this "
] | "2023-04-14T14:40:26Z" | [
"type/enhancement",
"scope/backend",
"scope/frontend",
"status/pending-frontend"
] | Air Gap: Enhancement: Opt out of version check | Hello,
i'm using your great UI for kafka cluster instance and it's perfect : functionality, rapidity, installating in k8s world with helm.
But i try to install this UI in isolated network without internet access so github.com is not allow.
Is it possible to disable call from the ui to https://api.github.com/repos/provectus/kafka-ui/releases/latest and load the font inside the image ?
Like that the UI could be use in isolated network.
| [
"kafka-ui-react-app/src/components/Version/Version.tsx",
"kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx",
"kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx",
"kafka-ui-react-app/src/lib/fixtures/actuatorInfo.ts",
"kafka-ui-react-app/src/lib/fixtures/latestVersion.ts",
"kafka-ui-react-app/src/lib/hooks/api/__tests__/actuatorInfo.spec.ts",
"kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts",
"kafka-ui-react-app/src/lib/hooks/api/actuatorInfo.ts",
"kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts"
] | [
"kafka-ui-react-app/src/components/Version/Version.tsx",
"kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx",
"kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx",
"kafka-ui-react-app/src/lib/fixtures/latestVersion.ts",
"kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts",
"kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Version/Version.tsx b/kafka-ui-react-app/src/components/Version/Version.tsx
index 7a820b116a2..6788605d92b 100644
--- a/kafka-ui-react-app/src/components/Version/Version.tsx
+++ b/kafka-ui-react-app/src/components/Version/Version.tsx
@@ -1,52 +1,38 @@
import React from 'react';
import WarningIcon from 'components/common/Icons/WarningIcon';
import { gitCommitPath } from 'lib/paths';
-import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
-import { BUILD_VERSION_PATTERN } from 'lib/constants';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
import { formatTimestamp } from 'lib/dateTimeHelpers';
import * as S from './Version.styled';
-import compareVersions from './compareVersions';
const Version: React.FC = () => {
- const { data: actuatorInfo = {} } = useActuatorInfo();
const { data: latestVersionInfo = {} } = useLatestVersion();
-
- const tag = actuatorInfo?.build?.version;
- const commit = actuatorInfo?.git?.commit.id;
- const { tag_name: latestTag } = latestVersionInfo;
-
- const outdated = compareVersions(tag, latestTag);
-
- const currentVersion = tag?.match(BUILD_VERSION_PATTERN)
- ? tag
- : formatTimestamp(actuatorInfo?.build?.time);
-
- if (!tag) return null;
+ const { buildTime, commitId, isLatestRelease } = latestVersionInfo.build;
+ const { versionTag } = latestVersionInfo?.latestRelease || '';
return (
<S.Wrapper>
- {!!outdated && (
+ {!isLatestRelease && (
<S.OutdatedWarning
- title={`Your app version is outdated. Current latest version is ${latestTag}`}
+ title={`Your app version is outdated. Current latest version is ${versionTag}`}
>
<WarningIcon />
</S.OutdatedWarning>
)}
- {commit && (
+ {commitId && (
<div>
<S.CurrentCommitLink
title="Current commit"
target="__blank"
- href={gitCommitPath(commit)}
+ href={gitCommitPath(commitId)}
>
- {commit}
+ {commitId}
</S.CurrentCommitLink>
</div>
)}
- <S.CurrentVersion>{currentVersion}</S.CurrentVersion>
+ <S.CurrentVersion>{formatTimestamp(buildTime)}</S.CurrentVersion>
</S.Wrapper>
);
};
diff --git a/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx b/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx
index d407966058c..2700dac8947 100644
--- a/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx
+++ b/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx
@@ -2,87 +2,40 @@ import React from 'react';
import { screen } from '@testing-library/dom';
import Version from 'components/Version/Version';
import { render } from 'lib/testHelpers';
-import { formatTimestamp } from 'lib/dateTimeHelpers';
-import { useActuatorInfo } from 'lib/hooks/api/actuatorInfo';
import { useLatestVersion } from 'lib/hooks/api/latestVersion';
-import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
-import { latestVersionPayload } from 'lib/fixtures/latestVersion';
+import {
+ deprecatedVersionPayload,
+ latestVersionPayload,
+} from 'lib/fixtures/latestVersion';
-jest.mock('lib/hooks/api/actuatorInfo', () => ({
- useActuatorInfo: jest.fn(),
-}));
jest.mock('lib/hooks/api/latestVersion', () => ({
useLatestVersion: jest.fn(),
}));
-
describe('Version Component', () => {
- const versionTag = 'v0.5.0';
- const snapshotTag = 'test-SNAPSHOT';
- const commitTag = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813';
-
- const actuatorVersionPayload = actuatorInfoPayload(versionTag);
- const formattedTimestamp = formatTimestamp(actuatorVersionPayload.build.time);
-
- beforeEach(() => {
- (useActuatorInfo as jest.Mock).mockImplementation(() => ({
- data: actuatorVersionPayload,
- }));
- (useLatestVersion as jest.Mock).mockImplementation(() => ({
- data: latestVersionPayload,
- }));
- });
-
- describe('tag does not exist', () => {
- it('does not render component', async () => {
- (useActuatorInfo as jest.Mock).mockImplementation(() => ({
- data: null,
- }));
- const { container } = render(<Version />);
- expect(container.firstChild).toBeEmptyDOMElement();
- });
- });
+ const commitId = '96a577a';
- describe('renders current version', () => {
- it('renders release build version as current version', async () => {
- render(<Version />);
- expect(screen.getByText(versionTag)).toBeInTheDocument();
- });
- it('renders formatted timestamp as current version when version is commit', async () => {
- (useActuatorInfo as jest.Mock).mockImplementation(() => ({
- data: actuatorInfoPayload(commitTag),
+ describe('render latest version', () => {
+ beforeEach(() => {
+ (useLatestVersion as jest.Mock).mockImplementation(() => ({
+ data: latestVersionPayload,
}));
- render(<Version />);
- expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
});
- it('renders formatted timestamp as current version when version contains -SNAPSHOT', async () => {
- (useActuatorInfo as jest.Mock).mockImplementation(() => ({
- data: actuatorInfoPayload(snapshotTag),
- }));
+ it('renders latest release version as current version', async () => {
render(<Version />);
- expect(screen.getByText(formattedTimestamp)).toBeInTheDocument();
+ expect(screen.getByText(commitId)).toBeInTheDocument();
});
- });
- describe('outdated build version', () => {
- it('renders warning message', async () => {
- (useActuatorInfo as jest.Mock).mockImplementation(() => ({
- data: actuatorInfoPayload('v0.3.0'),
- }));
+ it('should not show warning icon if it is last release', async () => {
render(<Version />);
- expect(
- screen.getByTitle(
- `Your app version is outdated. Current latest version is ${latestVersionPayload.tag_name}`
- )
- ).toBeInTheDocument();
+ expect(screen.queryByRole('img')).not.toBeInTheDocument();
});
});
- describe('current commit id with link', () => {
- it('renders', async () => {
- render(<Version />);
- expect(
- screen.getByText(actuatorVersionPayload.git.commit.id)
- ).toBeInTheDocument();
- });
+ it('show warning icon if it is not last release', async () => {
+ (useLatestVersion as jest.Mock).mockImplementation(() => ({
+ data: deprecatedVersionPayload,
+ }));
+ render(<Version />);
+ expect(screen.getByRole('img')).toBeInTheDocument();
});
});
diff --git a/kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx b/kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx
index ab2f8dee759..1bffe0db537 100644
--- a/kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx
+++ b/kafka-ui-react-app/src/components/common/Icons/WarningIcon.tsx
@@ -13,6 +13,7 @@ const WarningIcon: React.FC = () => {
return (
<WarningIconContainer>
<svg
+ role="img"
width="14"
height="13"
viewBox="0 0 14 13"
diff --git a/kafka-ui-react-app/src/lib/fixtures/actuatorInfo.ts b/kafka-ui-react-app/src/lib/fixtures/actuatorInfo.ts
deleted file mode 100644
index e7dff6e1eca..00000000000
--- a/kafka-ui-react-app/src/lib/fixtures/actuatorInfo.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-export const actuatorInfoPayload = (
- version = 'befd3b328e2c9c7df57b0c5746561b2f7fee8813'
-) => ({
- git: { commit: { id: 'befd3b3' } },
- build: {
- artifact: 'kafka-ui-api',
- name: 'kafka-ui-api',
- time: '2022-09-15T09:52:21.753Z',
- version,
- group: 'com.provectus',
- },
-});
diff --git a/kafka-ui-react-app/src/lib/fixtures/latestVersion.ts b/kafka-ui-react-app/src/lib/fixtures/latestVersion.ts
index a6c9eca856e..d1e62da6b80 100644
--- a/kafka-ui-react-app/src/lib/fixtures/latestVersion.ts
+++ b/kafka-ui-react-app/src/lib/fixtures/latestVersion.ts
@@ -1,3 +1,16 @@
+export const deprecatedVersionPayload = {
+ build: {
+ buildTime: '2023-04-14T09:47:35.463Z',
+ commitId: '96a577a',
+ isLatestRelease: false,
+ version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
+ },
+};
export const latestVersionPayload = {
- tag_name: 'v0.4.0',
+ build: {
+ buildTime: '2023-04-14T09:47:35.463Z',
+ commitId: '96a577a',
+ isLatestRelease: true,
+ version: '96a577a98c6069376c5d22ed49cffd3739f1bbdc',
+ },
};
diff --git a/kafka-ui-react-app/src/lib/hooks/api/__tests__/actuatorInfo.spec.ts b/kafka-ui-react-app/src/lib/hooks/api/__tests__/actuatorInfo.spec.ts
deleted file mode 100644
index c4e639680ed..00000000000
--- a/kafka-ui-react-app/src/lib/hooks/api/__tests__/actuatorInfo.spec.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import fetchMock from 'fetch-mock';
-import * as hooks from 'lib/hooks/api/actuatorInfo';
-import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
-import { actuatorInfoPayload } from 'lib/fixtures/actuatorInfo';
-
-const actuatorInfoPath = '/actuator/info';
-
-describe('Actuator info hooks', () => {
- beforeEach(() => fetchMock.restore());
- describe('useActuatorInfo', () => {
- it('returns the correct data', async () => {
- const mock = fetchMock.getOnce(actuatorInfoPath, actuatorInfoPayload());
- const { result } = renderQueryHook(() => hooks.useActuatorInfo());
- await expectQueryWorks(mock, result);
- });
- });
-});
diff --git a/kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts b/kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts
index 0770bd4606d..a12f2629950 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts
+++ b/kafka-ui-react-app/src/lib/hooks/api/__tests__/latestVersion.spec.ts
@@ -1,18 +1,16 @@
import fetchMock from 'fetch-mock';
import { expectQueryWorks, renderQueryHook } from 'lib/testHelpers';
-import * as hooks from 'lib/hooks/api/latestVersion';
-import { GIT_REPO_LATEST_RELEASE_LINK } from 'lib/constants';
import { latestVersionPayload } from 'lib/fixtures/latestVersion';
+import { useLatestVersion } from 'lib/hooks/api/latestVersion';
+
+const latestVersionPath = '/api/info';
describe('Latest version hooks', () => {
beforeEach(() => fetchMock.restore());
describe('useLatestVersion', () => {
it('returns the correct data', async () => {
- const mock = fetchMock.getOnce(
- GIT_REPO_LATEST_RELEASE_LINK,
- latestVersionPayload
- );
- const { result } = renderQueryHook(() => hooks.useLatestVersion());
+ const mock = fetchMock.getOnce(latestVersionPath, latestVersionPayload);
+ const { result } = renderQueryHook(() => useLatestVersion());
await expectQueryWorks(mock, result);
});
});
diff --git a/kafka-ui-react-app/src/lib/hooks/api/actuatorInfo.ts b/kafka-ui-react-app/src/lib/hooks/api/actuatorInfo.ts
deleted file mode 100644
index 7e1835d9079..00000000000
--- a/kafka-ui-react-app/src/lib/hooks/api/actuatorInfo.ts
+++ /dev/null
@@ -1,19 +0,0 @@
-import { useQuery } from '@tanstack/react-query';
-import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
-
-const fetchActuatorInfo = async () => {
- const data = await fetch(
- `${BASE_PARAMS.basePath}/actuator/info`,
- BASE_PARAMS
- ).then((res) => res.json());
-
- return data;
-};
-
-export function useActuatorInfo() {
- return useQuery(
- ['actuatorInfo'],
- fetchActuatorInfo,
- QUERY_REFETCH_OFF_OPTIONS
- );
-}
diff --git a/kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts b/kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts
index 1087fb572e1..0711ad34d91 100644
--- a/kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts
+++ b/kafka-ui-react-app/src/lib/hooks/api/latestVersion.ts
@@ -1,21 +1,19 @@
import { useQuery } from '@tanstack/react-query';
-import {
- QUERY_REFETCH_OFF_OPTIONS,
- GIT_REPO_LATEST_RELEASE_LINK,
-} from 'lib/constants';
+import { BASE_PARAMS, QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants';
-const fetchLatestVersion = async () => {
- const data = await fetch(GIT_REPO_LATEST_RELEASE_LINK).then((res) =>
- res.json()
- );
+const fetchLatestVersionInfo = async () => {
+ const data = await fetch(
+ `${BASE_PARAMS.basePath}/api/info`,
+ BASE_PARAMS
+ ).then((res) => res.json());
return data;
};
export function useLatestVersion() {
return useQuery(
- ['latestVersion'],
- fetchLatestVersion,
+ ['versionInfo'],
+ fetchLatestVersionInfo,
QUERY_REFETCH_OFF_OPTIONS
);
}
| null | val | test | 2023-04-24T21:01:32 | "2022-10-14T10:49:29Z" | LaurentDanti | train |
provectus/kafka-ui/3646_3684 | provectus/kafka-ui | provectus/kafka-ui/3646 | provectus/kafka-ui/3684 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | a1e7a20887c624195e68593d8aa6ae7a4e6c3daa | [] | [
"i think we can `!!` put it in front of the notation , then remove the `|| false` but it is not that important ",
"ΠΎΠΊ"
] | "2023-04-17T14:51:41Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Wizard: RBAC: Disable configure buttons if there are no permissions | buttons should be disabled if there's no "applicationconfig" rbac permission present for the user | [
"kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx",
"kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx"
] | [
"kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx",
"kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx b/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx
index cb41ab06a83..19fefd784ce 100644
--- a/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx
+++ b/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx
@@ -1,17 +1,31 @@
-import React from 'react';
-import { Cluster } from 'generated-sources';
+import React, { useMemo } from 'react';
+import { Cluster, ResourceType } from 'generated-sources';
import { CellContext } from '@tanstack/react-table';
-import { Button } from 'components/common/Button/Button';
import { clusterConfigPath } from 'lib/paths';
+import { useGetUserInfo } from 'lib/hooks/api/roles';
+import { ActionCanButton } from 'components/common/ActionComponent';
type Props = CellContext<Cluster, unknown>;
const ClusterTableActionsCell: React.FC<Props> = ({ row }) => {
const { name } = row.original;
+ const { data } = useGetUserInfo();
+
+ const isApplicationConfig = useMemo(() => {
+ return !!data?.userInfo?.permissions.some(
+ (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
+ );
+ }, [data]);
+
return (
- <Button buttonType="secondary" buttonSize="S" to={clusterConfigPath(name)}>
+ <ActionCanButton
+ buttonType="secondary"
+ buttonSize="S"
+ to={clusterConfigPath(name)}
+ canDoAction={isApplicationConfig}
+ >
Configure
- </Button>
+ </ActionCanButton>
);
};
diff --git a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
index 7eab4c1d2ff..c7b64aef1cf 100644
--- a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
+++ b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx
@@ -1,23 +1,25 @@
-import React, { useEffect } from 'react';
+import React, { useEffect, useMemo } from 'react';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as Metrics from 'components/common/Metrics';
import { Tag } from 'components/common/Tag/Tag.styled';
import Switch from 'components/common/Switch/Switch';
import { useClusters } from 'lib/hooks/api/clusters';
-import { Cluster, ServerStatus } from 'generated-sources';
+import { Cluster, ResourceType, ServerStatus } from 'generated-sources';
import { ColumnDef } from '@tanstack/react-table';
import Table, { SizeCell } from 'components/common/NewTable';
import useBoolean from 'lib/hooks/useBoolean';
-import { Button } from 'components/common/Button/Button';
import { clusterNewConfigPath } from 'lib/paths';
import { GlobalSettingsContext } from 'components/contexts/GlobalSettingsContext';
import { useNavigate } from 'react-router-dom';
+import { ActionCanButton } from 'components/common/ActionComponent';
+import { useGetUserInfo } from 'lib/hooks/api/roles';
import * as S from './Dashboard.styled';
import ClusterName from './ClusterName';
import ClusterTableActionsCell from './ClusterTableActionsCell';
const Dashboard: React.FC = () => {
+ const { data } = useGetUserInfo();
const clusters = useClusters();
const { value: showOfflineOnly, toggle } = useBoolean(false);
const appInfo = React.useContext(GlobalSettingsContext);
@@ -62,6 +64,11 @@ const Dashboard: React.FC = () => {
}
}, [clusters, appInfo.hasDynamicConfig]);
+ const isApplicationConfig = useMemo(() => {
+ return !!data?.userInfo?.permissions.some(
+ (permission) => permission.resource === ResourceType.APPLICATIONCONFIG
+ );
+ }, [data]);
return (
<>
<PageHeading text="Dashboard" />
@@ -87,9 +94,14 @@ const Dashboard: React.FC = () => {
<label>Only offline clusters</label>
</div>
{appInfo.hasDynamicConfig && (
- <Button buttonType="primary" buttonSize="M" to={clusterNewConfigPath}>
+ <ActionCanButton
+ buttonType="primary"
+ buttonSize="M"
+ to={clusterNewConfigPath}
+ canDoAction={isApplicationConfig}
+ >
Configure new cluster
- </Button>
+ </ActionCanButton>
)}
</S.Toolbar>
<Table
| null | train | test | 2023-05-02T14:34:57 | "2023-04-10T10:46:47Z" | Haarolean | train |
provectus/kafka-ui/3044_3687 | provectus/kafka-ui | provectus/kafka-ui/3044 | provectus/kafka-ui/3687 | [
"connected"
] | 1b2827fb2ffd8b0890960845dfaab2ab1f7ebf2e | ad9d7dec2cd2effc6f3832e121830c99d98dce0f | [
"@Haarolean should it be moved to QA 0.6 or will it be done in Release 0.5?"
] | [] | "2023-04-18T15:03:24Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | System redirects to Brokers after Topic delete from profile |
**Describe the bug**
System redirects to Brokers after Topic delete
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setups. -->
Steps to reproduce the behavior:
1. Navigate to Topics
2. Add a topic
3. Select "Remove topic" from 3dot menu within topic profile
**Actual result:** System redirects to Brokers
**Expected behavior**
System should redirect back to Topics
**Screenshots**
https://user-images.githubusercontent.com/104780608/206436487-27e48413-828b-4a8d-8c74-b44e36caa687.mov
**Additional context**
relates to https://app.qase.io/case/KAFKAUI-207 | [
"kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
index 8945523576b..9430e4b7499 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Topic.tsx
@@ -59,7 +59,7 @@ const Topic: React.FC = () => {
const deleteTopicHandler = async () => {
await deleteTopic.mutateAsync(topicName);
- navigate('../..');
+ navigate(clusterTopicsPath(clusterName));
};
React.useEffect(() => {
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx
index 460e4ad5dee..4ec45c3a58f 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/__test__/Topic.spec.tsx
@@ -10,6 +10,7 @@ import {
clusterTopicMessagesPath,
clusterTopicPath,
clusterTopicSettingsPath,
+ clusterTopicsPath,
clusterTopicStatisticsPath,
getNonExactPath,
} from 'lib/paths';
@@ -179,7 +180,9 @@ describe('Details', () => {
name: 'Confirm',
});
await userEvent.click(submitDeleteButton);
- expect(mockNavigate).toHaveBeenCalledWith('../..');
+ expect(mockNavigate).toHaveBeenCalledWith(
+ clusterTopicsPath(mockClusterName)
+ );
});
it('shows a confirmation popup on deleting topic messages', async () => {
| null | train | test | 2023-04-24T12:50:08 | "2022-12-08T11:35:20Z" | armenuikafka | train |
provectus/kafka-ui/3688_3689 | provectus/kafka-ui | provectus/kafka-ui/3688 | provectus/kafka-ui/3689 | [
"keyword_pr_to_issue"
] | 5dd690aa2438182cd2a0deb4f45f4db259618cc4 | 8783da313fb342c883a7c54ea98c3a5240650773 | [] | [] | "2023-04-19T15:42:42Z" | [
"type/bug",
"scope/frontend",
"status/triage",
"status/accepted",
"status/confirmed"
] | Topic: Messages: Invalid size for null key/value messages | If key/value is absent the size is always 4 bytes.
<img width="1135" alt="image" src="https://user-images.githubusercontent.com/1494347/233128171-30311e14-bd9a-4d0c-8002-dafbaeaebbea.png">
| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
index 0282cde2ea7..fb4e258cca5 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
@@ -29,8 +29,10 @@ const Message: React.FC<Props> = ({
timestampType,
offset,
key,
+ keySize,
partition,
content,
+ valueSize,
headers,
valueSerde,
keySerde,
@@ -138,6 +140,8 @@ const Message: React.FC<Props> = ({
headers={headers}
timestamp={timestamp}
timestampType={timestampType}
+ keySize={keySize}
+ contentSize={valueSize}
/>
)}
</>
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
index fe472ad3b12..93616ca432a 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
@@ -15,6 +15,8 @@ export interface MessageContentProps {
headers?: { [key: string]: string | undefined };
timestamp?: Date;
timestampType?: TopicMessageTimestampTypeEnum;
+ keySize?: number;
+ contentSize?: number;
}
const MessageContent: React.FC<MessageContentProps> = ({
@@ -23,6 +25,8 @@ const MessageContent: React.FC<MessageContentProps> = ({
headers,
timestamp,
timestampType,
+ keySize,
+ contentSize,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
const [searchParams] = useSearchParams();
@@ -54,8 +58,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
e.preventDefault();
setActiveTab('headers');
};
- const keySize = new TextEncoder().encode(messageKey).length;
- const contentSize = new TextEncoder().encode(messageContent).length;
+
const contentType =
messageContent && messageContent.trim().startsWith('{')
? SchemaType.JSON
| null | test | test | 2023-04-19T18:14:06 | "2023-04-19T15:40:14Z" | Haarolean | train |
provectus/kafka-ui/3670_3698 | provectus/kafka-ui | provectus/kafka-ui/3670 | provectus/kafka-ui/3698 | [
"connected"
] | 5efb380c42a97418b29fd5e72ed8372be7d1b48e | abfdf97a9fae6d402854cdaee427f17be8db2401 | [] | [] | "2023-04-21T13:37:57Z" | [
"type/enhancement",
"scope/frontend",
"status/accepted"
] | SR: Compare versions view doesn't have a "back" button | [
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts",
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx",
"kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx"
] | [
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts",
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx",
"kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
index 8f178d320d1..520f9f6c8ad 100644
--- a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
+++ b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
@@ -1,4 +1,5 @@
import styled from 'styled-components';
+import { Button } from 'components/common/Button/Button';
export const DiffWrapper = styled.div`
align-items: stretch;
@@ -81,3 +82,6 @@ export const DiffTile = styled.div`
export const DiffVersionsSelect = styled.div`
width: 0.625em;
`;
+export const BackButton = styled(Button)`
+ margin: 10px 9px;
+`;
diff --git a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx
index d0016b46b44..05b1373ab60 100644
--- a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.tsx
@@ -20,6 +20,7 @@ import useAppParams from 'lib/hooks/useAppParams';
import PageHeading from 'components/common/PageHeading/PageHeading';
import * as S from './Diff.styled';
+import { BackButton } from './Diff.styled';
export interface DiffProps {
versions: SchemaSubject[];
@@ -77,6 +78,13 @@ const Diff: React.FC<DiffProps> = ({ versions, areVersionsFetched }) => {
backText="Schema Registry"
backTo={clusterSchemasPath(clusterName)}
/>
+ <BackButton
+ buttonType="secondary"
+ buttonSize="S"
+ onClick={() => navigate(-1)}
+ >
+ Back
+ </BackButton>
<S.Section>
{areVersionsFetched ? (
<S.DiffBox>
diff --git a/kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx b/kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx
index 0c614cf6617..2a9429eef1c 100644
--- a/kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Diff/__test__/Diff.spec.tsx
@@ -3,6 +3,7 @@ import Diff, { DiffProps } from 'components/Schemas/Diff/Diff';
import { render, WithRoute } from 'lib/testHelpers';
import { screen } from '@testing-library/react';
import { clusterSchemaComparePath } from 'lib/paths';
+import userEvent from '@testing-library/user-event';
import { versions } from './fixtures';
@@ -142,4 +143,24 @@ describe('Diff', () => {
expect(select).toHaveTextContent(versions[0].version);
});
});
+
+ describe('Back button', () => {
+ beforeEach(() => {
+ setupComponent({
+ areVersionsFetched: true,
+ versions,
+ });
+ });
+
+ it('back button is appear', () => {
+ const backButton = screen.getAllByRole('button', { name: 'Back' });
+ expect(backButton[0]).toBeInTheDocument();
+ });
+
+ it('click on back button', () => {
+ const backButton = screen.getAllByRole('button', { name: 'Back' });
+ userEvent.click(backButton[0]);
+ expect(screen.queryByRole('Back')).not.toBeInTheDocument();
+ });
+ });
});
| null | val | test | 2023-04-26T06:19:41 | "2023-04-14T09:25:50Z" | Haarolean | train |
|
provectus/kafka-ui/2752_3700 | provectus/kafka-ui | provectus/kafka-ui/2752 | provectus/kafka-ui/3700 | [
"connected"
] | da3932e3422e45bdb0dc27cf538b79c8bf872602 | 744bdb32a310306eefe8641923d712db697b1c70 | [
"We'd need some volunteers to test things out.\r\n\r\nWe coordinate in discord, a link to the thread -> [here](https://discord.com/channels/897805035122077716/1098621183584382976). \r\n\r\nImage: `docker pull public.ecr.aws/provectus/kafka-ui-custom-build:3700`\r\nConfig example: [here](https://github.com/provectus/kafka-ui/pull/3700#issuecomment-1518369213)\r\n\r\nPlease check\r\n1. That authentication with all users you need works fine.\r\n2. RBAC groups matching works fine.\r\n\r\nAD a bit later, it requires more black magic.",
"As I found no volunteers w/ AD, it's gonna be a separate issue. Please upvote #3741 if you're interested.",
"@Haarolean \r\nTested RBAC for image public.ecr.aws/provectus/kafka-ui-custom-build:3700 with LDAP configuration but not able login with this image and I dont see any errors in logs",
"> @Haarolean \n> \n> Tested RBAC for image public.ecr.aws/provectus/kafka-ui-custom-build:3700 with LDAP configuration but not able login with this image and I dont see any errors in logs\n\nMost likely, your configuration is invalid. Please share your config. \n",
"@Haarolean I have customized the docker image with Ldap configs and roles.yaml file, \r\nDockerfile\r\n```\r\nFROM public.ecr.aws/provectus/kafka-ui-custom-build:3700\r\nUSER root\r\nENV KAFKA_CLUSTERS_0_NAME=kafka\r\nENV KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<kafka broker:port>\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_PLAINTEXT\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=GSSAPI\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG='com.sun.security.auth.module.Krb5LoginModule required useTicketCache=false principal=\"******.COM\" useKeyTab=true serviceName=\"kafka\" keyTab=\"/keytabs/******.keytab\";'\r\nENV KAFKA_CLUSTERS_0_SCHEMAREGISTRY=<schema registry:port>\r\nENV KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME=dev\r\nENV KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS=<kafka connect:port>\r\nENV SPRING_CONFIG_ADDITIONAL-LOCATION= /roles.yml\r\nENV KAFKA_CLUSTERS_0_READONLY=false\r\nCOPY krb5.conf /etc/krb5.conf\r\nCOPY roles.yml /roles.yml\r\nCOPY *****.keytab /keytabs/\r\nCOPY entrypoint.sh /\r\nENTRYPOINT [\"/entrypoint.sh\"]\r\n\r\n```\r\n\r\nroles.yml file\r\n```\r\nrbac:\r\n roles:\r\n - name: \"readonly\"\r\n clusters:\r\n - <kafka dev>\r\n subjects:\r\n - provider: ldap\r\n type: group\r\n value: \"OU=Users,OU=Accounts,OU=Resources,DC=<domain>,DC=com\"\r\n - provider: ldap_ad\r\n type: user\r\n value: \"CN <name>\"\r\n permissions:\r\n - resource: clusterconfig\r\n actions: [ \"view\" ]\r\n\r\n - resource: topic\r\n value: \".*\"\r\n actions:\r\n - VIEW\r\n - MESSAGES_READ\r\n\r\n - resource: consumer\r\n value: \".*\"\r\n actions: [ view ]\r\n\r\n - resource: schema\r\n value: \".*\"\r\n actions: [ view ]\r\n\r\n - resource: connect\r\n value: \".*\"\r\n actions: [ view ]\r\n```",
"@padmaachuth you haven't configured LDAP auth at all, see the first lines of the config example I provided",
"> @padmaachuth you haven't configured LDAP auth at all, see the first lines of the config example I provided\r\n\r\n@Haarolean deploying image in openshift passing LDAP config from ENV \r\n\r\nKAFKA_CLUSTERS_0_NAME: <Cluster Name>\r\nKAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: <Bootstrap servers:port>\r\nKAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT\r\nKAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: GSSAPI\r\nKAFKA_CLUSTERS_0_SCHEMAREGISTRY: <Schema Registry URL>\r\nKAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: <Kafka Connector Name>\r\nKAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: com.sun.security.auth.module.Krb5LoginModule required useTicketCache=false principal=\"<principal name>@DOMAIN.COM\" useKeyTab=true serviceName=\"kafka\" keyTab=\"/keytabs/<Principal Name>.keytab\";\r\nKAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: <Kafka Connector URL>\r\nKAFKA_CLUSTERS_0_READONLY: false\r\nAUTH_TYPE: LDAP\r\nSPRING_CONFIG_ADDITIONAL-LOCATION: /roles.yml\r\nSPRING_LDAP_URLS: <LDAP URL>\r\nSPRING_LDAP_USER-FILTER-SEARCH-BASE: DC=Domain,DC=com\r\nSPRING_LDAP_USER-FILTER-SEARCH-FILTER: (&(sAMAccountName={0})(objectClass=person))\r\nSPRING_LDAP_ADMINUSER: <admin user>\r\nSPRING_LDAP_ADMINPASSWORD:<password> \r\n\r\nWith this LDAP config able to Login but it not listing cluster, topics,schema,connectors,etc.",
"@padmaachuth please raise a discussion or ask in discord community, that's unrelated to this issue",
"RBAC with LDAP does not work for me on v0.7.0.\r\nI have the following config:\r\n\r\n```yaml\r\nkafka:\r\n clusters:\r\n - name: dev\r\n readOnly: false\r\n bootstrapServers: ${KAFKA_BOOTSTRAP}:9093\r\n\r\nauth:\r\n type: LDAP\r\nspring:\r\n ldap:\r\n urls: ldap://${AD_SERVER}\r\n base: \"sAMAccountName={0},ou=${USERS_OU},dc=${AD_SERVER}\"\r\n admin-user: \"cn=${LDAP_ADMIN_USERNAME},dc=${AD_SERVER}\"\r\n admin-password: \"${LDAP_ADMIN_PASSWORD}\"\r\n user-filter-search-base: \"dc=${AD_SERVER}\"\r\n user-filter-search-filter: \"(&(sAMAccountName={0})(objectClass=user)(|(memberof=CN=${GROUP_NAME},OU=${GROUPS_OU},DC=${AD_SERVER})))\"\r\n group-filter-search-base: \"OU=${GROUPS_OU},DC=${AD_SERVER}\"\r\noauth2:\r\n ldap:\r\n activeDirectory: false\r\n activeDirectory.domain: \"${AD_SERVER}\"\r\n\r\nrbac:\r\n roles:\r\n - name: \"admins\"\r\n clusters:\r\n - dev\r\n subjects:\r\n - provider: ldap\r\n type: group\r\n value: \"${GROUP_NAME}\"\r\n permissions:\r\n - resource: applicationconfig\r\n # value not applicable for applicationconfig\r\n actions:\r\n - view\r\n # - edit\r\n - resource: clusterconfig\r\n # value not applicable for clusterconfig\r\n actions:\r\n - view\r\n # - edit\r\n - resource: topic\r\n value: \".*\"\r\n actions:\r\n - view\r\n # - create\r\n # - edit\r\n # - delete\r\n - messages_read\r\n - messages_produce\r\n - messages_delete\r\n - resource: consumer\r\n value: \".*\"\r\n actions:\r\n - view\r\n - delete\r\n - reset_offsets\r\n - resource: schema\r\n value: \".*\"\r\n actions:\r\n - view\r\n - create\r\n - delete\r\n - edit\r\n # - modify_global_compatibility\r\n - resource: connect\r\n value: \".*\"\r\n actions:\r\n - view\r\n # - edit\r\n # - create\r\n - resource: ksql\r\n # value not applicable for ksql\r\n actions:\r\n - execute\r\n # - resource: acl\r\n # # value not applicable for acl\r\n # value: \".*\" # FIXME: it crashes if this is removed\r\n # actions:\r\n # - view\r\n # # - edit\r\n```\r\n\r\nI am able to login but I cannot see any clusters:\r\n\r\n\r\n\r\nI have enabled DEBUG logs and I can see this:\r\n\r\n```log\r\nDEBUG [boundedElastic-3] o.s.s.l.u.DefaultLdapAuthoritiesPopulator: Found roles from search [{spring.security.ldap.dn=[CN=${GROUP_NAME},OU=${GROUPS_OU},DC=${AD_SERVER}], cn=[${GROUP_NAME}]}]\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.a.DelegatingReactiveAuthorizationManager: Checking authorization on '/api/clusters' using org.springframework.security.authorization.AuthenticatedReactiveAuthorizationManager@XXXXX\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.c.WebSessionServerSecurityContextRepository: Found SecurityContext 'SecurityContextImpl [Authentication=UsernamePasswordAuthenticationToken [Principal=com.provectus.kafka.ui.config.auth.RbacLdapUser@XXXXX, Credentials=[PROTECTED], Authenticated=true, Details=null, Granted Authorities=[${GROUP_NAME}]]]' in WebSession: 'org.springframework.web.server.session.InMemoryWebSessionStore$InMemoryWebSession@XXXXX'\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.a.AuthorizationWebFilter: Authorization successful\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.RequestMappingHandlerMapping: [XXXXX] Mapped to com.provectus.kafka.ui.controller.ClustersController#getClusters(ServerWebExchange)\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.ResponseEntityResultHandler: [XXXXX] Using 'application/json' given [*/*] and supported [application/json]\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.ResponseEntityResultHandler: [XXXXX] 0..N [com.provectus.kafka.ui.model.ClusterDTO]\r\nDEBUG [reactor-http-epoll-1] 
o.s.s.w.s.c.WebSessionServerSecurityContextRepository: Found SecurityContext 'SecurityContextImpl [Authentication=UsernamePasswordAuthenticationToken [Principal=com.provectus.kafka.ui.config.auth.RbacLdapUser@XXXXX, Credentials=[PROTECTED], Authenticated=true, Details=null, Granted Authorities=[${GROUP_NAME}]]]' in WebSession: 'org.springframework.web.server.session.InMemoryWebSessionStore$InMemoryWebSession@XXXXX'\r\nDEBUG [reactor-http-epoll-1] o.s.w.s.a.HttpWebHandlerAdapter: [XXXXX] Completed 200 OK\r\n```\r\n\r\nIf I remove the `rbac` section, I can login and I can see and do everything because there are no roles.\r\n\r\nLet me know if I should open a separate issue.\r\n\r\nThanks.",
"@alexisph please raise either a new _discussion_ or join us on discord (the link available in readme)"
] | [
"fail with error msg here",
"rm implements",
"done",
"done"
] | "2023-04-21T21:40:05Z" | [
"type/enhancement",
"scope/backend",
"status/accepted",
"area/rbac"
] | RBAC: Support LDAP | For RBAC AD support see #3741
Feature implemented. How to set this up (a minimal combined configuration sketch follows this list):
- Set up LDAP authentication, docs [here](https://docs.kafka-ui.provectus.io/configuration/authentication/ldap-active-directory)
- Set up RBAC with LDAP subjects, docs [here](https://docs.kafka-ui.provectus.io/configuration/rbac-role-based-access-control)
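For example, a minimal combined configuration could look like the sketch below. All values (hosts, DNs, passwords, cluster and group names) are placeholders to adjust for your environment; the property names follow the docs linked above, and whether the group `value` should be the group name or its full DN is covered in the RBAC docs.

```yaml
auth:
  type: LDAP

spring:
  ldap:
    urls: "ldap://ldap.example.com:389"                     # placeholder LDAP host
    admin-user: "cn=admin,dc=example,dc=com"                # bind user used for searches
    admin-password: "changeme"
    user-filter-search-base: "ou=people,dc=example,dc=com"
    user-filter-search-filter: "(&(uid={0})(objectClass=person))"

rbac:
  roles:
    - name: "readonly"
      clusters:
        - local                                             # cluster name as configured for kafka-ui
      subjects:
        - provider: ldap
          type: group
          value: "kafka-ui-readonly"                        # LDAP group mapped to this role
      permissions:
        - resource: topic
          value: ".*"
          actions: [ "VIEW", "MESSAGES_READ" ]
```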
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
new file mode 100644
index 00000000000..13119b3bb94
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.config.auth;
+
+import lombok.Data;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+@ConfigurationProperties("spring.ldap")
+@Data
+public class LdapProperties {
+
+ private String urls;
+ private String base;
+ private String adminUser;
+ private String adminPassword;
+ private String userFilterSearchBase;
+ private String userFilterSearchFilter;
+
+ @Value("${oauth2.ldap.activeDirectory:false}")
+ private boolean isActiveDirectory;
+ @Value("${oauth2.ldap.aΡtiveDirectory.domain:@null}")
+ private String activeDirectoryDomain;
+
+ @Value("${oauth2.ldap.groupRoleAttribute:cn}")
+ private String groupRoleAttribute;
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
index 0ba5c231f4b..fae1125239e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
@@ -1,13 +1,23 @@
package com.provectus.kafka.ui.config.auth;
+import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
+
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
+import java.util.Collection;
import java.util.List;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
+import org.springframework.context.annotation.Primary;
+import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@@ -16,70 +26,71 @@
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
+import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
+@EnableConfigurationProperties(LdapProperties.class)
+@RequiredArgsConstructor
@Slf4j
-public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
-
- @Value("${spring.ldap.urls}")
- private String ldapUrls;
- @Value("${spring.ldap.dn.pattern:#{null}}")
- private String ldapUserDnPattern;
- @Value("${spring.ldap.adminUser:#{null}}")
- private String adminUser;
- @Value("${spring.ldap.adminPassword:#{null}}")
- private String adminPassword;
- @Value("${spring.ldap.userFilter.searchBase:#{null}}")
- private String userFilterSearchBase;
- @Value("${spring.ldap.userFilter.searchFilter:#{null}}")
- private String userFilterSearchFilter;
-
- @Value("${oauth2.ldap.activeDirectory:false}")
- private boolean isActiveDirectory;
- @Value("${oauth2.ldap.aΡtiveDirectory.domain:#{null}}")
- private String activeDirectoryDomain;
+public class LdapSecurityConfig {
+
+ private final LdapProperties props;
@Bean
- public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
+ public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
+ ApplicationContext context,
+ @Nullable AccessControlService acs) {
+ var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
- if (ldapUserDnPattern != null) {
- ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
+ if (props.getBase() != null) {
+ ba.setUserDnPatterns(new String[] {props.getBase()});
}
- if (userFilterSearchFilter != null) {
+ if (props.getUserFilterSearchFilter() != null) {
LdapUserSearch userSearch =
- new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
+ new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
+ contextSource);
ba.setUserSearch(userSearch);
}
AbstractLdapAuthenticationProvider authenticationProvider;
- if (!isActiveDirectory) {
- authenticationProvider = new LdapAuthenticationProvider(ba);
+ if (!props.isActiveDirectory()) {
+ authenticationProvider = rbacEnabled
+ ? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
+ : new LdapAuthenticationProvider(ba);
} else {
- authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
+ authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
+ props.getUrls()); // TODO Issue #3741
authenticationProvider.setUseAuthenticationRequestCredentials(true);
}
+ if (rbacEnabled) {
+ authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
+ }
+
AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));
return new ReactiveAuthenticationManagerAdapter(am);
}
@Bean
+ @Primary
public BaseLdapPathContextSource contextSource() {
LdapContextSource ctx = new LdapContextSource();
- ctx.setUrl(ldapUrls);
- ctx.setUserDn(adminUser);
- ctx.setPassword(adminPassword);
+ ctx.setUrl(props.getUrls());
+ ctx.setUserDn(props.getAdminUser());
+ ctx.setPassword(props.getAdminPassword());
ctx.afterPropertiesSet();
return ctx;
}
@@ -87,20 +98,35 @@ public BaseLdapPathContextSource contextSource() {
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
- if (isActiveDirectory) {
+ if (props.isActiveDirectory()) {
log.info("Active Directory support for LDAP has been enabled.");
}
- http
+ return http
.authorizeExchange()
.pathMatchers(AUTH_WHITELIST)
.permitAll()
.anyExchange()
.authenticated()
+
.and()
- .httpBasic();
+ .formLogin()
- return http.csrf().disable().build();
+ .and()
+ .logout()
+
+ .and()
+ .csrf().disable()
+ .build();
+ }
+
+ private static class UserDetailsMapper extends LdapUserDetailsMapper {
+ @Override
+ public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
+ Collection<? extends GrantedAuthority> authorities) {
+ UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
+ return new RbacLdapUser(userDetails);
+ }
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
index 1d237e01736..5db612f256e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
@@ -115,7 +115,7 @@ public ServerLogoutSuccessHandler defaultOidcLogoutHandler(final ReactiveClientR
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
- Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
+ Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.findFirst();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java
new file mode 100644
index 00000000000..037d2fd3020
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java
@@ -0,0 +1,60 @@
+package com.provectus.kafka.ui.config.auth;
+
+import java.util.Collection;
+import java.util.stream.Collectors;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.userdetails.UserDetails;
+
+public class RbacLdapUser implements UserDetails, RbacUser {
+
+ private final UserDetails userDetails;
+
+ public RbacLdapUser(UserDetails userDetails) {
+ this.userDetails = userDetails;
+ }
+
+ @Override
+ public String name() {
+ return userDetails.getUsername();
+ }
+
+ @Override
+ public Collection<String> groups() {
+ return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
+ }
+
+ @Override
+ public Collection<? extends GrantedAuthority> getAuthorities() {
+ return userDetails.getAuthorities();
+ }
+
+ @Override
+ public String getPassword() {
+ return userDetails.getPassword();
+ }
+
+ @Override
+ public String getUsername() {
+ return userDetails.getUsername();
+ }
+
+ @Override
+ public boolean isAccountNonExpired() {
+ return userDetails.isAccountNonExpired();
+ }
+
+ @Override
+ public boolean isAccountNonLocked() {
+ return userDetails.isAccountNonLocked();
+ }
+
+ @Override
+ public boolean isCredentialsNonExpired() {
+ return userDetails.isCredentialsNonExpired();
+ }
+
+ @Override
+ public boolean isEnabled() {
+ return userDetails.isEnabled();
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java
new file mode 100644
index 00000000000..c38e83238af
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java
@@ -0,0 +1,21 @@
+package com.provectus.kafka.ui.config.auth.condition;
+
+import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+
+public class ActiveDirectoryCondition extends AllNestedConditions {
+
+ public ActiveDirectoryCondition() {
+ super(ConfigurationPhase.PARSE_CONFIGURATION);
+ }
+
+ @ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
+ public static class OnAuthType {
+
+ }
+
+ @ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
+ public static class OnActiveDirectory {
+
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
index 3178feae34c..e964f64a9bf 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
@@ -12,6 +12,7 @@
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
+import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
@@ -19,11 +20,11 @@
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
-import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
@@ -34,6 +35,7 @@
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
@@ -50,10 +52,11 @@ public class AccessControlService {
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
+ private final RoleBasedAccessControlProperties properties;
+ private final Environment environment;
private boolean rbacEnabled = false;
- private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
- private final RoleBasedAccessControlProperties properties;
+ private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();
@PostConstruct
public void init() {
@@ -63,21 +66,26 @@ public void init() {
}
rbacEnabled = true;
- this.extractors = properties.getRoles()
+ this.oauthExtractors = properties.getRoles()
.stream()
.map(role -> role.getSubjects()
.stream()
- .map(provider -> switch (provider.getProvider()) {
+ .map(Subject::getProvider)
+ .distinct()
+ .map(provider -> switch (provider) {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
- case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
- }).collect(Collectors.toSet()))
+ default -> null;
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toSet()))
.flatMap(Set::stream)
.collect(Collectors.toSet());
- if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
- && !properties.getRoles().isEmpty()) {
+ if (!properties.getRoles().isEmpty()
+ && "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
+ && (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
}
}
@@ -354,8 +362,8 @@ private boolean isKsqlAccessible(AccessContext context, AuthenticatedUser user)
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
}
- public Set<ProviderAuthorityExtractor> getExtractors() {
- return extractors;
+ public Set<ProviderAuthorityExtractor> getOauthExtractors() {
+ return oauthExtractors;
}
public List<Role> getRoles() {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java
deleted file mode 100644
index 6284bb29234..00000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.provectus.kafka.ui.service.rbac.extractor;
-
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-import lombok.extern.slf4j.Slf4j;
-import reactor.core.publisher.Mono;
-
-@Slf4j
-public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {
-
- @Override
- public boolean isApplicable(String provider) {
- return false; // TODO #2752
- }
-
- @Override
- public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
- return Mono.just(Collections.emptySet()); // TODO #2752
- }
-
-}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
new file mode 100644
index 00000000000..e24fc0aeda9
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
@@ -0,0 +1,70 @@
+package com.provectus.kafka.ui.service.rbac.extractor;
+
+import com.provectus.kafka.ui.config.auth.LdapProperties;
+import com.provectus.kafka.ui.model.rbac.Role;
+import com.provectus.kafka.ui.model.rbac.provider.Provider;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.context.ApplicationContext;
+import org.springframework.ldap.core.DirContextOperations;
+import org.springframework.ldap.core.support.BaseLdapPathContextSource;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
+import org.springframework.util.Assert;
+
+@Slf4j
+public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
+
+ private final AccessControlService acs;
+ private final LdapProperties props;
+
+ private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
+ String role = record.get(getGroupRoleAttribute()).get(0);
+ return new SimpleGrantedAuthority(role);
+ };
+
+ public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
+ super(context.getBean(BaseLdapPathContextSource.class), null);
+ this.acs = context.getBean(AccessControlService.class);
+ this.props = context.getBean(LdapProperties.class);
+ }
+
+ @Override
+ public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
+ return acs.getRoles()
+ .stream()
+ .map(Role::getSubjects)
+ .flatMap(List::stream)
+ .filter(s -> s.getProvider().equals(Provider.LDAP))
+ .filter(s -> s.getType().equals("group"))
+ .flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
+ .collect(Collectors.toSet());
+ }
+
+ private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
+ Assert.notNull(groupSearchBase, "groupSearchBase is empty");
+
+ log.trace(
+ "Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
+ username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
+
+ var ldapTemplate = getLdapTemplate();
+ ldapTemplate.setIgnoreNameNotFoundException(true);
+
+ Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
+ groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
+ new String[] {props.getGroupRoleAttribute()});
+
+ return userRoles.stream()
+ .map(authorityMapper)
+ .peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
+ .collect(Collectors.toSet());
+ }
+
+}
| null | test | test | 2023-04-27T04:01:38 | "2022-10-14T10:38:54Z" | Haarolean | train |
provectus/kafka-ui/3328_3700 | provectus/kafka-ui | provectus/kafka-ui/3328 | provectus/kafka-ui/3700 | [
"connected"
] | da3932e3422e45bdb0dc27cf538b79c8bf872602 | 744bdb32a310306eefe8641923d712db697b1c70 | [
"WIll be fixed within #2752 "
] | [
"fail with error msg here",
"rm implements",
"done",
"done"
] | "2023-04-21T21:40:05Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | LDAP: Implement logout | **Describe the bug** (Actual behavior)
When kafka-ui is configured with LDAP authentication and the logout button is clicked, the following error is shown:
```
{
code: 5000,
message: "404 NOT_FOUND",
timestamp: 1675945080674,
requestId: "5e1d68c0-13165",
fieldsErrors: null,
stackTrace: "org.springframework.web.server.ResponseStatusException: 404 NOT_FOUND
at org.springframework.web.reactive.resource.ResourceWebHandler.lambda$handle$1(ResourceWebHandler.java:408)
Suppressed: The stacktrace has been enhanced by Reactor, refer to additional information below:
Error has been observed at the following site(s):
*__checkpoint β’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain]
*__checkpoint β’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.authorization.AuthorizationWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.authorization.ExceptionTranslationWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.authentication.logout.LogoutWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.savedrequest.ServerRequestCacheWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.context.SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.authentication.AuthenticationWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.context.ReactorContextWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.header.HttpHeaderWriterWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.config.web.server.ServerHttpSecurity$ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain]
*__checkpoint β’ org.springframework.boot.actuate.metrics.web.reactive.server.MetricsWebFilter [DefaultWebFilterChain]
*__checkpoint β’ HTTP GET "/logout" [ExceptionHandlingWebHandler]
Original Stack Trace:
at org.springframework.web.reactive.resource.ResourceWebHandler.lambda$handle$1(ResourceWebHandler.java:408)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:44)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:181)
at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.drain(FluxConcatMap.java:368)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.onSubscribe(FluxConcatMap.java:219)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:87)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:157)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.innerNext(FluxConcatMap.java:282)
at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863)
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129)
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2398)
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2194)
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2068)
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96)
at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.drain(FluxConcatMap.java:451)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.onSubscribe(FluxConcatMap.java:219)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:87)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82)
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onComplete(MonoPeekTerminal.java:299)
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onComplete(MonoPeekTerminal.java:299)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:148)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113)
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:101)
at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.innerNext(FluxConcatMap.java:282)
at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863)
at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1816)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:151)
at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:122)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113)
at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onNext(FluxPeekFuseable.java:854)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.tryEmitScalar(FluxFlatMap.java:488)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.onNext(FluxFlatMap.java:421)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:210)
at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:272)
at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:230)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.onSubscribe(FluxFlatMap.java:371)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:87)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.drain(FluxConcatMap.java:451)
at reactor.core.publisher.FluxConcatMap$ConcatMapImmediate.onSubscribe(FluxConcatMap.java:219)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:87)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166)
at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onComplete(FluxPeekFuseable.java:940)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:85)
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2400)
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2194)
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2068)
at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55)
at reactor.core.publisher.Mono.subscribe(Mono.java:4455)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82)
at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.checkTerminated(FluxFlatMap.java:846)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:608)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:588)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.onComplete(FluxFlatMap.java:465)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onComplete(FluxPeekFuseable.java:277)
at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:294)
at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:230)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144)
at reactor.core.publisher.FluxFlatMap$FlatMapMain.onSubscribe(FluxFlatMap.java:371)
at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:165)
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:87)
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52)
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:157)
at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1816)
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onComplete(FluxDefaultIfEmpty.java:109)
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166)
at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onComplete(FluxMap.java:275)
at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1817)
at reactor.core.publisher.MonoCacheTime$CoordinatorSubscriber.signalCached(MonoCacheTime.java:337)
at reactor.core.publisher.MonoCacheTime$CoordinatorSubscriber.onNext(MonoCacheTime.java:354)
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
at reactor.core.publisher.MonoPublishOn$PublishOnSubscriber.run(MonoPublishOn.java:181)
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:68)
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:28)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:304)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1136)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:635)
at java.base/java.lang.Thread.run(Thread.java:833)
"
}
```
**Expected behavior**
The logout button works when LDAP authentication is configured.
**Screenshots**

| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
new file mode 100644
index 00000000000..13119b3bb94
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
@@ -0,0 +1,26 @@
+package com.provectus.kafka.ui.config.auth;
+
+import lombok.Data;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+@ConfigurationProperties("spring.ldap")
+@Data
+public class LdapProperties {
+
+ private String urls;
+ private String base;
+ private String adminUser;
+ private String adminPassword;
+ private String userFilterSearchBase;
+ private String userFilterSearchFilter;
+
+ @Value("${oauth2.ldap.activeDirectory:false}")
+ private boolean isActiveDirectory;
+ @Value("${oauth2.ldap.aΡtiveDirectory.domain:@null}")
+ private String activeDirectoryDomain;
+
+ @Value("${oauth2.ldap.groupRoleAttribute:cn}")
+ private String groupRoleAttribute;
+
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
index 0ba5c231f4b..fae1125239e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
@@ -1,13 +1,23 @@
package com.provectus.kafka.ui.config.auth;
+import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
+
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
+import java.util.Collection;
import java.util.List;
+import javax.annotation.Nullable;
+import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
+import org.springframework.context.annotation.Primary;
+import org.springframework.ldap.core.DirContextOperations;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
@@ -16,70 +26,71 @@
import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter;
import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity;
import org.springframework.security.config.web.server.ServerHttpSecurity;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.BindAuthenticator;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
+import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@Configuration
@EnableWebFluxSecurity
@ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
@Import(LdapAutoConfiguration.class)
+@EnableConfigurationProperties(LdapProperties.class)
+@RequiredArgsConstructor
@Slf4j
-public class LdapSecurityConfig extends AbstractAuthSecurityConfig {
-
- @Value("${spring.ldap.urls}")
- private String ldapUrls;
- @Value("${spring.ldap.dn.pattern:#{null}}")
- private String ldapUserDnPattern;
- @Value("${spring.ldap.adminUser:#{null}}")
- private String adminUser;
- @Value("${spring.ldap.adminPassword:#{null}}")
- private String adminPassword;
- @Value("${spring.ldap.userFilter.searchBase:#{null}}")
- private String userFilterSearchBase;
- @Value("${spring.ldap.userFilter.searchFilter:#{null}}")
- private String userFilterSearchFilter;
-
- @Value("${oauth2.ldap.activeDirectory:false}")
- private boolean isActiveDirectory;
- @Value("${oauth2.ldap.aΡtiveDirectory.domain:#{null}}")
- private String activeDirectoryDomain;
+public class LdapSecurityConfig {
+
+ private final LdapProperties props;
@Bean
- public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource) {
+ public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
+ ApplicationContext context,
+ @Nullable AccessControlService acs) {
+ var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
- if (ldapUserDnPattern != null) {
- ba.setUserDnPatterns(new String[] {ldapUserDnPattern});
+ if (props.getBase() != null) {
+ ba.setUserDnPatterns(new String[] {props.getBase()});
}
- if (userFilterSearchFilter != null) {
+ if (props.getUserFilterSearchFilter() != null) {
LdapUserSearch userSearch =
- new FilterBasedLdapUserSearch(userFilterSearchBase, userFilterSearchFilter, contextSource);
+ new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(),
+ contextSource);
ba.setUserSearch(userSearch);
}
AbstractLdapAuthenticationProvider authenticationProvider;
- if (!isActiveDirectory) {
- authenticationProvider = new LdapAuthenticationProvider(ba);
+ if (!props.isActiveDirectory()) {
+ authenticationProvider = rbacEnabled
+ ? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
+ : new LdapAuthenticationProvider(ba);
} else {
- authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(activeDirectoryDomain, ldapUrls);
+ authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
+ props.getUrls()); // TODO Issue #3741
authenticationProvider.setUseAuthenticationRequestCredentials(true);
}
+ if (rbacEnabled) {
+ authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper());
+ }
+
AuthenticationManager am = new ProviderManager(List.of(authenticationProvider));
return new ReactiveAuthenticationManagerAdapter(am);
}
@Bean
+ @Primary
public BaseLdapPathContextSource contextSource() {
LdapContextSource ctx = new LdapContextSource();
- ctx.setUrl(ldapUrls);
- ctx.setUserDn(adminUser);
- ctx.setPassword(adminPassword);
+ ctx.setUrl(props.getUrls());
+ ctx.setUserDn(props.getAdminUser());
+ ctx.setPassword(props.getAdminPassword());
ctx.afterPropertiesSet();
return ctx;
}
@@ -87,20 +98,35 @@ public BaseLdapPathContextSource contextSource() {
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
- if (isActiveDirectory) {
+ if (props.isActiveDirectory()) {
log.info("Active Directory support for LDAP has been enabled.");
}
- http
+ return http
.authorizeExchange()
.pathMatchers(AUTH_WHITELIST)
.permitAll()
.anyExchange()
.authenticated()
+
.and()
- .httpBasic();
+ .formLogin()
- return http.csrf().disable().build();
+ .and()
+ .logout()
+
+ .and()
+ .csrf().disable()
+ .build();
+ }
+
+ private static class UserDetailsMapper extends LdapUserDetailsMapper {
+ @Override
+ public UserDetails mapUserFromContext(DirContextOperations ctx, String username,
+ Collection<? extends GrantedAuthority> authorities) {
+ UserDetails userDetails = super.mapUserFromContext(ctx, username, authorities);
+ return new RbacLdapUser(userDetails);
+ }
}
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
index 1d237e01736..5db612f256e 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/OAuthSecurityConfig.java
@@ -115,7 +115,7 @@ public ServerLogoutSuccessHandler defaultOidcLogoutHandler(final ReactiveClientR
@Nullable
private ProviderAuthorityExtractor getExtractor(final String providerId, AccessControlService acs) {
final String provider = getProviderByProviderId(providerId);
- Optional<ProviderAuthorityExtractor> extractor = acs.getExtractors()
+ Optional<ProviderAuthorityExtractor> extractor = acs.getOauthExtractors()
.stream()
.filter(e -> e.isApplicable(provider))
.findFirst();
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java
new file mode 100644
index 00000000000..037d2fd3020
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/RbacLdapUser.java
@@ -0,0 +1,60 @@
+package com.provectus.kafka.ui.config.auth;
+
+import java.util.Collection;
+import java.util.stream.Collectors;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.userdetails.UserDetails;
+
+public class RbacLdapUser implements UserDetails, RbacUser {
+
+ private final UserDetails userDetails;
+
+ public RbacLdapUser(UserDetails userDetails) {
+ this.userDetails = userDetails;
+ }
+
+ @Override
+ public String name() {
+ return userDetails.getUsername();
+ }
+
+ @Override
+ public Collection<String> groups() {
+ return userDetails.getAuthorities().stream().map(GrantedAuthority::getAuthority).collect(Collectors.toSet());
+ }
+
+ @Override
+ public Collection<? extends GrantedAuthority> getAuthorities() {
+ return userDetails.getAuthorities();
+ }
+
+ @Override
+ public String getPassword() {
+ return userDetails.getPassword();
+ }
+
+ @Override
+ public String getUsername() {
+ return userDetails.getUsername();
+ }
+
+ @Override
+ public boolean isAccountNonExpired() {
+ return userDetails.isAccountNonExpired();
+ }
+
+ @Override
+ public boolean isAccountNonLocked() {
+ return userDetails.isAccountNonLocked();
+ }
+
+ @Override
+ public boolean isCredentialsNonExpired() {
+ return userDetails.isCredentialsNonExpired();
+ }
+
+ @Override
+ public boolean isEnabled() {
+ return userDetails.isEnabled();
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java
new file mode 100644
index 00000000000..c38e83238af
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/condition/ActiveDirectoryCondition.java
@@ -0,0 +1,21 @@
+package com.provectus.kafka.ui.config.auth.condition;
+
+import org.springframework.boot.autoconfigure.condition.AllNestedConditions;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+
+public class ActiveDirectoryCondition extends AllNestedConditions {
+
+ public ActiveDirectoryCondition() {
+ super(ConfigurationPhase.PARSE_CONFIGURATION);
+ }
+
+ @ConditionalOnProperty(value = "auth.type", havingValue = "LDAP")
+ public static class OnAuthType {
+
+ }
+
+ @ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false)
+ public static class OnActiveDirectory {
+
+ }
+}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
index 3178feae34c..e964f64a9bf 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java
@@ -12,6 +12,7 @@
import com.provectus.kafka.ui.model.rbac.Permission;
import com.provectus.kafka.ui.model.rbac.Resource;
import com.provectus.kafka.ui.model.rbac.Role;
+import com.provectus.kafka.ui.model.rbac.Subject;
import com.provectus.kafka.ui.model.rbac.permission.ConnectAction;
import com.provectus.kafka.ui.model.rbac.permission.ConsumerGroupAction;
import com.provectus.kafka.ui.model.rbac.permission.SchemaAction;
@@ -19,11 +20,11 @@
import com.provectus.kafka.ui.service.rbac.extractor.CognitoAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GithubAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.GoogleAuthorityExtractor;
-import com.provectus.kafka.ui.service.rbac.extractor.LdapAuthorityExtractor;
import com.provectus.kafka.ui.service.rbac.extractor.ProviderAuthorityExtractor;
import jakarta.annotation.PostConstruct;
import java.util.Collections;
import java.util.List;
+import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.regex.Pattern;
@@ -34,6 +35,7 @@
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.core.env.Environment;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.core.context.ReactiveSecurityContextHolder;
import org.springframework.security.core.context.SecurityContext;
@@ -50,10 +52,11 @@ public class AccessControlService {
@Nullable
private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository;
+ private final RoleBasedAccessControlProperties properties;
+ private final Environment environment;
private boolean rbacEnabled = false;
- private Set<ProviderAuthorityExtractor> extractors = Collections.emptySet();
- private final RoleBasedAccessControlProperties properties;
+ private Set<ProviderAuthorityExtractor> oauthExtractors = Collections.emptySet();
@PostConstruct
public void init() {
@@ -63,21 +66,26 @@ public void init() {
}
rbacEnabled = true;
- this.extractors = properties.getRoles()
+ this.oauthExtractors = properties.getRoles()
.stream()
.map(role -> role.getSubjects()
.stream()
- .map(provider -> switch (provider.getProvider()) {
+ .map(Subject::getProvider)
+ .distinct()
+ .map(provider -> switch (provider) {
case OAUTH_COGNITO -> new CognitoAuthorityExtractor();
case OAUTH_GOOGLE -> new GoogleAuthorityExtractor();
case OAUTH_GITHUB -> new GithubAuthorityExtractor();
- case LDAP, LDAP_AD -> new LdapAuthorityExtractor();
- }).collect(Collectors.toSet()))
+ default -> null;
+ })
+ .filter(Objects::nonNull)
+ .collect(Collectors.toSet()))
.flatMap(Set::stream)
.collect(Collectors.toSet());
- if ((clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())
- && !properties.getRoles().isEmpty()) {
+ if (!properties.getRoles().isEmpty()
+ && "oauth2".equalsIgnoreCase(environment.getProperty("auth.type"))
+ && (clientRegistrationRepository == null || !clientRegistrationRepository.iterator().hasNext())) {
log.error("Roles are configured but no authentication methods are present. Authentication might fail.");
}
}
@@ -354,8 +362,8 @@ private boolean isKsqlAccessible(AccessContext context, AuthenticatedUser user)
return isAccessible(Resource.KSQL, null, user, context, requiredActions);
}
- public Set<ProviderAuthorityExtractor> getExtractors() {
- return extractors;
+ public Set<ProviderAuthorityExtractor> getOauthExtractors() {
+ return oauthExtractors;
}
public List<Role> getRoles() {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java
deleted file mode 100644
index 6284bb29234..00000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/LdapAuthorityExtractor.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.provectus.kafka.ui.service.rbac.extractor;
-
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
-import java.util.Collections;
-import java.util.Map;
-import java.util.Set;
-import lombok.extern.slf4j.Slf4j;
-import reactor.core.publisher.Mono;
-
-@Slf4j
-public class LdapAuthorityExtractor implements ProviderAuthorityExtractor {
-
- @Override
- public boolean isApplicable(String provider) {
- return false; // TODO #2752
- }
-
- @Override
- public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<String, Object> additionalParams) {
- return Mono.just(Collections.emptySet()); // TODO #2752
- }
-
-}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
new file mode 100644
index 00000000000..e24fc0aeda9
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
@@ -0,0 +1,70 @@
+package com.provectus.kafka.ui.service.rbac.extractor;
+
+import com.provectus.kafka.ui.config.auth.LdapProperties;
+import com.provectus.kafka.ui.model.rbac.Role;
+import com.provectus.kafka.ui.model.rbac.provider.Provider;
+import com.provectus.kafka.ui.service.rbac.AccessControlService;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.context.ApplicationContext;
+import org.springframework.ldap.core.DirContextOperations;
+import org.springframework.ldap.core.support.BaseLdapPathContextSource;
+import org.springframework.security.core.GrantedAuthority;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
+import org.springframework.util.Assert;
+
+@Slf4j
+public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
+
+ private final AccessControlService acs;
+ private final LdapProperties props;
+
+ private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
+ String role = record.get(getGroupRoleAttribute()).get(0);
+ return new SimpleGrantedAuthority(role);
+ };
+
+ public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
+ super(context.getBean(BaseLdapPathContextSource.class), null);
+ this.acs = context.getBean(AccessControlService.class);
+ this.props = context.getBean(LdapProperties.class);
+ }
+
+ @Override
+ public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
+ return acs.getRoles()
+ .stream()
+ .map(Role::getSubjects)
+ .flatMap(List::stream)
+ .filter(s -> s.getProvider().equals(Provider.LDAP))
+ .filter(s -> s.getType().equals("group"))
+ .flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
+ .collect(Collectors.toSet());
+ }
+
+ private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
+ Assert.notNull(groupSearchBase, "groupSearchBase is empty");
+
+ log.trace(
+ "Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
+ username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
+
+ var ldapTemplate = getLdapTemplate();
+ ldapTemplate.setIgnoreNameNotFoundException(true);
+
+ Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
+ groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
+ new String[] {props.getGroupRoleAttribute()});
+
+ return userRoles.stream()
+ .map(authorityMapper)
+ .peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
+ .collect(Collectors.toSet());
+ }
+
+}
| null | train | test | 2023-04-27T04:01:38 | "2023-02-09T12:22:04Z" | joseacl | train |
provectus/kafka-ui/3651_3702 | provectus/kafka-ui | provectus/kafka-ui/3651 | provectus/kafka-ui/3702 | [
"connected"
] | 8ecb719e9b762a4bc132997dd660b8519c185ef6 | fb515871cba27686037d06e5a8dbaa417c681732 | [] | [] | "2023-04-24T06:24:16Z" | [
"scope/QA",
"scope/AQA"
] | [e2e] Check streams and tables displaying at the list | Autotest implementation for:
https://app.qase.io/project/KAFKAUI?case=284&previewMode=side&suite=8 | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java"
] | [
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/TopicsTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/WizardTest.java",
"kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java"
] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
index a041defc93e..b4cc54a38f3 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/services/ApiService.java
@@ -36,29 +36,31 @@
@Slf4j
public class ApiService extends BaseSource {
+ private final ApiClient apiClient = new ApiClient().setBasePath(BASE_API_URL);
+
@SneakyThrows
private TopicsApi topicApi() {
- return new TopicsApi(new ApiClient().setBasePath(BASE_API_URL));
+ return new TopicsApi(apiClient);
}
@SneakyThrows
private SchemasApi schemaApi() {
- return new SchemasApi(new ApiClient().setBasePath(BASE_API_URL));
+ return new SchemasApi(apiClient);
}
@SneakyThrows
private KafkaConnectApi connectorApi() {
- return new KafkaConnectApi(new ApiClient().setBasePath(BASE_API_URL));
+ return new KafkaConnectApi(apiClient);
}
@SneakyThrows
private MessagesApi messageApi() {
- return new MessagesApi(new ApiClient().setBasePath(BASE_API_URL));
+ return new MessagesApi(apiClient);
}
@SneakyThrows
private KsqlApi ksqlApi() {
- return new KsqlApi(new ApiClient().setBasePath(BASE_API_URL));
+ return new KsqlApi(apiClient);
}
@SneakyThrows
| diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
index d96bbb7f3a2..3ce086ee7bb 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java
@@ -2,6 +2,7 @@
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.BROKERS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.KSQL_DB_SUITE_ID;
+import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.SCHEMAS_SUITE_ID;
import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.TOPICS_PROFILE_SUITE_ID;
import static com.provectus.kafka.ui.utilities.qase.enums.State.TO_BE_AUTOMATED;
@@ -35,37 +36,65 @@ public void testCaseC() {
}
@Automation(state = TO_BE_AUTOMATED)
- @Suite(id = KSQL_DB_SUITE_ID)
- @QaseId(284)
+ @Suite(id = BROKERS_SUITE_ID)
+ @QaseId(331)
@Test
public void testCaseD() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = BROKERS_SUITE_ID)
- @QaseId(331)
+ @QaseId(332)
@Test
public void testCaseE() {
}
@Automation(state = TO_BE_AUTOMATED)
- @Suite(id = BROKERS_SUITE_ID)
- @QaseId(332)
+ @Suite(id = TOPICS_PROFILE_SUITE_ID)
+ @QaseId(335)
@Test
public void testCaseF() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
- @QaseId(335)
+ @QaseId(336)
@Test
public void testCaseG() {
}
@Automation(state = TO_BE_AUTOMATED)
@Suite(id = TOPICS_PROFILE_SUITE_ID)
- @QaseId(336)
+ @QaseId(343)
@Test
public void testCaseH() {
}
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = KSQL_DB_SUITE_ID)
+ @QaseId(344)
+ @Test
+ public void testCaseI() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = SCHEMAS_SUITE_ID)
+ @QaseId(345)
+ @Test
+ public void testCaseJ() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = SCHEMAS_SUITE_ID)
+ @QaseId(346)
+ @Test
+ public void testCaseK() {
+ }
+
+ @Automation(state = TO_BE_AUTOMATED)
+ @Suite(id = TOPICS_PROFILE_SUITE_ID)
+ @QaseId(347)
+ @Test
+ public void testCaseL() {
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/TopicsTest.java
index 76f8506debb..758827e21bf 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/TopicsTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/TopicsTest.java
@@ -92,4 +92,28 @@ public void testCaseM() {
@Test
public void testCaseN() {
}
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(337)
+ @Test
+ public void testCaseO() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(339)
+ @Test
+ public void testCaseP() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(341)
+ @Test
+ public void testCaseQ() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(342)
+ @Test
+ public void testCaseR() {
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/WizardTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/WizardTest.java
index 9621104b1a1..c74c1ba6f07 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/WizardTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/suite/WizardTest.java
@@ -14,4 +14,16 @@ public class WizardTest extends BaseManualTest {
@Test
public void testCaseA() {
}
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(338)
+ @Test
+ public void testCaseB() {
+ }
+
+ @Automation(state = NOT_AUTOMATED)
+ @QaseId(340)
+ @Test
+ public void testCaseC() {
+ }
}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
index c4bbe0def4c..22ef931bf13 100644
--- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
+++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java
@@ -1,5 +1,6 @@
package com.provectus.kafka.ui.smokesuite.ksqldb;
+import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS;
import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES;
import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB;
import static org.apache.commons.lang3.RandomStringUtils.randomAlphabetic;
@@ -39,17 +40,21 @@ public void beforeClass() {
FIRST_TABLE.getName(), SECOND_TABLE.getName()));
}
- @QaseId(86)
+ @QaseId(284)
@Test(priority = 1)
- public void clearResultsForExecutedRequest() {
- navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+ public void streamsAndTablesVisibilityCheck() {
+ naviSideBar
+ .openSideMenu(KSQL_DB);
+ ksqlDbList
+ .waitUntilScreenReady();
SoftAssert softly = new SoftAssert();
- softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
- softly.assertAll();
- ksqlQueryForm
- .clickClearResultsBtn();
- softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertTrue(ksqlDbList.getTableByName(FIRST_TABLE.getName()).isVisible(), "getTableByName()");
+ softly.assertTrue(ksqlDbList.getTableByName(SECOND_TABLE.getName()).isVisible(), "getTableByName()");
softly.assertAll();
+ ksqlDbList
+ .openDetailsTab(STREAMS)
+ .waitUntilScreenReady();
+ Assert.assertTrue(ksqlDbList.getStreamByName(DEFAULT_STREAM.getName()).isVisible(), "getStreamByName()");
}
@QaseId(276)
@@ -68,11 +73,31 @@ public void checkShowTablesRequestExecution() {
navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
SoftAssert softly = new SoftAssert();
softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
- softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(), "getItemByName()");
- softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(), "getItemByName()");
+ softly.assertTrue(ksqlQueryForm.getItemByName(FIRST_TABLE.getName()).isVisible(),
+ String.format("getItemByName(%s)", FIRST_TABLE.getName()));
+ softly.assertTrue(ksqlQueryForm.getItemByName(SECOND_TABLE.getName()).isVisible(),
+ String.format("getItemByName(%s)", SECOND_TABLE.getName()));
+ softly.assertAll();
+ }
+
+ @QaseId(86)
+ @Test(priority = 4)
+ public void clearResultsForExecutedRequest() {
+ navigateToKsqlDbAndExecuteRequest(SHOW_TABLES.getQuery());
+ SoftAssert softly = new SoftAssert();
+ softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
+ softly.assertAll();
+ ksqlQueryForm
+ .clickClearResultsBtn();
+ softly.assertFalse(ksqlQueryForm.areResultsVisible(), "areResultsVisible()");
softly.assertAll();
}
+ @AfterClass(alwaysRun = true)
+ public void afterClass() {
+ TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
+ }
+
@Step
private void navigateToKsqlDbAndExecuteRequest(String query) {
naviSideBar
@@ -85,9 +110,4 @@ private void navigateToKsqlDbAndExecuteRequest(String query) {
.setQuery(query)
.clickExecuteBtn(query);
}
-
- @AfterClass(alwaysRun = true)
- public void afterClass() {
- TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
- }
}
| train | test | 2023-04-21T19:39:30 | "2023-04-11T07:34:36Z" | VladSenyuta | train |
provectus/kafka-ui/3251_3715 | provectus/kafka-ui | provectus/kafka-ui/3251 | provectus/kafka-ui/3715 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | f6fe14cea55675f03656428aa3366c556ccddf22 | [
"Hello there laser! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hey, thanks for reporting the issue.\r\n\r\nThat's really funny, I suppose if you live on Earth that's a bug.\r\n\r\nWe'll take a look :)",
"@Haarolean the issue was reproduced: \r\n\r\n**Steps:**\r\nAdd a message to the Topic with timestamp 1/27/2023, 22:09:00\r\nChange computer timezone to be midnight\r\n\r\n**Actual result:** Messageβs timestamp displayed as 1/28/2023, 24:09:00 instead of 1/28/2023, 00:09:00",
"I believe this issue is related to the language configuration of the current browser, I've tested that if your language is set to en-US, it displays 24:00:00. However, if you change to another language, for example, zh-CN, you will get 00:00:00\r\n<img width=\"579\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/227832716-b01cb8aa-e1d1-4b6a-b50f-59ded3621d40.png\">\r\n<img width=\"580\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/227832764-4400e01f-4da4-4c9e-96a1-1581eda34e25.png\">\r\n",
"@Mano-Liaoyan that's right, we take the user's locale into consideration, but this shouldn't happen.",
"Sorry, I am not quite understand what you mean. In src/components/Topics/Topic/Messages/Message.tsx we can find that it use formatTimeStamp to get the date\r\n\r\nwhich is a function declared in /src/lib/dateTimeHelpers.ts\r\n<img width=\"465\" alt=\"image\" src=\"https://user-images.githubusercontent.com/43143420/227951774-34faae9e-1250-430a-9746-deb9ab4f62a2.png\">\r\nThis shows that it use the user's locale to determine the time style. Do you mean we need to force the user to use a specific locale in order to show 00:00:00 ? Or, do you mean we convert 24 to 0 οΌ",
"@Mano-Liaoyan we introduced this code to consider user's browser's locale to format the dates (e.g. mm.dd for non-US, dd.mm for US), guess the timezone calculation is a side effect. We had some tasks to make it configurable (to view dates in a different tz than a browser's timezone), so we have to decide the scope for this one issue.\r\nWe either:\r\n1) Deal with timezone issues and format all the dates properly\r\n2) Do not consider timezones for the timestamps for now and just use the locale for formatting the date.\r\n\r\nIf you wanna grab this, let me know. Otherwise, we'd do this anyway within this release, we've got enough bandwidth rn."
] | [] | "2023-04-26T09:19:05Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Messages: UI displays nonsensical timestamps (e.g. "1/2/2023, 24:15:00") | ## Problem
Our topic contains a record with timestamp `1672647300792`, which is Monday, January 2, 2023 8:15:00.792 AM in GMT.
When I view this record in the UI using a computer configured with GMT-8, the timestamp displays as `1/2/2023, 24:15:00`.
## Expected Behavior
I would expect this timestamp to display in the UI as `1/2/2023, 00:15:00`.
## Additional Context
This bug reproduces using the current head of master, commit `ceb9c5dd85ce5543b9863c5bc500e67467c358fb`.
## Screenshots
<img width="1525" alt="Screen Shot 2023-01-22 at 10 02 56 PM" src="https://user-images.githubusercontent.com/884507/213975137-128428ca-e409-4134-9923-9f2a5b893a8b.png">
| [
"kafka-ui-react-app/src/lib/dateTimeHelpers.ts"
] | [
"kafka-ui-react-app/src/lib/dateTimeHelpers.ts"
] | [] | diff --git a/kafka-ui-react-app/src/lib/dateTimeHelpers.ts b/kafka-ui-react-app/src/lib/dateTimeHelpers.ts
index 3dce0edd78a..148a70d2a3d 100644
--- a/kafka-ui-react-app/src/lib/dateTimeHelpers.ts
+++ b/kafka-ui-react-app/src/lib/dateTimeHelpers.ts
@@ -1,6 +1,6 @@
export const formatTimestamp = (
timestamp?: number | string | Date,
- format: Intl.DateTimeFormatOptions = { hour12: false }
+ format: Intl.DateTimeFormatOptions = { hourCycle: 'h23' }
): string => {
if (!timestamp) {
return '';
@@ -8,7 +8,6 @@ export const formatTimestamp = (
// empty array gets the default one from the browser
const date = new Date(timestamp);
-
// invalid date
if (Number.isNaN(date.getTime())) {
return '';
| null | val | test | 2023-05-02T14:34:57 | "2023-01-23T06:07:40Z" | laser | train |
provectus/kafka-ui/3117_3718 | provectus/kafka-ui | provectus/kafka-ui/3117 | provectus/kafka-ui/3718 | [
"keyword_pr_to_issue"
] | 5efb380c42a97418b29fd5e72ed8372be7d1b48e | 4e255220788f39c15239fb1dda4e723339940ce9 | [
"@David-DB88 Implementation is invalid.\r\n#3117 clearly states that the value should be fetched from API (the second screenshot is of deprecated fields in API response).\r\n\r\n```\r\nconst keyFormat = searchParams.get('keySerde') || '';\r\n const valueFormat = searchParams.get('valueSerde') || '';\r\n```\r\nThis will display the currently selected serdes no matter which serde is used for the message itself.\r\n",
"Got it. Thank you for pointing out."
] | [] | "2023-04-26T10:27:57Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | [FE] Update display of key/value format | 
key/value types are not displayed, they're currently being fetched from `keyFormat` and `valueFormat` fields, but:

| [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx",
"kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
index fb4e258cca5..dd5cfae7488 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx
@@ -142,6 +142,8 @@ const Message: React.FC<Props> = ({
timestampType={timestampType}
keySize={keySize}
contentSize={valueSize}
+ keySerde={keySerde}
+ valueSerde={valueSerde}
/>
)}
</>
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
index 93616ca432a..d1237ba0d40 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/MessageContent.tsx
@@ -3,7 +3,6 @@ import EditorViewer from 'components/common/EditorViewer/EditorViewer';
import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted';
import { SchemaType, TopicMessageTimestampTypeEnum } from 'generated-sources';
import { formatTimestamp } from 'lib/dateTimeHelpers';
-import { useSearchParams } from 'react-router-dom';
import * as S from './MessageContent.styled';
@@ -17,6 +16,8 @@ export interface MessageContentProps {
timestampType?: TopicMessageTimestampTypeEnum;
keySize?: number;
contentSize?: number;
+ keySerde?: string;
+ valueSerde?: string;
}
const MessageContent: React.FC<MessageContentProps> = ({
@@ -27,12 +28,10 @@ const MessageContent: React.FC<MessageContentProps> = ({
timestampType,
keySize,
contentSize,
+ keySerde,
+ valueSerde,
}) => {
const [activeTab, setActiveTab] = React.useState<Tab>('content');
- const [searchParams] = useSearchParams();
- const keyFormat = searchParams.get('keySerde') || '';
- const valueFormat = searchParams.get('valueSerde') || '';
-
const activeTabContent = () => {
switch (activeTab) {
case 'content':
@@ -110,7 +109,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Key Serde</S.MetadataLabel>
<span>
- <S.MetadataValue>{keyFormat}</S.MetadataValue>
+ <S.MetadataValue>{keySerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={keySize} />
</S.MetadataMeta>
@@ -120,7 +119,7 @@ const MessageContent: React.FC<MessageContentProps> = ({
<S.Metadata>
<S.MetadataLabel>Value Serde</S.MetadataLabel>
<span>
- <S.MetadataValue>{valueFormat}</S.MetadataValue>
+ <S.MetadataValue>{valueSerde}</S.MetadataValue>
<S.MetadataMeta>
Size: <BytesFormatted value={contentSize} />
</S.MetadataMeta>
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
index 91310a30e41..d76455242cf 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/MessageContent/__tests__/MessageContent.spec.tsx
@@ -20,6 +20,8 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
headers={{ header: 'test' }}
timestamp={new Date(0)}
timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME}
+ keySerde="SchemaRegistry"
+ valueSerde="Avro"
{...props}
/>
</tbody>
@@ -27,42 +29,20 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => {
);
};
-const proto =
- 'syntax = "proto3";\npackage com.provectus;\n\nmessage TestProtoRecord {\n string f1 = 1;\n int32 f2 = 2;\n}\n';
-
global.TextEncoder = TextEncoder;
-const searchParamsContentAVRO = new URLSearchParams({
- keySerde: 'SchemaRegistry',
- valueSerde: 'AVRO',
- limit: '100',
-});
-
-const searchParamsContentJSON = new URLSearchParams({
- keySerde: 'SchemaRegistry',
- valueSerde: 'JSON',
- limit: '100',
-});
-
-const searchParamsContentPROTOBUF = new URLSearchParams({
- keySerde: 'SchemaRegistry',
- valueSerde: 'PROTOBUF',
- limit: '100',
-});
describe('MessageContent screen', () => {
beforeEach(() => {
- render(setupWrapper(), {
- initialEntries: [`/messages?${searchParamsContentAVRO}`],
- });
+ render(setupWrapper());
});
- describe('renders', () => {
- it('key format in document', () => {
+ describe('Checking keySerde and valueSerde', () => {
+ it('keySerde in document', () => {
expect(screen.getByText('SchemaRegistry')).toBeInTheDocument();
});
- it('content format in document', () => {
- expect(screen.getByText('AVRO')).toBeInTheDocument();
+ it('valueSerde in document', () => {
+ expect(screen.getByText('Avro')).toBeInTheDocument();
});
});
@@ -98,42 +78,3 @@ describe('MessageContent screen', () => {
});
});
});
-
-describe('checking content type depend on message type', () => {
- it('renders component with message having JSON type', () => {
- render(
- setupWrapper({
- messageContent: '{"data": "test"}',
- }),
- { initialEntries: [`/messages?${searchParamsContentJSON}`] }
- );
- expect(screen.getByText('JSON')).toBeInTheDocument();
- });
- it('renders component with message having AVRO type', () => {
- render(
- setupWrapper({
- messageContent: '{"data": "test"}',
- }),
- { initialEntries: [`/messages?${searchParamsContentAVRO}`] }
- );
- expect(screen.getByText('AVRO')).toBeInTheDocument();
- });
- it('renders component with message having PROTOBUF type', () => {
- render(
- setupWrapper({
- messageContent: proto,
- }),
- { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
- );
- expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
- });
- it('renders component with message having no type which is equal to having PROTOBUF type', () => {
- render(
- setupWrapper({
- messageContent: '',
- }),
- { initialEntries: [`/messages?${searchParamsContentPROTOBUF}`] }
- );
- expect(screen.getByText('PROTOBUF')).toBeInTheDocument();
- });
-});
| null | train | test | 2023-04-26T06:19:41 | "2022-12-22T08:17:10Z" | Haarolean | train |
provectus/kafka-ui/3677_3721 | provectus/kafka-ui | provectus/kafka-ui/3677 | provectus/kafka-ui/3721 | [
"connected"
] | abfdf97a9fae6d402854cdaee427f17be8db2401 | 725c95f348dac8c9e4b681b49282fb9e2275ee5b | [
"Hello @Haarolean \r\n\r\nhttps://user-images.githubusercontent.com/55583718/234350864-01cfaee5-f812-46aa-8060-9c8c1d874bad.mov\r\n\r\nI made the changes according to Figma File. Is this acceptable?",
"@inanc-can looks nice, please raise a PR :)"
] | [] | "2023-04-26T13:59:53Z" | [
"good first issue",
"scope/frontend",
"type/refactoring",
"status/accepted"
] | UI: Refactor the toggle component | <img width="213" alt="image" src="https://user-images.githubusercontent.com/1494347/232467567-5c09bd3d-c138-4ed7-bca8-8813e6e25d28.png">
<img width="210" alt="image" src="https://user-images.githubusercontent.com/1494347/232467822-b67ece0a-5c73-4168-8965-209ddd1a5130.png">
The toggle state in the current implementation of the component is hardly distinguishable between "off" and "on".
Implement the new one: https://www.figma.com/file/ZkvysdRUmt9p2PDNU3a9WU/Kafka-UI?node-id=2051-6715&t=nOA9ZBMnJjZxRvZk-0 | [
"kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts",
"kafka-ui-react-app/src/theme/theme.ts"
] | [
"kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts",
"kafka-ui-react-app/src/theme/theme.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts b/kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts
index 88239280594..0f4f2c1d11c 100644
--- a/kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts
+++ b/kafka-ui-react-app/src/components/common/Switch/Switch.styled.ts
@@ -39,6 +39,10 @@ export const StyledSlider = styled.span<Props>`
transition: 0.4s;
border-radius: 20px;
+ :hover {
+ background-color: ${({ theme }) => theme.switch.hover};
+ }
+
&::before {
position: absolute;
content: '';
diff --git a/kafka-ui-react-app/src/theme/theme.ts b/kafka-ui-react-app/src/theme/theme.ts
index 33dbf1c619e..ea833814221 100644
--- a/kafka-ui-react-app/src/theme/theme.ts
+++ b/kafka-ui-react-app/src/theme/theme.ts
@@ -235,12 +235,13 @@ const baseTheme = {
color: Colors.neutral[90],
},
switch: {
- unchecked: Colors.brand[30],
+ unchecked: Colors.neutral[20],
+ hover: Colors.neutral[40],
checked: Colors.brand[50],
circle: Colors.neutral[0],
disabled: Colors.neutral[10],
checkedIcon: {
- backgroundColor: Colors.neutral[70],
+ backgroundColor: Colors.neutral[10],
},
},
pageLoader: {
| null | train | test | 2023-05-02T07:38:16 | "2023-04-17T11:11:18Z" | Haarolean | train |
provectus/kafka-ui/3739_3746 | provectus/kafka-ui | provectus/kafka-ui/3739 | provectus/kafka-ui/3746 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | 86a7ba44fb4b47d60b43e43e6854e7c0962ed82f | [] | [] | "2023-04-27T13:24:54Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | SR: Updating an existing schema with valid syntax says the syntax is invalid | 1. Create a schema:
```
syntax = "proto3";
package com.provectus;
message TestProtoRecord {
string f1 = 1;
int32 f2 = 2;
int32 f3 = 3;
}
```
2. Edit this schema, change something while keeping the syntax valid, e.g. replace `int32 f3 = 3;` with `int32 f4 = 3;`
3. Profit | [
"kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx",
"kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts",
"kafka-ui-react-app/src/lib/yupExtended.ts"
] | [
"kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx",
"kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts",
"kafka-ui-react-app/src/lib/yupExtended.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx b/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
index 2fce1ad7d79..56d7bdc8175 100644
--- a/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
+++ b/kafka-ui-react-app/src/components/Schemas/Edit/Form.tsx
@@ -55,7 +55,7 @@ const Form: React.FC = () => {
yup.object().shape({
newSchema:
schema?.schemaType === SchemaType.PROTOBUF
- ? yup.string().required().isEnum('Schema syntax is not valid')
+ ? yup.string().required()
: yup.string().required().isJsonObject('Schema syntax is not valid'),
});
const methods = useForm<NewSchemaSubjectRaw>({
diff --git a/kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts b/kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts
index 8100b9a3264..bd43dd3f72a 100644
--- a/kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts
+++ b/kafka-ui-react-app/src/lib/__test__/yupExtended.spec.ts
@@ -1,19 +1,5 @@
-import { isValidEnum, isValidJsonObject } from 'lib/yupExtended';
+import { isValidJsonObject } from 'lib/yupExtended';
-const invalidEnum = `
-ennum SchemType {
- AVRO = 0;
- JSON = 1;
- PROTOBUF = 3;
-}
-`;
-const validEnum = `
-enum SchemType {
- AVRO = 0;
- JSON = 1;
- PROTOBUF = 3;
-}
-`;
describe('yup extended', () => {
describe('isValidJsonObject', () => {
it('returns false for no value', () => {
@@ -35,21 +21,4 @@ describe('yup extended', () => {
expect(isValidJsonObject('{ "foo": "bar" }')).toBeTruthy();
});
});
-
- describe('isValidEnum', () => {
- it('returns false for invalid enum', () => {
- expect(isValidEnum(invalidEnum)).toBeFalsy();
- });
- it('returns false for no value', () => {
- expect(isValidEnum()).toBeFalsy();
- });
- it('returns true should trim value', () => {
- expect(
- isValidEnum(` enum SchemType {AVRO = 0; PROTOBUF = 3;} `)
- ).toBeTruthy();
- });
- it('returns true for valid enum', () => {
- expect(isValidEnum(validEnum)).toBeTruthy();
- });
- });
});
diff --git a/kafka-ui-react-app/src/lib/yupExtended.ts b/kafka-ui-react-app/src/lib/yupExtended.ts
index 4c662ca8222..241dac9770a 100644
--- a/kafka-ui-react-app/src/lib/yupExtended.ts
+++ b/kafka-ui-react-app/src/lib/yupExtended.ts
@@ -10,7 +10,6 @@ declare module 'yup' {
TFlags extends yup.Flags = ''
> extends yup.Schema<TType, TContext, TDefault, TFlags> {
isJsonObject(message?: string): StringSchema<TType, TContext>;
- isEnum(message?: string): StringSchema<TType, TContext>;
}
}
@@ -40,32 +39,6 @@ const isJsonObject = (message?: string) => {
isValidJsonObject
);
};
-
-export const isValidEnum = (value?: string) => {
- try {
- if (!value) return false;
- const trimmedValue = value.trim();
- if (
- trimmedValue.indexOf('enum') === 0 &&
- trimmedValue.lastIndexOf('}') === trimmedValue.length - 1
- ) {
- return true;
- }
- } catch {
- // do nothing
- }
- return false;
-};
-
-const isEnum = (message?: string) => {
- return yup.string().test(
- 'isEnum',
- // eslint-disable-next-line no-template-curly-in-string
- message || '${path} is not Enum object',
- isValidEnum
- );
-};
-
/**
* due to yup rerunning all the object validiation during any render,
* it makes sense to cache the async results
@@ -88,7 +61,6 @@ export function cacheTest(
}
yup.addMethod(yup.StringSchema, 'isJsonObject', isJsonObject);
-yup.addMethod(yup.StringSchema, 'isEnum', isEnum);
export const topicFormValidationSchema = yup.object().shape({
name: yup
| null | train | test | 2023-05-02T14:34:57 | "2023-04-27T00:52:03Z" | Haarolean | train |
provectus/kafka-ui/3734_3750 | provectus/kafka-ui | provectus/kafka-ui/3734 | provectus/kafka-ui/3750 | [
"connected"
] | 379d9926df00e6388ee417b043652cf4d37ad4d0 | 147b539c376028268d98955e66f0672125cd263b | [] | [] | "2023-04-28T11:40:47Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Connectors: no error is displayed if the syntax is not valid | 1. Edit an existing connector config
2. Invalid syntax is not displayed ("config is not JSON object") until you click outside the config textarea. | [
"kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx"
] | [
"kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx b/kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx
index 0e86d48940d..8a372e9d12b 100644
--- a/kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx
+++ b/kafka-ui-react-app/src/components/Connect/Details/Config/Config.tsx
@@ -37,7 +37,7 @@ const Config: React.FC = () => {
formState: { isDirty, isSubmitting, isValid, errors },
setValue,
} = useForm<FormValues>({
- mode: 'onTouched',
+ mode: 'onChange',
resolver: yupResolver(validationSchema),
defaultValues: {
config: JSON.stringify(config, null, '\t'),
| null | train | test | 2023-05-06T19:12:36 | "2023-04-27T00:41:57Z" | Haarolean | train |
provectus/kafka-ui/1911_3752 | provectus/kafka-ui | provectus/kafka-ui/1911 | provectus/kafka-ui/3752 | [
"keyword_pr_to_issue",
"connected"
] | c7cb7a40272f110af1ad41387d1efffdf89e18ae | ba6d6b2b1fffaf03e5919091fb58c058146f352c | [
"blocked by #1537",
"@Haarolean is this issue still open to work on ?",
"@dshubhadeep yes",
"@dshubhadeep any luck?",
"@Haarolean will submit a PR by this weekend"
] | [] | "2023-04-30T10:30:37Z" | [
"good first issue",
"scope/backend",
"status/accepted",
"type/chore"
] | BE: Fix sonar code smells | https://sonarcloud.io/project/issues?resolved=false&id=com.provectus%3Akafka-ui_backend | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java",
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
index 78f5bdeced3..e7a58cbae27 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/metrics/JmxMetricsRetriever.java
@@ -72,17 +72,14 @@ private void withJmxConnector(String jmxUrl,
KafkaCluster c,
Consumer<JMXConnector> consumer) {
var env = prepareJmxEnvAndSetThreadLocal(c);
- try {
- JMXConnector connector = null;
+ try (JMXConnector connector = JMXConnectorFactory.newJMXConnector(new JMXServiceURL(jmxUrl), env)) {
try {
- connector = JMXConnectorFactory.newJMXConnector(new JMXServiceURL(jmxUrl), env);
connector.connect(env);
} catch (Exception exception) {
log.error("Error connecting to {}", jmxUrl, exception);
return;
}
consumer.accept(connector);
- connector.close();
} catch (Exception e) {
log.error("Error getting jmx metrics from {}", jmxUrl, e);
} finally {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
index 31d90244ebe..4b8af81f851 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/KafkaServicesValidation.java
@@ -45,12 +45,10 @@ private static Mono<ApplicationPropertyValidationDTO> invalid(Throwable th) {
*/
public static Optional<String> validateTruststore(TruststoreConfig truststoreConfig) {
if (truststoreConfig.getTruststoreLocation() != null && truststoreConfig.getTruststorePassword() != null) {
- try {
+ try (FileInputStream fileInputStream = new FileInputStream(
+ (ResourceUtils.getFile(truststoreConfig.getTruststoreLocation())))) {
KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
- trustStore.load(
- new FileInputStream((ResourceUtils.getFile(truststoreConfig.getTruststoreLocation()))),
- truststoreConfig.getTruststorePassword().toCharArray()
- );
+ trustStore.load(fileInputStream, truststoreConfig.getTruststorePassword().toCharArray());
TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(
TrustManagerFactory.getDefaultAlgorithm()
);
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
index 11c2d4a7ba3..38c12d35f3d 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/schemas/SchemaDetails.java
@@ -13,7 +13,7 @@ public class SchemaDetails extends BasePage {
protected SelenideElement compatibilityField = $x("//h4[contains(text(),'Compatibility')]/../p");
protected SelenideElement editSchemaBtn = $x("//button[contains(text(),'Edit Schema')]");
protected SelenideElement removeBtn = $x("//*[contains(text(),'Remove')]");
- protected SelenideElement confirmBtn = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
+ protected SelenideElement schemaConfirmBtn = $x("//div[@role='dialog']//button[contains(text(),'Confirm')]");
protected SelenideElement schemaTypeField = $x("//h4[contains(text(),'Type')]/../p");
protected SelenideElement latestVersionField = $x("//h4[contains(text(),'Latest version')]/../p");
protected SelenideElement compareVersionBtn = $x("//button[text()='Compare Versions']");
@@ -62,8 +62,8 @@ public SchemaDetails openCompareVersionMenu() {
public SchemaDetails removeSchema() {
clickByJavaScript(dotMenuBtn);
removeBtn.shouldBe(Condition.enabled).click();
- confirmBtn.shouldBe(Condition.visible).click();
- confirmBtn.shouldBe(Condition.disappear);
+ schemaConfirmBtn.shouldBe(Condition.visible).click();
+ schemaConfirmBtn.shouldBe(Condition.disappear);
return this;
}
}
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
index c4e65c65bee..b683321bd51 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/ProduceMessagePanel.java
@@ -14,7 +14,7 @@ public class ProduceMessagePanel extends BasePage {
protected SelenideElement keyTextArea = $x("//div[@id='key']/textarea");
protected SelenideElement valueTextArea = $x("//div[@id='content']/textarea");
protected SelenideElement headersTextArea = $x("//div[@id='headers']/textarea");
- protected SelenideElement submitBtn = headersTextArea.$x("../../../..//button[@type='submit']");
+ protected SelenideElement submitProduceMessageBtn = headersTextArea.$x("../../../..//button[@type='submit']");
protected SelenideElement partitionDdl = $x("//ul[@name='partition']");
protected SelenideElement keySerdeDdl = $x("//ul[@name='keySerde']");
protected SelenideElement contentSerdeDdl = $x("//ul[@name='valueSerde']");
@@ -48,8 +48,8 @@ public ProduceMessagePanel setHeadersFld(String value) {
@Step
public ProduceMessagePanel submitProduceMessage() {
- clickByActions(submitBtn);
- submitBtn.shouldBe(Condition.disappear);
+ clickByActions(submitProduceMessageBtn);
+ submitProduceMessageBtn.shouldBe(Condition.disappear);
refresh();
return this;
}
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
index 8867400fee3..ac00fd9982e 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/TopicsServicePaginationTest.java
@@ -131,7 +131,7 @@ public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() {
assertThat(topics.getBody().getPageCount()).isEqualTo(4);
assertThat(topics.getBody().getTopics()).hasSize(1);
- assertThat(topics.getBody().getTopics().get(0).getName().equals("99"));
+ assertThat(topics.getBody().getTopics().get(0).getName()).isEqualTo("99");
}
@Test
| test | test | 2023-05-15T09:11:22 | "2022-04-29T20:13:49Z" | Haarolean | train |
provectus/kafka-ui/3346_3754 | provectus/kafka-ui | provectus/kafka-ui/3346 | provectus/kafka-ui/3754 | [
"connected"
] | c7a7921b8242b819ebb80769eb3966fd69c26329 | 3ef5a9f492db2c64d3b27986a089e94b285cd186 | [
"https://github.com/provectus/kafka-ui/issues/3043#issuecomment-1359684752",
"Hi, Can I work on this issue ? assign to me.",
"Hi @Haarolean , this issue seems to be front-end issue. Probably we need to change the label to front-end instead of back-end. I can fix it though it is front-end issue. "
] | [] | "2023-05-01T09:23:11Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Brokers: Broker's configs search by key is case-sensitive |
**Describe the bug** (Actual behavior)
Search in Broker configs is case-sensitive
**Expected behavior**
Search of Broker's configs should work as in other components and should not be case-sensitive
**Set up**
https://www.kafka-ui.provectus.io/
**Steps to Reproduce**
<!-- We'd like you to provide an example setup (via docker-compose, helm, etc.)
to reproduce the problem, especially with a complex setup. -->
1. Navigate to Brokers
2. Select the Broker
3. Turn to Configs tab
4. Search for config
**Screenshots**
https://user-images.githubusercontent.com/104780608/218951551-e8c528e1-72dc-477a-b474-7d5bd1989e6a.mov
| [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java",
"kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx"
] | [
"kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java",
"kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx"
] | [] | diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
index 4ce282b6cc5..9388b914d33 100644
--- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
+++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java
@@ -3,6 +3,7 @@
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$$x;
import static com.codeborne.selenide.Selenide.$x;
+import static com.codeborne.selenide.Selenide.sleep;
import com.codeborne.selenide.CollectionCondition;
import com.codeborne.selenide.Condition;
@@ -36,6 +37,7 @@ public KsqlQueryForm waitUntilScreenReady() {
@Step
public KsqlQueryForm clickClearBtn() {
clickByJavaScript(clearBtn);
+ sleep(500);
return this;
}
diff --git a/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx b/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx
index 8f909facac0..ad6c5087a0f 100644
--- a/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx
+++ b/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx
@@ -34,7 +34,11 @@ const Configs: React.FC = () => {
const getData = () => {
return data
- .filter((item) => item.name.toLocaleLowerCase().indexOf(keyword) > -1)
+ .filter(
+ (item) =>
+ item.name.toLocaleLowerCase().indexOf(keyword.toLocaleLowerCase()) >
+ -1
+ )
.sort((a, b) => {
if (a.source === b.source) return 0;
| null | train | test | 2023-05-01T02:17:53 | "2023-02-15T06:39:54Z" | armenuikafka | train |
provectus/kafka-ui/3761_3762 | provectus/kafka-ui | provectus/kafka-ui/3761 | provectus/kafka-ui/3762 | [
"connected"
] | 7857bd5000a00b7fee906212209a1ef91fcd723a | 690dcd3f74ecb11e2a827f48c189179e502fc9e1 | [] | [] | "2023-05-02T09:33:08Z" | [
"type/bug",
"scope/backend",
"status/accepted",
"status/confirmed"
] | Wizard: File upload is broken | ### Issue submitter TODO list
- [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
### Describe the bug (actual behavior)
Raised here https://discord.com/channels/897805035122077716/1101421039478640720
### Expected behavior
_No response_
### Your installation details
v0.6.x
### Steps to reproduce
Try to upload file from wizard
### Screenshots
_No response_
### Logs
_No response_
### Additional context
_No response_ | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java",
"kafka-ui-contract/pom.xml",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java",
"kafka-ui-api/src/test/resources/fileForUploadTest.txt",
"kafka-ui-contract/pom.xml",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java",
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/controller/ApplicationConfigControllerTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
index 571250ba947..df04b40fab4 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/ApplicationConfigController.java
@@ -27,6 +27,7 @@
import org.mapstruct.factory.Mappers;
import org.springframework.http.ResponseEntity;
import org.springframework.http.codec.multipart.FilePart;
+import org.springframework.http.codec.multipart.Part;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
import reactor.core.publisher.Flux;
@@ -92,16 +93,19 @@ public Mono<ResponseEntity<Void>> restartWithConfig(Mono<RestartRequestDTO> rest
}
@Override
- public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(FilePart file, ServerWebExchange exchange) {
+ public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux,
+ ServerWebExchange exchange) {
return accessControlService
.validateAccess(
AccessContext.builder()
.applicationConfigActions(EDIT)
.build()
)
- .then(dynamicConfigOperations.uploadConfigRelatedFile(file))
- .map(path -> new UploadedFileInfoDTO().location(path.toString()))
- .map(ResponseEntity::ok);
+ .then(fileFlux.single())
+ .flatMap(file ->
+ dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file)
+ .map(path -> new UploadedFileInfoDTO().location(path.toString()))
+ .map(ResponseEntity::ok));
}
@Override
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
index 75c6d25f959..68f826bd0fc 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/DynamicConfigOperations.java
@@ -90,6 +90,7 @@ public Optional<PropertySource<?>> loadDynamicPropertySource() {
}
public PropertiesStructure getCurrentProperties() {
+ checkIfDynamicConfigEnabled();
return PropertiesStructure.builder()
.kafka(getNullableBean(ClustersProperties.class))
.rbac(getNullableBean(RoleBasedAccessControlProperties.class))
@@ -112,11 +113,7 @@ private <T> T getNullableBean(Class<T> clazz) {
}
public void persist(PropertiesStructure properties) {
- if (!dynamicConfigEnabled()) {
- throw new ValidationException(
- "Dynamic config change is not allowed. "
- + "Set dynamic.config.enabled property to 'true' to enabled it.");
- }
+ checkIfDynamicConfigEnabled();
properties.initAndValidate();
String yaml = serializeToYaml(properties);
@@ -124,8 +121,9 @@ public void persist(PropertiesStructure properties) {
}
public Mono<Path> uploadConfigRelatedFile(FilePart file) {
- String targetDirStr = (String) ctx.getEnvironment().getSystemEnvironment()
- .getOrDefault(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
+ checkIfDynamicConfigEnabled();
+ String targetDirStr = ctx.getEnvironment()
+ .getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT);
Path targetDir = Path.of(targetDirStr);
if (!Files.exists(targetDir)) {
@@ -149,6 +147,14 @@ public Mono<Path> uploadConfigRelatedFile(FilePart file) {
.onErrorMap(th -> new FileUploadException(targetFilePath, th));
}
+ private void checkIfDynamicConfigEnabled() {
+ if (!dynamicConfigEnabled()) {
+ throw new ValidationException(
+ "Dynamic config change is not allowed. "
+ + "Set dynamic.config.enabled property to 'true' to enabled it.");
+ }
+ }
+
@SneakyThrows
private void writeYamlToFile(String yaml, Path path) {
if (Files.isDirectory(path)) {
diff --git a/kafka-ui-api/src/test/resources/fileForUploadTest.txt b/kafka-ui-api/src/test/resources/fileForUploadTest.txt
new file mode 100644
index 00000000000..cc58280d075
--- /dev/null
+++ b/kafka-ui-api/src/test/resources/fileForUploadTest.txt
@@ -0,0 +1,1 @@
+some content goes here
diff --git a/kafka-ui-contract/pom.xml b/kafka-ui-contract/pom.xml
index f99f20d3d81..0d8e238368f 100644
--- a/kafka-ui-contract/pom.xml
+++ b/kafka-ui-contract/pom.xml
@@ -101,9 +101,6 @@
<useSpringBoot3>true</useSpringBoot3>
<dateLibrary>java8</dateLibrary>
</configOptions>
- <typeMappings>
- <mapping>filepart=org.springframework.http.codec.multipart.FilePart</mapping>
- </typeMappings>
</configuration>
</execution>
<execution>
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index 2bafb05faaa..b589198b5a1 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -1819,7 +1819,7 @@ paths:
properties:
file:
type: string
- format: filepart
+ format: binary
responses:
200:
description: OK
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
index dbdfb67fd59..1938f93044c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java
@@ -2,6 +2,7 @@
import com.provectus.kafka.ui.container.KafkaConnectContainer;
import com.provectus.kafka.ui.container.SchemaRegistryContainer;
+import java.nio.file.Path;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
@@ -9,6 +10,7 @@
import org.apache.kafka.clients.admin.NewTopic;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.function.ThrowingConsumer;
+import org.junit.jupiter.api.io.TempDir;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.context.SpringBootTest;
@@ -47,6 +49,9 @@ public abstract class AbstractIntegrationTest {
.dependsOn(kafka)
.dependsOn(schemaRegistry);
+ @TempDir
+ public static Path tmpDir;
+
static {
kafka.start();
schemaRegistry.start();
@@ -76,6 +81,9 @@ public void initialize(@NotNull ConfigurableApplicationContext context) {
System.setProperty("kafka.clusters.1.schemaRegistry", schemaRegistry.getUrl());
System.setProperty("kafka.clusters.1.kafkaConnect.0.name", "kafka-connect");
System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget());
+
+ System.setProperty("dynamic.config.enabled", "true");
+ System.setProperty("config.related.uploads.dir", tmpDir.toString());
}
}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/controller/ApplicationConfigControllerTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/controller/ApplicationConfigControllerTest.java
new file mode 100644
index 00000000000..7840760868b
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/controller/ApplicationConfigControllerTest.java
@@ -0,0 +1,49 @@
+package com.provectus.kafka.ui.controller;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.provectus.kafka.ui.AbstractIntegrationTest;
+import com.provectus.kafka.ui.model.UploadedFileInfoDTO;
+import java.io.IOException;
+import java.nio.file.Path;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.http.HttpEntity;
+import org.springframework.http.client.MultipartBodyBuilder;
+import org.springframework.test.web.reactive.server.WebTestClient;
+import org.springframework.util.MultiValueMap;
+
+class ApplicationConfigControllerTest extends AbstractIntegrationTest {
+
+ @Autowired
+ private WebTestClient webTestClient;
+
+ @Test
+ public void testUpload() throws IOException {
+ var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());
+
+ UploadedFileInfoDTO result = webTestClient
+ .post()
+ .uri("/api/config/relatedfiles")
+ .bodyValue(generateBody(fileToUpload))
+ .exchange()
+ .expectStatus()
+ .isOk()
+ .expectBody(UploadedFileInfoDTO.class)
+ .returnResult()
+ .getResponseBody();
+
+ assertThat(result).isNotNull();
+ assertThat(result.getLocation()).isNotNull();
+ assertThat(Path.of(result.getLocation()))
+ .hasSameBinaryContentAs(fileToUpload.getFile().toPath());
+ }
+
+ private MultiValueMap<String, HttpEntity<?>> generateBody(ClassPathResource resource) {
+ MultipartBodyBuilder builder = new MultipartBodyBuilder();
+ builder.part("file", resource);
+ return builder.build();
+ }
+
+}
| train | test | 2023-05-02T08:34:36 | "2023-05-02T09:24:48Z" | iliax | train |
provectus/kafka-ui/3749_3764 | provectus/kafka-ui | provectus/kafka-ui/3749 | provectus/kafka-ui/3764 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | aa7429eeba2e942846fe071d72d4621dbd2aa70b | [
"Hello there timchenko-a! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hello @timchenko-a , thank you for raising it\r\ncan't say that it is a bug, but its better to update version (and version-related) info from time to time. \r\nwill be implemented within #3764"
] | [] | "2023-05-02T12:05:20Z" | [
"type/enhancement",
"scope/backend",
"status/accepted"
] | Broker version doesn't change without kafka ui restart | ### Issue submitter TODO list
- [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
### Describe the bug (actual behavior)
Kafka UI doesn't automatically update broker version information in case the broker is updated. It requires a Kafka UI restart.
### Expected behavior
Broker is updated -> Kafka UI automatically detects the new version.
### Your installation details
1. Kafka UI version: ef0dacb v0.6.1
2. No, I run it just as a plain jar on EC2, systemd managed.
3. The following command is used to run kafka-ui:
`java -Xmx512M -Xms512M -Dspring.config.additional-location=/opt/config.yaml --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED -jar /opt/kafka-ui.jar`
/opt/config.yaml content (only 1 cluster here; we have more, but the config is pretty much the same everywhere):
```
---
kafka:
clusters:
- name: CLUSTER_NAME
bootstrapServers: SERVER_1_URL:9093,SERVER_2_URL:9093,SERVER_3_URL:9093
readonly: true
properties:
security:
protocol: SASL_PLAINTEXT
sasl:
mechanism: SCRAM-SHA-256
jaas:
config: 'org.apache.kafka.common.security.scram.ScramLoginModule required username="USERNAME" password="PASSWORD";'
schemaRegistry: http://SCHEMA_REGISTRY_URL:8081
```
### Steps to reproduce
Set up a Kafka cluster on a non-latest version;
Add it to Kafka UI. It will detect the cluster's version correctly;
Upgrade the Kafka cluster;
Check the broker version in Kafka UI; it will stay the same;
Restart Kafka UI; now the new broker version will be detected.
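For context (judging by the maintainer's reply and the patch attached below), the version shown in the UI comes from the broker's `inter.broker.protocol.version` config entry, which was read only once when the admin client was created and then cached. A minimal, hypothetical sketch of re-reading that entry on demand with the plain Kafka `AdminClient` follows; the class and method names are illustrative and not part of kafka-ui's actual code.
```java
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.Config;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.common.config.ConfigResource;

// Hypothetical helper: re-reads the broker version from the controller's config
// instead of relying on a value cached when the client was created.
class BrokerVersionReader {

  static String readVersion(AdminClient client) throws ExecutionException, InterruptedException {
    // Ask the cluster which node is currently the controller
    int controllerId = client.describeCluster().controller().get().id();

    ConfigResource broker = new ConfigResource(ConfigResource.Type.BROKER, String.valueOf(controllerId));
    Map<ConfigResource, Config> configs = client.describeConfigs(List.of(broker)).all().get();

    // "inter.broker.protocol.version" reflects the broker's protocol version, e.g. "3.4-IV0"
    return configs.get(broker).entries().stream()
        .filter(e -> e.name().contains("inter.broker.protocol.version"))
        .map(ConfigEntry::value)
        .findFirst()
        .orElse("unknown");
  }
}
```
Running something like this on every statistics refresh, rather than only when the admin client is created, is the kind of change that lets the UI pick up a new broker version without a restart.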
### Screenshots
_No response_
### Logs
_No response_
### Additional context
Want to understand if it's a bug or expected behaviour :) | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
index 7ba3f036e93..87b48072c2d 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/FeatureService.java
@@ -4,16 +4,13 @@
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.ReactiveAdminClient.ClusterDescription;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
-import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import org.apache.kafka.common.Node;
import org.apache.kafka.common.acl.AclOperation;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
@@ -24,11 +21,10 @@
@Slf4j
public class FeatureService {
- private static final String DELETE_TOPIC_ENABLED_SERVER_PROPERTY = "delete.topic.enable";
-
private final AdminClientService adminClientService;
- public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
+ public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient adminClient,
+ KafkaCluster cluster,
ClusterDescription clusterDescription) {
List<Mono<ClusterFeature>> features = new ArrayList<>();
@@ -46,29 +42,17 @@ public Mono<List<ClusterFeature>> getAvailableFeatures(KafkaCluster cluster,
features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
}
- features.add(topicDeletionEnabled(cluster, clusterDescription.getController()));
+ features.add(topicDeletionEnabled(adminClient));
features.add(aclView(cluster));
features.add(aclEdit(clusterDescription));
return Flux.fromIterable(features).flatMap(m -> m).collectList();
}
- private Mono<ClusterFeature> topicDeletionEnabled(KafkaCluster cluster, @Nullable Node controller) {
- if (controller == null) {
- return Mono.just(ClusterFeature.TOPIC_DELETION); // assuming it is enabled by default
- }
- return adminClientService.get(cluster)
- .flatMap(ac -> ac.loadBrokersConfig(List.of(controller.id())))
- .map(config ->
- config.values().stream()
- .flatMap(Collection::stream)
- .filter(e -> e.name().equals(DELETE_TOPIC_ENABLED_SERVER_PROPERTY))
- .map(e -> Boolean.parseBoolean(e.value()))
- .findFirst()
- .orElse(true))
- .flatMap(enabled -> enabled
- ? Mono.just(ClusterFeature.TOPIC_DELETION)
- : Mono.empty());
+ private Mono<ClusterFeature> topicDeletionEnabled(ReactiveAdminClient adminClient) {
+ return adminClient.isTopicDeletionEnabled()
+ ? Mono.just(ClusterFeature.TOPIC_DELETION)
+ : Mono.empty();
}
private Mono<ClusterFeature> aclEdit(ClusterDescription clusterDescription) {
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
index 8451a89f97d..0b6f16a2235 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java
@@ -32,8 +32,9 @@
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Builder;
import lombok.Getter;
-import lombok.RequiredArgsConstructor;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.AdminClient;
@@ -75,7 +76,6 @@
import org.apache.kafka.common.errors.UnknownTopicOrPartitionException;
import org.apache.kafka.common.errors.UnsupportedVersionException;
import org.apache.kafka.common.requests.DescribeLogDirsResponse;
-import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -85,7 +85,7 @@
@Slf4j
-@RequiredArgsConstructor
+@AllArgsConstructor
public class ReactiveAdminClient implements Closeable {
public enum SupportedFeature {
@@ -104,7 +104,8 @@ public enum SupportedFeature {
this.predicate = (admin, ver) -> Mono.just(ver != null && ver >= fromVersion);
}
- static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, @Nullable Float kafkaVersion) {
+ static Mono<Set<SupportedFeature>> forVersion(AdminClient ac, String kafkaVersionStr) {
+ @Nullable Float kafkaVersion = KafkaVersion.parse(kafkaVersionStr).orElse(null);
return Flux.fromArray(SupportedFeature.values())
.flatMap(f -> f.predicate.apply(ac, kafkaVersion).map(enabled -> Tuples.of(f, enabled)))
.filter(Tuple2::getT2)
@@ -123,19 +124,46 @@ public static class ClusterDescription {
Set<AclOperation> authorizedOperations;
}
- public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
- return getClusterVersion(adminClient)
- .flatMap(ver ->
- getSupportedUpdateFeaturesForVersion(adminClient, ver)
- .map(features ->
- new ReactiveAdminClient(adminClient, ver, features)));
+ @Builder
+ private record ConfigRelatedInfo(String version,
+ Set<SupportedFeature> features,
+ boolean topicDeletionIsAllowed) {
+
+ private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) {
+ return loadBrokersConfig(ac, List.of(controllerId))
+ .map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId))
+ .flatMap(configs -> {
+ String version = "1.0-UNKNOWN";
+ boolean topicDeletionEnabled = true;
+ for (ConfigEntry entry : configs) {
+ if (entry.name().contains("inter.broker.protocol.version")) {
+ version = entry.value();
+ }
+ if (entry.name().equals("delete.topic.enable")) {
+ topicDeletionEnabled = Boolean.parseBoolean(entry.value());
+ }
+ }
+ var builder = ConfigRelatedInfo.builder()
+ .version(version)
+ .topicDeletionIsAllowed(topicDeletionEnabled);
+ return SupportedFeature.forVersion(ac, version)
+ .map(features -> builder.features(features).build());
+ });
+ }
}
- private static Mono<Set<SupportedFeature>> getSupportedUpdateFeaturesForVersion(AdminClient ac, String versionStr) {
- @Nullable Float kafkaVersion = KafkaVersion.parse(versionStr).orElse(null);
- return SupportedFeature.forVersion(ac, kafkaVersion);
+ public static Mono<ReactiveAdminClient> create(AdminClient adminClient) {
+ return describeClusterImpl(adminClient, Set.of())
+ // choosing node from which we will get configs (starting with controller)
+ .flatMap(descr -> descr.controller != null
+ ? Mono.just(descr.controller)
+ : Mono.justOrEmpty(descr.nodes.stream().findFirst())
+ )
+ .flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id()))
+ .map(info -> new ReactiveAdminClient(adminClient, info));
}
+
private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullable Float kafkaVersion) {
return toMono(ac.describeAcls(AclBindingFilter.ANY).values())
.thenReturn(true)
@@ -174,11 +202,10 @@ public static <T> Mono<T> toMono(KafkaFuture<T> future) {
@Getter(AccessLevel.PACKAGE) // visible for testing
private final AdminClient client;
- private final String version;
- private final Set<SupportedFeature> features;
+ private volatile ConfigRelatedInfo configRelatedInfo;
public Set<SupportedFeature> getClusterFeatures() {
- return features;
+ return configRelatedInfo.features();
}
public Mono<Set<String>> listTopics(boolean listInternal) {
@@ -190,7 +217,20 @@ public Mono<Void> deleteTopic(String topicName) {
}
public String getVersion() {
- return version;
+ return configRelatedInfo.version();
+ }
+
+ public boolean isTopicDeletionEnabled() {
+ return configRelatedInfo.topicDeletionIsAllowed();
+ }
+
+ public Mono<Void> updateInternalStats(@Nullable Node controller) {
+ if (controller == null) {
+ return Mono.empty();
+ }
+ return ConfigRelatedInfo.extract(client, controller.id())
+ .doOnNext(info -> this.configRelatedInfo = info)
+ .then();
}
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
@@ -200,7 +240,7 @@ public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig() {
//NOTE: skips not-found topics (for which UnknownTopicOrPartitionException was thrown by AdminClient)
//and topics for which DESCRIBE_CONFIGS permission is not set (TopicAuthorizationException was thrown)
public Mono<Map<String, List<ConfigEntry>>> getTopicsConfig(Collection<String> topicNames, boolean includeDoc) {
- var includeDocFixed = features.contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL) && includeDoc;
+ var includeDocFixed = includeDoc && getClusterFeatures().contains(SupportedFeature.CONFIG_DOCUMENTATION_RETRIEVAL);
// we need to partition calls, because it can lead to AdminClient timeouts in case of large topics count
return partitionCalls(
topicNames,
@@ -349,7 +389,7 @@ public Mono<Map<Integer, Map<String, DescribeLogDirsResponse.LogDirInfo>>> descr
}
public Mono<ClusterDescription> describeCluster() {
- return describeClusterImpl(client, features);
+ return describeClusterImpl(client, getClusterFeatures());
}
private static Mono<ClusterDescription> describeClusterImpl(AdminClient client, Set<SupportedFeature> features) {
@@ -371,23 +411,6 @@ private static Mono<ClusterDescription> describeClusterImpl(AdminClient client,
);
}
- private static Mono<String> getClusterVersion(AdminClient client) {
- return describeClusterImpl(client, Set.of())
- // choosing node from which we will get configs (starting with controller)
- .flatMap(descr -> descr.controller != null
- ? Mono.just(descr.controller)
- : Mono.justOrEmpty(descr.nodes.stream().findFirst())
- )
- .flatMap(node -> loadBrokersConfig(client, List.of(node.id())))
- .flatMap(configs -> configs.values().stream()
- .flatMap(Collection::stream)
- .filter(entry -> entry.name().contains("inter.broker.protocol.version"))
- .findFirst()
- .map(configEntry -> Mono.just(configEntry.value()))
- .orElse(Mono.empty()))
- .switchIfEmpty(Mono.just("1.0-UNKNOWN"));
- }
-
public Mono<Void> deleteConsumerGroups(Collection<String> groupIds) {
return toMono(client.deleteConsumerGroups(groupIds).all())
.onErrorResume(GroupIdNotFoundException.class,
@@ -421,7 +444,7 @@ public Mono<Void> createPartitions(Map<String, NewPartitions> newPartitionsMap)
// NOTE: places whole current topic config with new one. Entries that were present in old config,
// but missed in new will be set to default
public Mono<Void> updateTopicConfig(String topicName, Map<String, String> configs) {
- if (features.contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
+ if (getClusterFeatures().contains(SupportedFeature.INCREMENTAL_ALTER_CONFIGS)) {
return getTopicsConfigImpl(List.of(topicName), false)
.map(conf -> conf.getOrDefault(topicName, List.of()))
.flatMap(currentConfigs -> incrementalAlterConfig(topicName, currentConfigs, configs));
@@ -596,17 +619,17 @@ Mono<Map<TopicPartition, Long>> listOffsetsUnsafe(Collection<TopicPartition> par
}
public Mono<Collection<AclBinding>> listAcls(ResourcePatternFilter filter) {
- Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+ Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.describeAcls(new AclBindingFilter(filter, AccessControlEntryFilter.ANY)).values());
}
public Mono<Void> createAcls(Collection<AclBinding> aclBindings) {
- Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+ Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
return toMono(client.createAcls(aclBindings).all());
}
public Mono<Void> deleteAcls(Collection<AclBinding> aclBindings) {
- Preconditions.checkArgument(features.contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
+ Preconditions.checkArgument(getClusterFeatures().contains(SupportedFeature.AUTHORIZED_SECURITY_ENABLED));
var filters = aclBindings.stream().map(AclBinding::toFilter).collect(Collectors.toSet());
return toMono(client.deleteAcls(filters).all()).then();
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
index 994c30714ae..19d946590c4 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/StatisticsService.java
@@ -37,25 +37,26 @@ public Mono<Statistics> updateCache(KafkaCluster c) {
private Mono<Statistics> getStatistics(KafkaCluster cluster) {
return adminClientService.get(cluster).flatMap(ac ->
ac.describeCluster().flatMap(description ->
- Mono.zip(
- List.of(
- metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
- getLogDirInfo(description, ac),
- featureService.getAvailableFeatures(cluster, description),
- loadTopicConfigs(cluster),
- describeTopics(cluster)),
- results ->
- Statistics.builder()
- .status(ServerStatusDTO.ONLINE)
- .clusterDescription(description)
- .version(ac.getVersion())
- .metrics((Metrics) results[0])
- .logDirInfo((InternalLogDirStats) results[1])
- .features((List<ClusterFeature>) results[2])
- .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
- .topicDescriptions((Map<String, TopicDescription>) results[4])
- .build()
- )))
+ ac.updateInternalStats(description.getController()).then(
+ Mono.zip(
+ List.of(
+ metricsCollector.getBrokerMetrics(cluster, description.getNodes()),
+ getLogDirInfo(description, ac),
+ featureService.getAvailableFeatures(ac, cluster, description),
+ loadTopicConfigs(cluster),
+ describeTopics(cluster)),
+ results ->
+ Statistics.builder()
+ .status(ServerStatusDTO.ONLINE)
+ .clusterDescription(description)
+ .version(ac.getVersion())
+ .metrics((Metrics) results[0])
+ .logDirInfo((InternalLogDirStats) results[1])
+ .features((List<ClusterFeature>) results[2])
+ .topicConfigs((Map<String, List<ConfigEntry>>) results[3])
+ .topicDescriptions((Map<String, TopicDescription>) results[4])
+ .build()
+ ))))
.doOnError(e ->
log.error("Failed to collect cluster {} info", cluster.getName(), e))
.onErrorResume(
| null | train | test | 2023-05-02T14:34:57 | "2023-04-28T11:12:43Z" | timchenko-a | train |
provectus/kafka-ui/3590_3767 | provectus/kafka-ui | provectus/kafka-ui/3590 | provectus/kafka-ui/3767 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | e31cd2e442518efa7e2dd92abfad208b9587bb47 | [
"Hello there flfrolund! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Related: #3576\r\n\r\n0.5.0:\r\n<img width=\"1006\" alt=\"image\" src=\"https://user-images.githubusercontent.com/1494347/232004306-ad152297-ed7b-4987-874c-e72d1c164a86.png\">\r\nmaster:\r\n<img width=\"983\" alt=\"image\" src=\"https://user-images.githubusercontent.com/1494347/232004357-30db3e4d-3e54-46ff-afbe-fb17530c44a4.png\">\r\n"
] | [] | "2023-05-02T19:02:15Z" | [
"type/bug",
"good first issue",
"scope/frontend",
"status/accepted",
"status/confirmed",
"type/regression"
] | Diff overlay of Avro schemas in comparision doesn't display anymore | **Describe the bug**
Diff overlay of Avro schemas in comparison doesn't display anymore.
**Expected behavior**
Diff overlay of Avro schemas in comparison shows changes between versions.
**Set up**
**Steps to Reproduce**
1. Enter a schema registry with Avro schemas.
2. Open an Avro schema with multiple versions of it.
3. Enter comparison and select different versions of the Avro schema.
**Additional context**
Introduced in version 0.6.1, works in version 0.6.0.
| [
"kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts",
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts",
"kafka-ui-react-app/src/theme/theme.ts"
] | [
"kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts",
"kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts",
"kafka-ui-react-app/src/theme/theme.ts"
] | [] | diff --git a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts
index 6d0f6598b48..eb71ad1ef27 100644
--- a/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts
+++ b/kafka-ui-react-app/src/components/KsqlDb/Query/QueryForm/QueryForm.styled.ts
@@ -33,6 +33,7 @@ export const Fieldset = styled.fieldset`
flex: 1;
flex-direction: column;
gap: 8px;
+ color: ${({ theme }) => theme.default.color.normal};
`;
export const ButtonsContainer = styled.div`
diff --git a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
index 520f9f6c8ad..c5ecef258a0 100644
--- a/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
+++ b/kafka-ui-react-app/src/components/Schemas/Diff/Diff.styled.ts
@@ -14,9 +14,6 @@ export const DiffWrapper = styled.div`
background-color: ${({ theme }) => theme.default.backgroundColor};
color: ${({ theme }) => theme.default.color.normal};
}
- .ace_line {
- background-color: ${({ theme }) => theme.default.backgroundColor};
- }
.ace_gutter-cell {
background-color: ${({ theme }) =>
theme.ksqlDb.query.editor.cell.backgroundColor};
@@ -39,10 +36,10 @@ export const DiffWrapper = styled.div`
.ace_string {
color: ${({ theme }) => theme.ksqlDb.query.editor.aceString};
}
- > .codeMarker {
- background: ${({ theme }) => theme.icons.warningIcon};
+ .codeMarker {
+ background-color: ${({ theme }) => theme.ksqlDb.query.editor.codeMarker};
position: absolute;
- z-index: 20;
+ z-index: 2000;
}
`;
diff --git a/kafka-ui-react-app/src/theme/theme.ts b/kafka-ui-react-app/src/theme/theme.ts
index 33dbf1c619e..70196bdeae7 100644
--- a/kafka-ui-react-app/src/theme/theme.ts
+++ b/kafka-ui-react-app/src/theme/theme.ts
@@ -366,6 +366,7 @@ export const theme = {
cursor: Colors.neutral[90],
variable: Colors.red[50],
aceString: Colors.green[60],
+ codeMarker: Colors.yellow[20],
},
},
},
@@ -761,6 +762,7 @@ export const darkTheme: ThemeType = {
cursor: Colors.neutral[0],
variable: Colors.red[50],
aceString: Colors.green[60],
+ codeMarker: Colors.yellow[20],
},
},
},
| null | train | test | 2023-05-02T14:34:57 | "2023-03-30T08:25:33Z" | flfrolund | train |
provectus/kafka-ui/2752_3769 | provectus/kafka-ui | provectus/kafka-ui/2752 | provectus/kafka-ui/3769 | [
"connected"
] | bc85924d7ddbd163444e85c3dc0bf1cb83626855 | c813e74609a68225880f782a8eb0f975f37b44ee | [
"We'd need some volunteers to test things out.\r\n\r\nWe coordinate in discord, a link to the thread -> [here](https://discord.com/channels/897805035122077716/1098621183584382976). \r\n\r\nImage: `docker pull public.ecr.aws/provectus/kafka-ui-custom-build:3700`\r\nConfig example: [here](https://github.com/provectus/kafka-ui/pull/3700#issuecomment-1518369213)\r\n\r\nPlease check\r\n1. That authentication with all users you need works fine.\r\n2. RBAC groups matching works fine.\r\n\r\nAD a bit later, it requires more black magic.",
"As I found no volunteers w/ AD, it's gonna be a separate issue. Please upvote #3741 if you're interested.",
"@Haarolean \r\nTested RBAC for image public.ecr.aws/provectus/kafka-ui-custom-build:3700 with LDAP configuration but not able login with this image and I dont see any errors in logs",
"> @Haarolean \n> \n> Tested RBAC for image public.ecr.aws/provectus/kafka-ui-custom-build:3700 with LDAP configuration but not able login with this image and I dont see any errors in logs\n\nMost likely, your configuration is invalid. Please share your config. \n",
"@Haarolean I have customized the docker image with Ldap configs and roles.yaml file, \r\nDockerfile\r\n```\r\nFROM public.ecr.aws/provectus/kafka-ui-custom-build:3700\r\nUSER root\r\nENV KAFKA_CLUSTERS_0_NAME=kafka\r\nENV KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=<kafka broker:port>\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL=SASL_PLAINTEXT\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM=GSSAPI\r\nENV KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG='com.sun.security.auth.module.Krb5LoginModule required useTicketCache=false principal=\"******.COM\" useKeyTab=true serviceName=\"kafka\" keyTab=\"/keytabs/******.keytab\";'\r\nENV KAFKA_CLUSTERS_0_SCHEMAREGISTRY=<schema registry:port>\r\nENV KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME=dev\r\nENV KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS=<kafka connect:port>\r\nENV SPRING_CONFIG_ADDITIONAL-LOCATION= /roles.yml\r\nENV KAFKA_CLUSTERS_0_READONLY=false\r\nCOPY krb5.conf /etc/krb5.conf\r\nCOPY roles.yml /roles.yml\r\nCOPY *****.keytab /keytabs/\r\nCOPY entrypoint.sh /\r\nENTRYPOINT [\"/entrypoint.sh\"]\r\n\r\n```\r\n\r\nroles.yml file\r\n```\r\nrbac:\r\n roles:\r\n - name: \"readonly\"\r\n clusters:\r\n - <kafka dev>\r\n subjects:\r\n - provider: ldap\r\n type: group\r\n value: \"OU=Users,OU=Accounts,OU=Resources,DC=<domain>,DC=com\"\r\n - provider: ldap_ad\r\n type: user\r\n value: \"CN <name>\"\r\n permissions:\r\n - resource: clusterconfig\r\n actions: [ \"view\" ]\r\n\r\n - resource: topic\r\n value: \".*\"\r\n actions:\r\n - VIEW\r\n - MESSAGES_READ\r\n\r\n - resource: consumer\r\n value: \".*\"\r\n actions: [ view ]\r\n\r\n - resource: schema\r\n value: \".*\"\r\n actions: [ view ]\r\n\r\n - resource: connect\r\n value: \".*\"\r\n actions: [ view ]\r\n```",
"@padmaachuth you haven't configured LDAP auth at all, see the first lines of the config example I provided",
"> @padmaachuth you haven't configured LDAP auth at all, see the first lines of the config example I provided\r\n\r\n@Haarolean deploying image in openshift passing LDAP config from ENV \r\n\r\nKAFKA_CLUSTERS_0_NAME: <Cluster Name>\r\nKAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: <Bootstrap servers:port>\r\nKAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL: SASL_PLAINTEXT\r\nKAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM: GSSAPI\r\nKAFKA_CLUSTERS_0_SCHEMAREGISTRY: <Schema Registry URL>\r\nKAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME: <Kafka Connector Name>\r\nKAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG: com.sun.security.auth.module.Krb5LoginModule required useTicketCache=false principal=\"<principal name>@DOMAIN.COM\" useKeyTab=true serviceName=\"kafka\" keyTab=\"/keytabs/<Principal Name>.keytab\";\r\nKAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS: <Kafka Connector URL>\r\nKAFKA_CLUSTERS_0_READONLY: false\r\nAUTH_TYPE: LDAP\r\nSPRING_CONFIG_ADDITIONAL-LOCATION: /roles.yml\r\nSPRING_LDAP_URLS: <LDAP URL>\r\nSPRING_LDAP_USER-FILTER-SEARCH-BASE: DC=Domain,DC=com\r\nSPRING_LDAP_USER-FILTER-SEARCH-FILTER: (&(sAMAccountName={0})(objectClass=person))\r\nSPRING_LDAP_ADMINUSER: <admin user>\r\nSPRING_LDAP_ADMINPASSWORD:<password> \r\n\r\nWith this LDAP config able to Login but it not listing cluster, topics,schema,connectors,etc.",
"@padmaachuth please raise a discussion or ask in discord community, that's unrelated to this issue",
"RBAC with LDAP does not work for me on v0.7.0.\r\nI have the following config:\r\n\r\n```yaml\r\nkafka:\r\n clusters:\r\n - name: dev\r\n readOnly: false\r\n bootstrapServers: ${KAFKA_BOOTSTRAP}:9093\r\n\r\nauth:\r\n type: LDAP\r\nspring:\r\n ldap:\r\n urls: ldap://${AD_SERVER}\r\n base: \"sAMAccountName={0},ou=${USERS_OU},dc=${AD_SERVER}\"\r\n admin-user: \"cn=${LDAP_ADMIN_USERNAME},dc=${AD_SERVER}\"\r\n admin-password: \"${LDAP_ADMIN_PASSWORD}\"\r\n user-filter-search-base: \"dc=${AD_SERVER}\"\r\n user-filter-search-filter: \"(&(sAMAccountName={0})(objectClass=user)(|(memberof=CN=${GROUP_NAME},OU=${GROUPS_OU},DC=${AD_SERVER})))\"\r\n group-filter-search-base: \"OU=${GROUPS_OU},DC=${AD_SERVER}\"\r\noauth2:\r\n ldap:\r\n activeDirectory: false\r\n activeDirectory.domain: \"${AD_SERVER}\"\r\n\r\nrbac:\r\n roles:\r\n - name: \"admins\"\r\n clusters:\r\n - dev\r\n subjects:\r\n - provider: ldap\r\n type: group\r\n value: \"${GROUP_NAME}\"\r\n permissions:\r\n - resource: applicationconfig\r\n # value not applicable for applicationconfig\r\n actions:\r\n - view\r\n # - edit\r\n - resource: clusterconfig\r\n # value not applicable for clusterconfig\r\n actions:\r\n - view\r\n # - edit\r\n - resource: topic\r\n value: \".*\"\r\n actions:\r\n - view\r\n # - create\r\n # - edit\r\n # - delete\r\n - messages_read\r\n - messages_produce\r\n - messages_delete\r\n - resource: consumer\r\n value: \".*\"\r\n actions:\r\n - view\r\n - delete\r\n - reset_offsets\r\n - resource: schema\r\n value: \".*\"\r\n actions:\r\n - view\r\n - create\r\n - delete\r\n - edit\r\n # - modify_global_compatibility\r\n - resource: connect\r\n value: \".*\"\r\n actions:\r\n - view\r\n # - edit\r\n # - create\r\n - resource: ksql\r\n # value not applicable for ksql\r\n actions:\r\n - execute\r\n # - resource: acl\r\n # # value not applicable for acl\r\n # value: \".*\" # FIXME: it crashes if this is removed\r\n # actions:\r\n # - view\r\n # # - edit\r\n```\r\n\r\nI am able to login but I cannot see any clusters:\r\n\r\n\r\n\r\nI have enabled DEBUG logs and I can see this:\r\n\r\n```log\r\nDEBUG [boundedElastic-3] o.s.s.l.u.DefaultLdapAuthoritiesPopulator: Found roles from search [{spring.security.ldap.dn=[CN=${GROUP_NAME},OU=${GROUPS_OU},DC=${AD_SERVER}], cn=[${GROUP_NAME}]}]\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.a.DelegatingReactiveAuthorizationManager: Checking authorization on '/api/clusters' using org.springframework.security.authorization.AuthenticatedReactiveAuthorizationManager@XXXXX\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.c.WebSessionServerSecurityContextRepository: Found SecurityContext 'SecurityContextImpl [Authentication=UsernamePasswordAuthenticationToken [Principal=com.provectus.kafka.ui.config.auth.RbacLdapUser@XXXXX, Credentials=[PROTECTED], Authenticated=true, Details=null, Granted Authorities=[${GROUP_NAME}]]]' in WebSession: 'org.springframework.web.server.session.InMemoryWebSessionStore$InMemoryWebSession@XXXXX'\r\nDEBUG [reactor-http-epoll-1] o.s.s.w.s.a.AuthorizationWebFilter: Authorization successful\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.RequestMappingHandlerMapping: [XXXXX] Mapped to com.provectus.kafka.ui.controller.ClustersController#getClusters(ServerWebExchange)\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.ResponseEntityResultHandler: [XXXXX] Using 'application/json' given [*/*] and supported [application/json]\r\nDEBUG [reactor-http-epoll-1] o.s.w.r.r.m.a.ResponseEntityResultHandler: [XXXXX] 0..N [com.provectus.kafka.ui.model.ClusterDTO]\r\nDEBUG [reactor-http-epoll-1] 
o.s.s.w.s.c.WebSessionServerSecurityContextRepository: Found SecurityContext 'SecurityContextImpl [Authentication=UsernamePasswordAuthenticationToken [Principal=com.provectus.kafka.ui.config.auth.RbacLdapUser@XXXXX, Credentials=[PROTECTED], Authenticated=true, Details=null, Granted Authorities=[${GROUP_NAME}]]]' in WebSession: 'org.springframework.web.server.session.InMemoryWebSessionStore$InMemoryWebSession@XXXXX'\r\nDEBUG [reactor-http-epoll-1] o.s.w.s.a.HttpWebHandlerAdapter: [XXXXX] Completed 200 OK\r\n```\r\n\r\nIf I remove the `rbac` section, I can login and I can see and do everything because there are no roles.\r\n\r\nLet me know if I should open a separate issue.\r\n\r\nThanks.",
"@alexisph please raise either a new _discussion_ or join us on discord (the link available in readme)"
] | [] | "2023-05-03T10:10:26Z" | [
"type/enhancement",
"scope/backend",
"status/accepted",
"area/rbac"
] | RBAC: Support LDAP | For RBAC AD support see #3741
Feature implemented, how to set this up:
- Set up LDAP authentication, docs [here](https://docs.kafka-ui.provectus.io/configuration/authentication/ldap-active-directory)
- Set up RBAC with LDAP subjects, docs [here](https://docs.kafka-ui.provectus.io/configuration/rbac-role-based-access-control)
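As a rough illustration of the backend wiring the group matching relies on, here is a minimal sketch, in the spirit of the accompanying patch but not the project's literal code, of a Spring LDAP authorities populator that returns group names unchanged so they can be compared verbatim against the `value` of an RBAC `ldap` group subject:
```java
import org.springframework.context.annotation.Bean;
import org.springframework.ldap.core.support.BaseLdapPathContextSource;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;

// Sketch only: in the real application this bean lives in the LDAP security configuration.
// No "ROLE_" prefix and no upper-casing, so a group such as "kafka-admins" surfaces as the
// authority "kafka-admins" and can match an RBAC subject value as-is.
class LdapAuthoritiesSketch {

  @Bean
  LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) {
    // "ou=groups,dc=example,dc=com" is an illustrative group search base; in the real
    // config it comes from the spring.ldap group-filter-search-base property.
    var populator = new DefaultLdapAuthoritiesPopulator(contextSource, "ou=groups,dc=example,dc=com");
    populator.setRolePrefix("");
    populator.setConvertToUpperCase(false);
    return populator;
  }
}
```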
| [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java"
] | [] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
index 13119b3bb94..9d07aca2dd5 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java
@@ -14,13 +14,11 @@ public class LdapProperties {
private String adminPassword;
private String userFilterSearchBase;
private String userFilterSearchFilter;
+ private String groupFilterSearchBase;
@Value("${oauth2.ldap.activeDirectory:false}")
private boolean isActiveDirectory;
@Value("${oauth2.ldap.aΡtiveDirectory.domain:@null}")
private String activeDirectoryDomain;
- @Value("${oauth2.ldap.groupRoleAttribute:cn}")
- private String groupRoleAttribute;
-
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
index fae1125239e..ce04a2e1659 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java
@@ -3,7 +3,6 @@
import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST;
import com.provectus.kafka.ui.service.rbac.AccessControlService;
-import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor;
import java.util.Collection;
import java.util.List;
import javax.annotation.Nullable;
@@ -12,7 +11,6 @@
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
-import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -34,6 +32,8 @@
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
+import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
+import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper;
import org.springframework.security.web.server.SecurityWebFilterChain;
@@ -50,7 +50,7 @@ public class LdapSecurityConfig {
@Bean
public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource,
- ApplicationContext context,
+ LdapAuthoritiesPopulator ldapAuthoritiesPopulator,
@Nullable AccessControlService acs) {
var rbacEnabled = acs != null && acs.isRbacEnabled();
BindAuthenticator ba = new BindAuthenticator(contextSource);
@@ -67,7 +67,7 @@ public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSo
AbstractLdapAuthenticationProvider authenticationProvider;
if (!props.isActiveDirectory()) {
authenticationProvider = rbacEnabled
- ? new LdapAuthenticationProvider(ba, new RbacLdapAuthoritiesExtractor(context))
+ ? new LdapAuthenticationProvider(ba, ldapAuthoritiesPopulator)
: new LdapAuthenticationProvider(ba);
} else {
authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(),
@@ -95,6 +95,15 @@ public BaseLdapPathContextSource contextSource() {
return ctx;
}
+ @Bean
+ @Primary
+ public LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) {
+ var authoritiesPopulator = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase());
+ authoritiesPopulator.setRolePrefix("");
+ authoritiesPopulator.setConvertToUpperCase(false);
+ return authoritiesPopulator;
+ }
+
@Bean
public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) {
log.info("Configuring LDAP authentication.");
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
deleted file mode 100644
index e24fc0aeda9..00000000000
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.provectus.kafka.ui.service.rbac.extractor;
-
-import com.provectus.kafka.ui.config.auth.LdapProperties;
-import com.provectus.kafka.ui.model.rbac.Role;
-import com.provectus.kafka.ui.model.rbac.provider.Provider;
-import com.provectus.kafka.ui.service.rbac.AccessControlService;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.context.ApplicationContext;
-import org.springframework.ldap.core.DirContextOperations;
-import org.springframework.ldap.core.support.BaseLdapPathContextSource;
-import org.springframework.security.core.GrantedAuthority;
-import org.springframework.security.core.authority.SimpleGrantedAuthority;
-import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
-import org.springframework.util.Assert;
-
-@Slf4j
-public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator {
-
- private final AccessControlService acs;
- private final LdapProperties props;
-
- private final Function<Map<String, List<String>>, GrantedAuthority> authorityMapper = (record) -> {
- String role = record.get(getGroupRoleAttribute()).get(0);
- return new SimpleGrantedAuthority(role);
- };
-
- public RbacLdapAuthoritiesExtractor(ApplicationContext context) {
- super(context.getBean(BaseLdapPathContextSource.class), null);
- this.acs = context.getBean(AccessControlService.class);
- this.props = context.getBean(LdapProperties.class);
- }
-
- @Override
- public Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) {
- return acs.getRoles()
- .stream()
- .map(Role::getSubjects)
- .flatMap(List::stream)
- .filter(s -> s.getProvider().equals(Provider.LDAP))
- .filter(s -> s.getType().equals("group"))
- .flatMap(subject -> getRoles(subject.getValue(), user.getNameInNamespace(), username).stream())
- .collect(Collectors.toSet());
- }
-
- private Set<GrantedAuthority> getRoles(String groupSearchBase, String userDn, String username) {
- Assert.notNull(groupSearchBase, "groupSearchBase is empty");
-
- log.trace(
- "Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]",
- username, userDn, props.getGroupRoleAttribute(), getGroupSearchFilter(), groupSearchBase);
-
- var ldapTemplate = getLdapTemplate();
- ldapTemplate.setIgnoreNameNotFoundException(true);
-
- Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues(
- groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username},
- new String[] {props.getGroupRoleAttribute()});
-
- return userRoles.stream()
- .map(authorityMapper)
- .peek(a -> log.debug("Mapped role [{}] for user [{}]", a, username))
- .collect(Collectors.toSet());
- }
-
-}
| null | train | test | 2023-05-08T10:36:50 | "2022-10-14T10:38:54Z" | Haarolean | train |
provectus/kafka-ui/3742_3771 | provectus/kafka-ui | provectus/kafka-ui/3742 | provectus/kafka-ui/3771 | [
"connected"
] | b1ac3482db3d4157984effe0bb7be321a2a37090 | 8337c9c183d632ea27b7c253d776fdfda4b19840 | [] | [
"the whole method could be removed, as it's the same as the default method in the interface"
] | "2023-05-03T13:02:29Z" | [
"type/enhancement",
"scope/backend",
"status/accepted",
"area/serde"
] | Serde: Missing serde for __consumer_offsets topic | ### Issue submitter TODO list
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there
### Is your proposal related to a problem?
When we try to read messages from the internal topic __consumer_offsets we need a specific deserializer to be able to read those values. This is handy for debugging or administration work and can be very useful with MirrorMaker usage, as it creates another topic that uses this same message format.
I couldn't really find it in the official docs, but I'm sure it's somewhere. As a resource you can look at this:
https://stackoverflow.com/questions/66475992/use-formatter-to-consume-topic-offset-consumers-then-output-nothing-in-kafka
and also its implementation in another UI for Kafka: https://github.com/tchiotludo/akhq/commit/648978058eb3e6999cc144bf0d0028e2da10978e
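For reference, here is a minimal sketch of what such a deserializer has to do for the record key (the value side is versioned in a similar way). It is an illustrative decoder based on the documented key layout, a 2-byte version followed by either group/topic/partition for offset commits or just the group for group metadata, and not the final implementation:
```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Illustrative decoder for __consumer_offsets record keys.
// Key versions 0/1 are offset-commit keys (group, topic, partition);
// key version 2 is a group-metadata key (group only).
class ConsumerOffsetsKeySketch {

  static String decodeKey(byte[] key) {
    ByteBuffer bb = ByteBuffer.wrap(key);
    short version = bb.getShort();
    if (version == 0 || version == 1) {
      String group = readString(bb);
      String topic = readString(bb);
      int partition = bb.getInt();
      return "offset commit: group=" + group + ", topic=" + topic + ", partition=" + partition;
    } else if (version == 2) {
      return "group metadata: group=" + readString(bb);
    }
    return "unknown key version: " + version;
  }

  // Kafka's STRING type is encoded as a 2-byte length followed by UTF-8 bytes
  private static String readString(ByteBuffer bb) {
    short len = bb.getShort();
    byte[] bytes = new byte[len];
    bb.get(bytes);
    return new String(bytes, StandardCharsets.UTF_8);
  }
}
```
The actual patch below takes the same approach, but builds on Kafka's own `Schema`/`Field`/`Type` classes for both the key and the versioned value schemas.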
### Describe the feature you're interested in
Add a new deserializer option for the specific offsets message format
### Describe alternatives you've considered
_No response_
### Version you're running
005e74f
### Additional context
_No response_ | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerde.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
index 66692894a61..ac3c2241cfc 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java
@@ -11,6 +11,7 @@
import com.provectus.kafka.ui.serde.api.Serde;
import com.provectus.kafka.ui.serdes.builtin.AvroEmbeddedSerde;
import com.provectus.kafka.ui.serdes.builtin.Base64Serde;
+import com.provectus.kafka.ui.serdes.builtin.ConsumerOffsetsSerde;
import com.provectus.kafka.ui.serdes.builtin.Int32Serde;
import com.provectus.kafka.ui.serdes.builtin.Int64Serde;
import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde;
@@ -118,6 +119,8 @@ public ClusterSerdes init(Environment env,
}
});
+ registerTopicRelatedSerde(registeredSerdes);
+
return new ClusterSerdes(
registeredSerdes,
Optional.ofNullable(clusterProperties.getDefaultKeySerde())
@@ -132,6 +135,27 @@ public ClusterSerdes init(Environment env,
);
}
+ /**
+ * Registers serdse that should only be used for specific (hard-coded) topics, like ConsumerOffsetsSerde.
+ */
+ private void registerTopicRelatedSerde(Map<String, SerdeInstance> serdes) {
+ registerConsumerOffsetsSerde(serdes);
+ }
+
+ private void registerConsumerOffsetsSerde(Map<String, SerdeInstance> serdes) {
+ var pattern = Pattern.compile(ConsumerOffsetsSerde.TOPIC);
+ serdes.put(
+ ConsumerOffsetsSerde.name(),
+ new SerdeInstance(
+ ConsumerOffsetsSerde.name(),
+ new ConsumerOffsetsSerde(),
+ pattern,
+ pattern,
+ null
+ )
+ );
+ }
+
private SerdeInstance createFallbackSerde() {
StringSerde serde = new StringSerde();
serde.configure(PropertyResolverImpl.empty(), PropertyResolverImpl.empty(), PropertyResolverImpl.empty());
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerde.java
new file mode 100644
index 00000000000..240f0b68f2b
--- /dev/null
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerde.java
@@ -0,0 +1,294 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.SchemaDescription;
+import com.provectus.kafka.ui.serdes.BuiltInSerde;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Map;
+import java.util.Optional;
+import lombok.SneakyThrows;
+import org.apache.kafka.common.protocol.types.ArrayOf;
+import org.apache.kafka.common.protocol.types.BoundField;
+import org.apache.kafka.common.protocol.types.CompactArrayOf;
+import org.apache.kafka.common.protocol.types.Field;
+import org.apache.kafka.common.protocol.types.Schema;
+import org.apache.kafka.common.protocol.types.Struct;
+import org.apache.kafka.common.protocol.types.Type;
+
+// Deserialization logic and message's schemas can be found in
+// kafka.coordinator.group.GroupMetadataManager (readMessageKey, readOffsetMessageValue, readGroupMessageValue)
+public class ConsumerOffsetsSerde implements BuiltInSerde {
+
+ private static final JsonMapper JSON_MAPPER = createMapper();
+
+ public static final String TOPIC = "__consumer_offsets";
+
+ public static String name() {
+ return "__consumer_offsets";
+ }
+
+ private static JsonMapper createMapper() {
+ var module = new SimpleModule();
+ module.addSerializer(Struct.class, new JsonSerializer<>() {
+ @Override
+ public void serialize(Struct value, JsonGenerator gen, SerializerProvider serializers) throws IOException {
+ gen.writeStartObject();
+ for (BoundField field : value.schema().fields()) {
+ var fieldVal = value.get(field);
+ gen.writeObjectField(field.def.name, fieldVal);
+ }
+ gen.writeEndObject();
+ }
+ });
+ var mapper = new JsonMapper();
+ mapper.registerModule(module);
+ return mapper;
+ }
+
+ @Override
+ public Optional<String> getDescription() {
+ return Optional.empty();
+ }
+
+ @Override
+ public Optional<SchemaDescription> getSchema(String topic, Target type) {
+ return Optional.empty();
+ }
+
+ @Override
+ public boolean canDeserialize(String topic, Target type) {
+ return topic.equals(TOPIC);
+ }
+
+ @Override
+ public boolean canSerialize(String topic, Target type) {
+ return false;
+ }
+
+ @Override
+ public Serializer serializer(String topic, Target type) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public Deserializer deserializer(String topic, Target type) {
+ return switch (type) {
+ case KEY -> keyDeserializer();
+ case VALUE -> valueDeserializer();
+ };
+ }
+
+ private Deserializer keyDeserializer() {
+ final Schema commitKeySchema = new Schema(
+ new Field("group", Type.STRING, ""),
+ new Field("topic", Type.STRING, ""),
+ new Field("partition", Type.INT32, "")
+ );
+
+ final Schema groupMetadataSchema = new Schema(
+ new Field("group", Type.STRING, "")
+ );
+
+ return (headers, data) -> {
+ var bb = ByteBuffer.wrap(data);
+ short version = bb.getShort();
+ return new DeserializeResult(
+ toJson(
+ switch (version) {
+ case 0, 1 -> commitKeySchema.read(bb);
+ case 2 -> groupMetadataSchema.read(bb);
+ default -> throw new IllegalStateException("Unknown group metadata message version: " + version);
+ }
+ ),
+ DeserializeResult.Type.JSON,
+ Map.of()
+ );
+ };
+ }
+
+ private Deserializer valueDeserializer() {
+ final Schema commitOffsetSchemaV0 =
+ new Schema(
+ new Field("offset", Type.INT64, ""),
+ new Field("metadata", Type.STRING, ""),
+ new Field("commit_timestamp", Type.INT64, "")
+ );
+
+ final Schema commitOffsetSchemaV1 =
+ new Schema(
+ new Field("offset", Type.INT64, ""),
+ new Field("metadata", Type.STRING, ""),
+ new Field("commit_timestamp", Type.INT64, ""),
+ new Field("expire_timestamp", Type.INT64, "")
+ );
+
+ final Schema commitOffsetSchemaV2 =
+ new Schema(
+ new Field("offset", Type.INT64, ""),
+ new Field("metadata", Type.STRING, ""),
+ new Field("commit_timestamp", Type.INT64, "")
+ );
+
+ final Schema commitOffsetSchemaV3 =
+ new Schema(
+ new Field("offset", Type.INT64, ""),
+ new Field("leader_epoch", Type.INT32, ""),
+ new Field("metadata", Type.STRING, ""),
+ new Field("commit_timestamp", Type.INT64, "")
+ );
+
+ final Schema commitOffsetSchemaV4 = new Schema(
+ new Field("offset", Type.INT64, ""),
+ new Field("leader_epoch", Type.INT32, ""),
+ new Field("metadata", Type.COMPACT_STRING, ""),
+ new Field("commit_timestamp", Type.INT64, ""),
+ Field.TaggedFieldsSection.of()
+ );
+
+ final Schema metadataSchema0 =
+ new Schema(
+ new Field("protocol_type", Type.STRING, ""),
+ new Field("generation", Type.INT32, ""),
+ new Field("protocol", Type.NULLABLE_STRING, ""),
+ new Field("leader", Type.NULLABLE_STRING, ""),
+ new Field("members", new ArrayOf(new Schema(
+ new Field("member_id", Type.STRING, ""),
+ new Field("client_id", Type.STRING, ""),
+ new Field("client_host", Type.STRING, ""),
+ new Field("session_timeout", Type.INT32, ""),
+ new Field("subscription", Type.BYTES, ""),
+ new Field("assignment", Type.BYTES, "")
+ )), "")
+ );
+
+ final Schema metadataSchema1 =
+ new Schema(
+ new Field("protocol_type", Type.STRING, ""),
+ new Field("generation", Type.INT32, ""),
+ new Field("protocol", Type.NULLABLE_STRING, ""),
+ new Field("leader", Type.NULLABLE_STRING, ""),
+ new Field("members", new ArrayOf(new Schema(
+ new Field("member_id", Type.STRING, ""),
+ new Field("client_id", Type.STRING, ""),
+ new Field("client_host", Type.STRING, ""),
+ new Field("rebalance_timeout", Type.INT32, ""),
+ new Field("session_timeout", Type.INT32, ""),
+ new Field("subscription", Type.BYTES, ""),
+ new Field("assignment", Type.BYTES, "")
+ )), "")
+ );
+
+ final Schema metadataSchema2 =
+ new Schema(
+ new Field("protocol_type", Type.STRING, ""),
+ new Field("generation", Type.INT32, ""),
+ new Field("protocol", Type.NULLABLE_STRING, ""),
+ new Field("leader", Type.NULLABLE_STRING, ""),
+ new Field("current_state_timestamp", Type.INT64, ""),
+ new Field("members", new ArrayOf(new Schema(
+ new Field("member_id", Type.STRING, ""),
+ new Field("client_id", Type.STRING, ""),
+ new Field("client_host", Type.STRING, ""),
+ new Field("rebalance_timeout", Type.INT32, ""),
+ new Field("session_timeout", Type.INT32, ""),
+ new Field("subscription", Type.BYTES, ""),
+ new Field("assignment", Type.BYTES, "")
+ )), "")
+ );
+
+ final Schema metadataSchema3 =
+ new Schema(
+ new Field("protocol_type", Type.STRING, ""),
+ new Field("generation", Type.INT32, ""),
+ new Field("protocol", Type.NULLABLE_STRING, ""),
+ new Field("leader", Type.NULLABLE_STRING, ""),
+ new Field("current_state_timestamp", Type.INT64, ""),
+ new Field("members", new ArrayOf(new Schema(
+ new Field("member_id", Type.STRING, ""),
+ new Field("group_instance_id", Type.NULLABLE_STRING, ""),
+ new Field("client_id", Type.STRING, ""),
+ new Field("client_host", Type.STRING, ""),
+ new Field("rebalance_timeout", Type.INT32, ""),
+ new Field("session_timeout", Type.INT32, ""),
+ new Field("subscription", Type.BYTES, ""),
+ new Field("assignment", Type.BYTES, "")
+ )), "")
+ );
+
+ final Schema metadataSchema4 =
+ new Schema(
+ new Field("protocol_type", Type.COMPACT_STRING, ""),
+ new Field("generation", Type.INT32, ""),
+ new Field("protocol", Type.COMPACT_NULLABLE_STRING, ""),
+ new Field("leader", Type.COMPACT_NULLABLE_STRING, ""),
+ new Field("current_state_timestamp", Type.INT64, ""),
+ new Field("members", new CompactArrayOf(new Schema(
+ new Field("member_id", Type.COMPACT_STRING, ""),
+ new Field("group_instance_id", Type.COMPACT_NULLABLE_STRING, ""),
+ new Field("client_id", Type.COMPACT_STRING, ""),
+ new Field("client_host", Type.COMPACT_STRING, ""),
+ new Field("rebalance_timeout", Type.INT32, ""),
+ new Field("session_timeout", Type.INT32, ""),
+ new Field("subscription", Type.COMPACT_BYTES, ""),
+ new Field("assignment", Type.COMPACT_BYTES, ""),
+ Field.TaggedFieldsSection.of()
+ )), ""),
+ Field.TaggedFieldsSection.of()
+ );
+
+ return (headers, data) -> {
+ String result;
+ var bb = ByteBuffer.wrap(data);
+ short version = bb.getShort();
+ // ideally, we should distinguish whether the value is a commit or a metadata message
+ // by checking the record's key, but our current serde structure doesn't allow that,
+ // so we try to parse the value as metadata first and fall back to a commit message
+ try {
+ result = toJson(
+ switch (version) {
+ case 0 -> metadataSchema0.read(bb);
+ case 1 -> metadataSchema1.read(bb);
+ case 2 -> metadataSchema2.read(bb);
+ case 3 -> metadataSchema3.read(bb);
+ case 4 -> metadataSchema4.read(bb);
+ default -> throw new IllegalArgumentException("Unrecognized version: " + version);
+ }
+ );
+ } catch (Throwable e) {
+ bb = bb.rewind();
+ bb.getShort(); // skipping version
+ result = toJson(
+ switch (version) {
+ case 0 -> commitOffsetSchemaV0.read(bb);
+ case 1 -> commitOffsetSchemaV1.read(bb);
+ case 2 -> commitOffsetSchemaV2.read(bb);
+ case 3 -> commitOffsetSchemaV3.read(bb);
+ case 4 -> commitOffsetSchemaV4.read(bb);
+ default -> throw new IllegalArgumentException("Unrecognized version: " + version);
+ }
+ );
+ }
+
+ if (bb.remaining() != 0) {
+ throw new IllegalArgumentException(
+ "Message buffer is not read to the end, which is likely means message is unrecognized");
+ }
+ return new DeserializeResult(
+ result,
+ DeserializeResult.Type.JSON,
+ Map.of()
+ );
+ };
+ }
+
+ @SneakyThrows
+ private String toJson(Struct s) {
+ return JSON_MAPPER.writeValueAsString(s);
+ }
+}
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java
new file mode 100644
index 00000000000..1fab56322a8
--- /dev/null
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java
@@ -0,0 +1,185 @@
+package com.provectus.kafka.ui.serdes.builtin;
+
+import static com.provectus.kafka.ui.serdes.builtin.ConsumerOffsetsSerde.TOPIC;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.provectus.kafka.ui.AbstractIntegrationTest;
+import com.provectus.kafka.ui.producer.KafkaTestProducer;
+import com.provectus.kafka.ui.serde.api.DeserializeResult;
+import com.provectus.kafka.ui.serde.api.Serde;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.UUID;
+import lombok.SneakyThrows;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.BytesDeserializer;
+import org.apache.kafka.common.utils.Bytes;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.testcontainers.shaded.org.awaitility.Awaitility;
+import reactor.util.function.Tuple2;
+import reactor.util.function.Tuples;
+
+class ConsumerOffsetsSerdeTest extends AbstractIntegrationTest {
+
+ private static final int MSGS_TO_GENERATE = 10;
+
+ private static String consumerGroupName;
+ private static String committedTopic;
+
+ @BeforeAll
+ static void createTopicAndCommitItsOffset() {
+ committedTopic = ConsumerOffsetsSerdeTest.class.getSimpleName() + "-" + UUID.randomUUID();
+ consumerGroupName = committedTopic + "-group";
+ createTopic(new NewTopic(committedTopic, 1, (short) 1));
+
+ try (var producer = KafkaTestProducer.forKafka(kafka)) {
+ for (int i = 0; i < MSGS_TO_GENERATE; i++) {
+ producer.send(committedTopic, "i=" + i);
+ }
+ }
+ try (var consumer = createConsumer(consumerGroupName)) {
+ consumer.subscribe(List.of(committedTopic));
+ int polled = 0;
+ while (polled < MSGS_TO_GENERATE) {
+ polled += consumer.poll(Duration.ofMillis(100)).count();
+ }
+ consumer.commitSync();
+ }
+ }
+
+ @AfterAll
+ static void cleanUp() {
+ deleteTopic(committedTopic);
+ }
+
+ @Test
+ void canOnlyDeserializeConsumerOffsetsTopic() {
+ var serde = new ConsumerOffsetsSerde();
+ assertThat(serde.canDeserialize(ConsumerOffsetsSerde.TOPIC, Serde.Target.KEY)).isTrue();
+ assertThat(serde.canDeserialize(ConsumerOffsetsSerde.TOPIC, Serde.Target.VALUE)).isTrue();
+ assertThat(serde.canDeserialize("anyOtherTopic", Serde.Target.KEY)).isFalse();
+ assertThat(serde.canDeserialize("anyOtherTopic", Serde.Target.VALUE)).isFalse();
+ }
+
+ @Test
+ void deserializesMessagesMadeByConsumerActivity() {
+ var serde = new ConsumerOffsetsSerde();
+ var keyDeserializer = serde.deserializer(TOPIC, Serde.Target.KEY);
+ var valueDeserializer = serde.deserializer(TOPIC, Serde.Target.VALUE);
+
+ try (var consumer = createConsumer(consumerGroupName + "-check")) {
+ consumer.subscribe(List.of(ConsumerOffsetsSerde.TOPIC));
+ List<Tuple2<DeserializeResult, DeserializeResult>> polled = new ArrayList<>();
+
+ Awaitility.await()
+ .pollInSameThread()
+ .atMost(Duration.ofMinutes(1))
+ .untilAsserted(() -> {
+ for (var rec : consumer.poll(Duration.ofMillis(200))) {
+ DeserializeResult key = rec.key() != null
+ ? keyDeserializer.deserialize(null, rec.key().get())
+ : null;
+ DeserializeResult val = rec.value() != null
+ ? valueDeserializer.deserialize(null, rec.value().get())
+ : null;
+ if (key != null && val != null) {
+ polled.add(Tuples.of(key, val));
+ }
+ }
+ assertThat(polled).anyMatch(t -> isCommitMessage(t.getT1(), t.getT2()));
+ assertThat(polled).anyMatch(t -> isGroupMetadataMessage(t.getT1(), t.getT2()));
+ });
+ }
+ }
+
+ // Sample commit record:
+ //
+ // key: {
+ // "group": "test_Members_3",
+ // "topic": "test",
+ // "partition": 0
+ // }
+ //
+ // value:
+ // {
+ // "offset": 2,
+ // "leader_epoch": 0,
+ // "metadata": "",
+ // "commit_timestamp": 1683112980588
+ // }
+ private boolean isCommitMessage(DeserializeResult key, DeserializeResult value) {
+ var keyJson = toMapFromJsom(key);
+ boolean keyIsOk = consumerGroupName.equals(keyJson.get("group"))
+ && committedTopic.equals(keyJson.get("topic"))
+ && ((Integer) 0).equals(keyJson.get("partition"));
+
+ var valueJson = toMapFromJsom(value);
+ boolean valueIsOk = valueJson.containsKey("offset")
+ && valueJson.get("offset").equals(MSGS_TO_GENERATE)
+ && valueJson.containsKey("commit_timestamp");
+
+ return keyIsOk && valueIsOk;
+ }
+
+ // Sample group metadata record:
+ //
+ // key: {
+ // "group": "test_Members_3"
+ // }
+ //
+ // value:
+ // {
+ // "protocol_type": "consumer",
+ // "generation": 1,
+ // "protocol": "range",
+ // "leader": "consumer-test_Members_3-1-5a37876e-e42f-420e-9c7d-6902889bd5dd",
+ // "current_state_timestamp": 1683112974561,
+ // "members": [
+ // {
+ // "member_id": "consumer-test_Members_3-1-5a37876e-e42f-420e-9c7d-6902889bd5dd",
+ // "group_instance_id": null,
+ // "client_id": "consumer-test_Members_3-1",
+ // "client_host": "/192.168.16.1",
+ // "rebalance_timeout": 300000,
+ // "session_timeout": 45000,
+ // "subscription": "AAEAAAABAAR0ZXN0/////wAAAAA=",
+ // "assignment": "AAEAAAABAAR0ZXN0AAAAAQAAAAD/////"
+ // }
+ // ]
+ // }
+ private boolean isGroupMetadataMessage(DeserializeResult key, DeserializeResult value) {
+ var keyJson = toMapFromJsom(key);
+ boolean keyIsOk = consumerGroupName.equals(keyJson.get("group")) && keyJson.size() == 1;
+
+ var valueJson = toMapFromJsom(value);
+ boolean valueIsOk = valueJson.keySet()
+ .containsAll(Set.of("protocol_type", "generation", "leader", "members"));
+
+ return keyIsOk && valueIsOk;
+ }
+
+ @SneakyThrows
+ private Map<String, Object> toMapFromJsom(DeserializeResult result) {
+ return new JsonMapper().readValue(result.getResult(), Map.class);
+ }
+
+ private static KafkaConsumer<Bytes, Bytes> createConsumer(String groupId) {
+ Properties props = new Properties();
+ props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
+ props.put(ConsumerConfig.CLIENT_ID_CONFIG, groupId);
+ props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
+ props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
+ props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
+ props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
+ return new KafkaConsumer<>(props);
+ }
+}
| test | test | 2023-06-21T14:33:30 | "2023-04-27T07:42:19Z" | Shawcs | train |
provectus/kafka-ui/3751_3779 | provectus/kafka-ui | provectus/kafka-ui/3751 | provectus/kafka-ui/3779 | [
"connected"
] | 29d91bca4b8c313e23278e126dad1020a87386e0 | e118aaba3d2ebc0f2a09e6fb2ca43b555254637e | [
"Hello there BiT793! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π"
] | [] | "2023-05-04T14:42:57Z" | [
"type/bug",
"scope/frontend",
"status/accepted",
"status/confirmed"
] | Messages: Message data digits are rounded | ### Issue submitter TODO list
- [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
### Describe the bug (actual behavior)
Hi, I noticed a visual error, probably caused by a type mismatch.
In more detail: when you look at the thumbnail it looks right, but when you look at the "values" view, the number is rounded.
For example, the value 23958446825181076828 is rounded to 2395844682518107600.
### Expected behavior
_No response_
### Your installation details
My docker-compose
---
version: '2'
services:
kafka-ui:
container_name: kafka-UI-localhost-8383
image: provectuslabs/kafka-ui:latest
ports:
- 8383:8080
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: 192.168.0.122:9092
KAFKA_CLUSTERS_1_NAME: UAT
KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: 192.168.0.124:9092
KAFKA_CLUSTERS_2_NAME: DEV
KAFKA_CLUSTERS_2_BOOTSTRAPSERVERS: 192.168.0.127:9092
### Steps to reproduce
send the values 2395844682518107828 or 3019091576111503179 or 1005691536404004480 to the topic
### Screenshots

### Logs
_No response_
### Additional context
There is probably a rounding or conversion to type | [
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml",
"kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx"
] | [
"kafka-ui-react-app/package.json",
"kafka-ui-react-app/pnpm-lock.yaml",
"kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx"
] | [] | diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json
index 50ab162eca8..fada92fa4fa 100644
--- a/kafka-ui-react-app/package.json
+++ b/kafka-ui-react-app/package.json
@@ -24,6 +24,7 @@
"json-schema-faker": "^0.5.0-rcv.44",
"jsonpath-plus": "^7.2.0",
"lodash": "^4.17.21",
+ "lossless-json": "^2.0.8",
"pretty-ms": "7.0.1",
"react": "^18.1.0",
"react-ace": "^10.1.0",
@@ -71,6 +72,7 @@
"@testing-library/user-event": "^14.4.3",
"@types/eventsource": "^1.1.8",
"@types/lodash": "^4.14.172",
+ "@types/lossless-json": "^1.0.1",
"@types/node": "^16.4.13",
"@types/react": "^18.0.9",
"@types/react-datepicker": "^4.8.0",
diff --git a/kafka-ui-react-app/pnpm-lock.yaml b/kafka-ui-react-app/pnpm-lock.yaml
index feb4221edef..fe57d64ed1f 100644
--- a/kafka-ui-react-app/pnpm-lock.yaml
+++ b/kafka-ui-react-app/pnpm-lock.yaml
@@ -19,6 +19,7 @@ specifiers:
'@testing-library/user-event': ^14.4.3
'@types/eventsource': ^1.1.8
'@types/lodash': ^4.14.172
+ '@types/lossless-json': ^1.0.1
'@types/node': ^16.4.13
'@types/react': ^18.0.9
'@types/react-datepicker': ^4.8.0
@@ -55,6 +56,7 @@ specifiers:
json-schema-faker: ^0.5.0-rcv.44
jsonpath-plus: ^7.2.0
lodash: ^4.17.21
+ lossless-json: ^2.0.8
prettier: ^2.8.4
pretty-ms: 7.0.1
react: ^18.1.0
@@ -96,7 +98,7 @@ dependencies:
'@types/testing-library__jest-dom': 5.14.5
ace-builds: 1.7.1
ajv: 8.8.2
- ajv-formats: 2.1.1
+ ajv-formats: [email protected]
classnames: 2.3.1
fetch-mock: 9.11.0
jest: 29.5.0_6m7kcbkkzjz4ln6z66tlzx44we
@@ -104,6 +106,7 @@ dependencies:
json-schema-faker: 0.5.0-rcv.44
jsonpath-plus: 7.2.0
lodash: 4.17.21
+ lossless-json: 2.0.8
pretty-ms: 7.0.1
react: 18.1.0
react-ace: 10.1.0_ef5jwxihqo6n7gxfmzogljlgcm
@@ -136,6 +139,7 @@ devDependencies:
'@testing-library/user-event': 14.4.3_@[email protected]
'@types/eventsource': 1.1.8
'@types/lodash': 4.14.177
+ '@types/lossless-json': 1.0.1
'@types/node': 16.11.7
'@types/react': 18.0.9
'@types/react-datepicker': [email protected]
@@ -1770,6 +1774,10 @@ packages:
resolution: {integrity: sha512-0fDwydE2clKe9MNfvXHBHF9WEahRuj+msTuQqOmAApNORFvhMYZKNGGJdCzuhheVjMps/ti0Ak/iJPACMaevvw==}
dev: true
+ /@types/lossless-json/1.0.1:
+ resolution: {integrity: sha512-zPE8kmpeL5/6L5gtTQHSOkAW/OSYYNTDRt6/2oEgLO1Zd3Rj5WVDoMloTtLJxQJhZGLGbL4pktKSh3NbzdaWdw==}
+ dev: true
+
/@types/node/16.11.7:
resolution: {integrity: sha512-QB5D2sqfSjCmTuWcBWyJ+/44bcjO7VbjSbOE0ucoVbAsSNQc4Lt6QkgkVXkTDwkL4z/beecZNDvVX15D4P8Jbw==}
@@ -2050,8 +2058,10 @@ packages:
- supports-color
dev: true
- /ajv-formats/2.1.1:
+ /ajv-formats/[email protected]:
resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==}
+ peerDependencies:
+ ajv: ^8.0.0
peerDependenciesMeta:
ajv:
optional: true
@@ -2734,8 +2744,8 @@ packages:
ms: 2.1.2
supports-color: 5.5.0
- /decimal.js/10.3.1:
- resolution: {integrity: sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==}
+ /decimal.js/10.4.3:
+ resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==}
dev: true
/dedent/0.7.0:
@@ -4649,7 +4659,7 @@ packages:
cssom: 0.5.0
cssstyle: 2.3.0
data-urls: 3.0.2
- decimal.js: 10.3.1
+ decimal.js: 10.4.3
domexception: 4.0.0
escodegen: 2.0.0
form-data: 4.0.0
@@ -4841,6 +4851,10 @@ packages:
dependencies:
js-tokens: 4.0.0
+ /lossless-json/2.0.8:
+ resolution: {integrity: sha512-7/GaZldUc7H5oNZlSk6bF06cRbtA7oF8zWXwbfMZm8yrYC2debx0KvWTBbQIbj6fh08LsXTWg+YtHJshXgYKow==}
+ dev: false
+
/lru-cache/6.0.0:
resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
engines: {node: '>=10'}
diff --git a/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx b/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx
index e489d58f2cc..4b46fafbf1d 100644
--- a/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx
+++ b/kafka-ui-react-app/src/components/common/EditorViewer/EditorViewer.tsx
@@ -1,6 +1,7 @@
import React from 'react';
import Editor from 'components/common/Editor/Editor';
import { SchemaType } from 'generated-sources';
+import { parse, stringify } from 'lossless-json';
import * as S from './EditorViewer.styled';
@@ -9,10 +10,9 @@ export interface EditorViewerProps {
schemaType?: string;
maxLines?: number;
}
-
const getSchemaValue = (data: string, schemaType?: string) => {
if (schemaType === SchemaType.JSON || schemaType === SchemaType.AVRO) {
- return JSON.stringify(JSON.parse(data), null, '\t');
+ return stringify(parse(data), undefined, '\t');
}
return data;
};
| null | train | test | 2023-05-31T17:50:40 | "2023-04-28T16:21:51Z" | BiT793 | train |
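The fix above swaps JSON.parse / JSON.stringify for lossless-json's parse / stringify in EditorViewer, because a plain JavaScript Number (and therefore JSON.parse) stores values as IEEE-754 doubles, which cannot represent every integer above 2^53. The following is a small Java illustration of that root cause, using the value from the issue's reproduction steps; the exact rounded digits are deliberately not asserted, since they depend on how the nearest representable double is formatted.

```java
// Illustration only: shows why a 19-digit id cannot survive a round trip through a
// 64-bit floating-point number, which is what plain JSON.parse uses for all numbers.
public class PrecisionLossDemo {
  public static void main(String[] args) {
    long original = 2395844682518107828L;  // value from the issue's reproduction steps
    double asDouble = (double) original;   // what a JavaScript Number would store
    System.out.println(original);                      // 2395844682518107828
    System.out.println((long) asDouble);               // a nearby rounded value
    System.out.println(original == (long) asDouble);   // false: precision was lost
  }
}
```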
provectus/kafka-ui/124_3783 | provectus/kafka-ui | provectus/kafka-ui/124 | provectus/kafka-ui/3783 | [
"connected"
] | 476cbfb691a0b19f9227ecb12b2e5ce4b5a7156c | 1cd303a90b497546db1eccc664e26a277257923c | [
"This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.\n",
"TODO BE: \r\n- add specific endpoints for editing (consumers, producers, streams, etc.)"
] | [] | "2023-05-05T06:15:41Z" | [
"scope/backend",
"scope/frontend",
"status/accepted",
"type/feature",
"area/acl"
] | BE: ACL management | Add ability to manage ACL for cluster | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java",
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java",
"kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
index 1de8f4d71be..71700e3f7b7 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AclsController.java
@@ -2,6 +2,9 @@
import com.provectus.kafka.ui.api.AclsApi;
import com.provectus.kafka.ui.mapper.ClusterMapper;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaAclDTO;
import com.provectus.kafka.ui.model.KafkaAclNamePatternTypeDTO;
import com.provectus.kafka.ui.model.KafkaAclResourceTypeDTO;
@@ -123,4 +126,55 @@ public Mono<ResponseEntity<Void>> syncAclsCsv(String clusterName, Mono<String> c
.doOnEach(sig -> auditService.audit(context, sig))
.thenReturn(ResponseEntity.ok().build());
}
+
+ @Override
+ public Mono<ResponseEntity<Void>> createConsumerAcl(String clusterName,
+ Mono<CreateConsumerAclDTO> createConsumerAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createConsumerAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createConsumerAclDto)
+ .flatMap(req -> aclsService.createConsumerAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> createProducerAcl(String clusterName,
+ Mono<CreateProducerAclDTO> createProducerAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createProducerAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createProducerAclDto)
+ .flatMap(req -> aclsService.createProducerAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
+
+ @Override
+ public Mono<ResponseEntity<Void>> createStreamAppAcl(String clusterName,
+ Mono<CreateStreamAppAclDTO> createStreamAppAclDto,
+ ServerWebExchange exchange) {
+ AccessContext context = AccessContext.builder()
+ .cluster(clusterName)
+ .aclActions(AclAction.EDIT)
+ .operationName("createStreamAppAcl")
+ .build();
+
+ return accessControlService.validateAccess(context)
+ .then(createStreamAppAclDto)
+ .flatMap(req -> aclsService.createStreamAppAcl(getCluster(clusterName), req))
+ .doOnEach(sig -> auditService.audit(context, sig))
+ .thenReturn(ResponseEntity.ok().build());
+ }
}
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
index c2ab1b5eb4d..a621ce99cc3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/acl/AclsService.java
@@ -1,16 +1,44 @@
package com.provectus.kafka.ui.service.acl;
+import static org.apache.kafka.common.acl.AclOperation.ALL;
+import static org.apache.kafka.common.acl.AclOperation.CREATE;
+import static org.apache.kafka.common.acl.AclOperation.DESCRIBE;
+import static org.apache.kafka.common.acl.AclOperation.IDEMPOTENT_WRITE;
+import static org.apache.kafka.common.acl.AclOperation.READ;
+import static org.apache.kafka.common.acl.AclOperation.WRITE;
+import static org.apache.kafka.common.acl.AclPermissionType.ALLOW;
+import static org.apache.kafka.common.resource.PatternType.LITERAL;
+import static org.apache.kafka.common.resource.PatternType.PREFIXED;
+import static org.apache.kafka.common.resource.ResourceType.CLUSTER;
+import static org.apache.kafka.common.resource.ResourceType.GROUP;
+import static org.apache.kafka.common.resource.ResourceType.TOPIC;
+import static org.apache.kafka.common.resource.ResourceType.TRANSACTIONAL_ID;
+
import com.google.common.collect.Sets;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
+import com.provectus.kafka.ui.service.ReactiveAdminClient;
+import java.util.ArrayList;
+import java.util.Collection;
import java.util.Comparator;
import java.util.List;
+import java.util.Optional;
import java.util.Set;
+import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
+import org.apache.kafka.common.acl.AclOperation;
+import org.apache.kafka.common.resource.Resource;
+import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
+import org.apache.kafka.common.resource.ResourceType;
import org.springframework.stereotype.Service;
+import org.springframework.util.CollectionUtils;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
@@ -22,11 +50,14 @@ public class AclsService {
private final AdminClientService adminClientService;
public Mono<Void> createAcl(KafkaCluster cluster, AclBinding aclBinding) {
- var aclString = AclCsv.createAclString(aclBinding);
- log.info("CREATING ACL: [{}]", aclString);
return adminClientService.get(cluster)
- .flatMap(ac -> ac.createAcls(List.of(aclBinding)))
- .doOnSuccess(v -> log.info("ACL CREATED: [{}]", aclString));
+ .flatMap(ac -> createAclsWithLogging(ac, List.of(aclBinding)));
+ }
+
+ private Mono<Void> createAclsWithLogging(ReactiveAdminClient ac, Collection<AclBinding> bindings) {
+ bindings.forEach(b -> log.info("CREATING ACL: [{}]", AclCsv.createAclString(b)));
+ return ac.createAcls(bindings)
+ .doOnSuccess(v -> bindings.forEach(b -> log.info("ACL CREATED: [{}]", AclCsv.createAclString(b))));
}
public Mono<Void> deleteAcl(KafkaCluster cluster, AclBinding aclBinding) {
@@ -92,4 +123,150 @@ private void logAclSyncPlan(KafkaCluster cluster, Set<AclBinding> toBeAdded, Set
}
}
+ // creates allow binding for resources by prefix or specific names list
+ private List<AclBinding> createAllowBindings(ResourceType resourceType,
+ List<AclOperation> opsToAllow,
+ String principal,
+ String host,
+ @Nullable String resourcePrefix,
+ @Nullable Collection<String> resourceNames) {
+ List<AclBinding> bindings = new ArrayList<>();
+ if (resourcePrefix != null) {
+ for (var op : opsToAllow) {
+ bindings.add(
+ new AclBinding(
+ new ResourcePattern(resourceType, resourcePrefix, PREFIXED),
+ new AccessControlEntry(principal, host, op, ALLOW)));
+ }
+ }
+ if (!CollectionUtils.isEmpty(resourceNames)) {
+ resourceNames.stream()
+ .distinct()
+ .forEach(resource ->
+ opsToAllow.forEach(op ->
+ bindings.add(
+ new AclBinding(
+ new ResourcePattern(resourceType, resource, LITERAL),
+ new AccessControlEntry(principal, host, op, ALLOW)))));
+ }
+ return bindings;
+ }
+
+ public Mono<Void> createConsumerAcl(KafkaCluster cluster, CreateConsumerAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createConsumerBindings(request)))
+ .then();
+ }
+
+ //Read, Describe on topics, Read on consumerGroups
+ private List<AclBinding> createConsumerBindings(CreateConsumerAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(TOPIC,
+ List.of(READ, DESCRIBE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTopicsPrefix(),
+ request.getTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ GROUP,
+ List.of(READ),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getConsumerGroupsPrefix(),
+ request.getConsumerGroups()));
+ return bindings;
+ }
+
+ public Mono<Void> createProducerAcl(KafkaCluster cluster, CreateProducerAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createProducerBindings(request)))
+ .then();
+ }
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled
+ private List<AclBinding> createProducerBindings(CreateProducerAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(WRITE, DESCRIBE, CREATE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTopicsPrefix(),
+ request.getTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ TRANSACTIONAL_ID,
+ List.of(WRITE, DESCRIBE),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getTransactionsIdPrefix(),
+ Optional.ofNullable(request.getTransactionalId()).map(List::of).orElse(null)));
+
+ if (Boolean.TRUE.equals(request.getIdempotent())) {
+ bindings.addAll(
+ createAllowBindings(
+ CLUSTER,
+ List.of(IDEMPOTENT_WRITE),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ List.of(Resource.CLUSTER_NAME))); // cluster name is a const string in ACL api
+ }
+ return bindings;
+ }
+
+ public Mono<Void> createStreamAppAcl(KafkaCluster cluster, CreateStreamAppAclDTO request) {
+ return adminClientService.get(cluster)
+ .flatMap(ac -> createAclsWithLogging(ac, createStreamAppBindings(request)))
+ .then();
+ }
+
+ // Read on input topics, Write on output topics
+ // ALL on applicationId-prefixed Groups and Topics
+ private List<AclBinding> createStreamAppBindings(CreateStreamAppAclDTO request) {
+ List<AclBinding> bindings = new ArrayList<>();
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(READ),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ request.getInputTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(WRITE),
+ request.getPrincipal(),
+ request.getHost(),
+ null,
+ request.getOutputTopics()));
+
+ bindings.addAll(
+ createAllowBindings(
+ GROUP,
+ List.of(ALL),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getApplicationId(),
+ null));
+
+ bindings.addAll(
+ createAllowBindings(
+ TOPIC,
+ List.of(ALL),
+ request.getPrincipal(),
+ request.getHost(),
+ request.getApplicationId(),
+ null));
+ return bindings;
+ }
+
}
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
index f9ed233bc1d..9484948b67e 100644
--- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
+++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml
@@ -1868,6 +1868,69 @@ paths:
404:
description: Acl not found
+ /api/clusters/{clusterName}/acl/consumer:
+ post:
+ tags:
+ - Acls
+ summary: createConsumerAcl
+ operationId: createConsumerAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateConsumerAcl'
+ responses:
+ 200:
+ description: OK
+
+ /api/clusters/{clusterName}/acl/producer:
+ post:
+ tags:
+ - Acls
+ summary: createProducerAcl
+ operationId: createProducerAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateProducerAcl'
+ responses:
+ 200:
+ description: OK
+
+ /api/clusters/{clusterName}/acl/streamApp:
+ post:
+ tags:
+ - Acls
+ summary: createStreamAppAcl
+ operationId: createStreamAppAcl
+ parameters:
+ - name: clusterName
+ in: path
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/CreateStreamAppAcl'
+ responses:
+ 200:
+ description: OK
+
/api/authorization:
get:
tags:
@@ -3551,7 +3614,7 @@ components:
principal:
type: string
host:
- type: string # "*" if acl can be applied to any resource of given type
+ type: string
operation:
type: string
enum:
@@ -3575,6 +3638,69 @@ components:
- ALLOW
- DENY
+ CreateConsumerAcl:
+ type: object
+ required: [principal, host]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ topics:
+ type: array
+ items:
+ type: string
+ topicsPrefix:
+ type: string
+ consumerGroups:
+ type: array
+ items:
+ type: string
+ consumerGroupsPrefix:
+ type: string
+
+ CreateProducerAcl:
+ type: object
+ required: [principal, host]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ topics:
+ type: array
+ items:
+ type: string
+ topicsPrefix:
+ type: string
+ transactionalId:
+ type: string
+ transactionsIdPrefix:
+ type: string
+ idempotent:
+ type: boolean
+ default: false
+
+ CreateStreamAppAcl:
+ type: object
+ required: [principal, host, applicationId, inputTopics, outputTopics]
+ properties:
+ principal:
+ type: string
+ host:
+ type: string
+ inputTopics:
+ type: array
+ items:
+ type: string
+ outputTopics:
+ type: array
+ items:
+ type: string
+ applicationId:
+ nullable: false
+ type: string
+
KafkaAclResourceType:
type: string
enum:
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
index 5791bb20414..340aad7091c 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/acl/AclsServiceTest.java
@@ -4,16 +4,21 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
+import com.provectus.kafka.ui.model.CreateConsumerAclDTO;
+import com.provectus.kafka.ui.model.CreateProducerAclDTO;
+import com.provectus.kafka.ui.model.CreateStreamAppAclDTO;
import com.provectus.kafka.ui.model.KafkaCluster;
import com.provectus.kafka.ui.service.AdminClientService;
import com.provectus.kafka.ui.service.ReactiveAdminClient;
import java.util.Collection;
import java.util.List;
+import java.util.UUID;
import org.apache.kafka.common.acl.AccessControlEntry;
import org.apache.kafka.common.acl.AclBinding;
import org.apache.kafka.common.acl.AclOperation;
import org.apache.kafka.common.acl.AclPermissionType;
import org.apache.kafka.common.resource.PatternType;
+import org.apache.kafka.common.resource.Resource;
import org.apache.kafka.common.resource.ResourcePattern;
import org.apache.kafka.common.resource.ResourcePatternFilter;
import org.apache.kafka.common.resource.ResourceType;
@@ -53,12 +58,12 @@ void testSyncAclWithAclCsv() {
when(adminClientMock.listAcls(ResourcePatternFilter.ANY))
.thenReturn(Mono.just(List.of(existingBinding1, existingBinding2)));
- ArgumentCaptor<?> createdCaptor = ArgumentCaptor.forClass(Collection.class);
- when(adminClientMock.createAcls((Collection<AclBinding>) createdCaptor.capture()))
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
.thenReturn(Mono.empty());
- ArgumentCaptor<?> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
- when(adminClientMock.deleteAcls((Collection<AclBinding>) deletedCaptor.capture()))
+ ArgumentCaptor<Collection<AclBinding>> deletedCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.deleteAcls(deletedCaptor.capture()))
.thenReturn(Mono.empty());
aclsService.syncAclWithAclCsv(
@@ -68,15 +73,218 @@ void testSyncAclWithAclCsv() {
+ "User:test3,GROUP,PREFIXED,groupNew,DESCRIBE,DENY,localhost"
).block();
- Collection<AclBinding> createdBindings = (Collection<AclBinding>) createdCaptor.getValue();
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
assertThat(createdBindings)
.hasSize(1)
.contains(newBindingToBeAdded);
- Collection<AclBinding> deletedBindings = (Collection<AclBinding>) deletedCaptor.getValue();
+ Collection<AclBinding> deletedBindings = deletedCaptor.getValue();
assertThat(deletedBindings)
.hasSize(1)
.contains(existingBinding2);
}
+
+ @Test
+ void createsConsumerDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createConsumerAcl(
+ CLUSTER,
+ new CreateConsumerAclDTO()
+ .principal(principal)
+ .host(host)
+ .consumerGroups(List.of("cg1", "cg2"))
+ .topics(List.of("t1", "t2"))
+ ).block();
+
+ //Read, Describe on topics, Read on consumerGroups
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(6)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cg1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cg2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)));
+ }
+
+ @Test
+ void createsConsumerDependantAclsWhenTopicsAndGroupsSpecifiedByPrefix() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createConsumerAcl(
+ CLUSTER,
+ new CreateConsumerAclDTO()
+ .principal(principal)
+ .host(host)
+ .consumerGroupsPrefix("cgPref")
+ .topicsPrefix("topicPref")
+ ).block();
+
+ //Read, Describe on topics, Read on consumerGroups
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(3)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "cgPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)));
+ }
+
+ @Test
+ void createsProducerDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createProducerAcl(
+ CLUSTER,
+ new CreateProducerAclDTO()
+ .principal(principal)
+ .host(host)
+ .topics(List.of("t1"))
+ .idempotent(true)
+ .transactionalId("txId1")
+ ).block();
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled (true)
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(6)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txId1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txId1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.CLUSTER, Resource.CLUSTER_NAME, PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.IDEMPOTENT_WRITE, AclPermissionType.ALLOW)));
+ }
+
+
+ @Test
+ void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createProducerAcl(
+ CLUSTER,
+ new CreateProducerAclDTO()
+ .principal(principal)
+ .host(host)
+ .topicsPrefix("topicPref")
+ .transactionsIdPrefix("txIdPref")
+ .idempotent(false)
+ ).block();
+
+ //Write, Describe, Create permission on topics, Write, Describe on transactionalIds
+ //IDEMPOTENT_WRITE on cluster if idempotent is enabled (false)
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(5)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "topicPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.CREATE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TRANSACTIONAL_ID, "txIdPref", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.DESCRIBE, AclPermissionType.ALLOW)));
+ }
+
+
+ @Test
+ void createsStreamAppDependantAcls() {
+ ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class);
+ when(adminClientMock.createAcls(createdCaptor.capture()))
+ .thenReturn(Mono.empty());
+
+ var principal = UUID.randomUUID().toString();
+ var host = UUID.randomUUID().toString();
+
+ aclsService.createStreamAppAcl(
+ CLUSTER,
+ new CreateStreamAppAclDTO()
+ .principal(principal)
+ .host(host)
+ .inputTopics(List.of("t1"))
+ .outputTopics(List.of("t2", "t3"))
+ .applicationId("appId1")
+ ).block();
+
+ // Read on input topics, Write on output topics
+ // ALL on applicationId-prefixed Groups and Topics
+ Collection<AclBinding> createdBindings = createdCaptor.getValue();
+ assertThat(createdBindings)
+ .hasSize(5)
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t1", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.READ, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t2", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "t3", PatternType.LITERAL),
+ new AccessControlEntry(principal, host, AclOperation.WRITE, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.GROUP, "appId1", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.ALL, AclPermissionType.ALLOW)))
+ .contains(new AclBinding(
+ new ResourcePattern(ResourceType.TOPIC, "appId1", PatternType.PREFIXED),
+ new AccessControlEntry(principal, host, AclOperation.ALL, AclPermissionType.ALLOW)));
+ }
}
| train | test | 2023-08-01T13:47:03 | "2020-11-24T11:56:23Z" | soffest | train |
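The patch above adds three convenience endpoints (/acl/consumer, /acl/producer, /acl/streamApp) that expand one request into the usual set of ACL bindings; for a consumer that means READ and DESCRIBE on the topics (or topic prefix) plus READ on the consumer groups, as the new AclsServiceTest verifies. Below is a hedged sketch of building such a request with the generated DTOs: the principal, host, prefix and group name are invented, and aclsService / cluster stand for the injected AclsService and resolved KafkaCluster as used in the controller and tests.

```java
import java.util.List;

// Illustrative only: the names below are made up for the example.
CreateConsumerAclDTO request = new CreateConsumerAclDTO()
    .principal("User:orders-service")
    .host("*")                                    // any host
    .topicsPrefix("orders.")                      // -> PREFIXED READ + DESCRIBE topic bindings
    .consumerGroups(List.of("orders-consumer"));  // -> LITERAL READ group binding

aclsService.createConsumerAcl(cluster, request).block();
```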
provectus/kafka-ui/3745_3784 | provectus/kafka-ui | provectus/kafka-ui/3745 | provectus/kafka-ui/3784 | [
"connected"
] | c813e74609a68225880f782a8eb0f975f37b44ee | cfcfb851c60ed3bfee68b887d8e1584fb0372cf7 | [
"Hello there michal-cesek! π\n\nThank you and congratulations π for opening your very first issue in this project! π\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. π",
"Hello @michal-cesek \r\nthank you for raising this!\r\n\r\nCurrently we assume that `value` is a json (obj/string/integer). If value is not valid json ( like string `123notajson` ) we set it to null. So, there can be 2 cases then `value` is null in filter: \r\n- kafka-record's value is null \r\n- kafka-record's value can't be represented as json after deserialization \r\n\r\nThis is why you see false-positive filters. \r\n\r\nWe will discuss it internally and think how to fix this. For now, as workaround you can use `valueAsText` variable in filter that will be null only if kafka-record's value is null.\r\n\r\ncc @Haarolean ",
"@iliax thank you for taking care of this. In addition, I wanted to point out that there is inconsistency in the UI. The second screenshot shows an active filter and the search box input filled with the code -> Ok. The third screenshot shows the same active filter, but the search box is not prefilled -> Not Ok / Confusing\r\n\r\n\r\n\r\ncc @Haarolean ",
"@michal-cesek we have a huge messages view refactoring planned -- #3504 "
] | [] | "2023-05-05T08:51:41Z" | [
"type/bug",
"scope/backend",
"status/accepted"
] | Topic message filter is displayed but not applied | ### Issue submitter TODO list
- [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)
### Describe the bug (actual behavior)
The active message filter component is displayed but no filtering is applied to the topic message list
### Expected behavior
The filter is applied to the topic message list
### Your installation details
1. b0c367c - v0.6.2
### Steps to reproduce
**1.** Set up the filter by using the "Add filters" functionality


**2.** Leave the "Messages" view. E.g. click on the "Brokers" link from the left panel
**3.** Return to the view from the first step
**RESULT:** The filter is not applied and the unexpected topic message is shown

### Screenshots
_No response_
### Logs
_No response_
### Additional context
_No response_ | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java"
] | [
"kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java"
] | [
"kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/MessageFiltersTest.java"
] | diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java
index e48501f6a75..6e9f8a8bbe3 100644
--- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java
+++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/emitter/MessageFilters.java
@@ -39,41 +39,42 @@ static Predicate<TopicMessageDTO> containsStringFilter(String string) {
}
static Predicate<TopicMessageDTO> groovyScriptFilter(String script) {
- var compiledScript = compileScript(script);
+ var engine = getGroovyEngine();
+ var compiledScript = compileScript(engine, script);
var jsonSlurper = new JsonSlurper();
return new Predicate<TopicMessageDTO>() {
@SneakyThrows
@Override
public boolean test(TopicMessageDTO msg) {
- var bindings = getGroovyEngine().createBindings();
+ var bindings = engine.createBindings();
bindings.put("partition", msg.getPartition());
bindings.put("offset", msg.getOffset());
bindings.put("timestampMs", msg.getTimestamp().toInstant().toEpochMilli());
bindings.put("keyAsText", msg.getKey());
bindings.put("valueAsText", msg.getContent());
bindings.put("headers", msg.getHeaders());
- bindings.put("key", parseToJsonOrReturnNull(jsonSlurper, msg.getKey()));
- bindings.put("value", parseToJsonOrReturnNull(jsonSlurper, msg.getContent()));
+ bindings.put("key", parseToJsonOrReturnAsIs(jsonSlurper, msg.getKey()));
+ bindings.put("value", parseToJsonOrReturnAsIs(jsonSlurper, msg.getContent()));
var result = compiledScript.eval(bindings);
if (result instanceof Boolean) {
return (Boolean) result;
} else {
throw new ValidationException(
- String.format("Unexpected script result: %s, Boolean should be returned instead", result));
+ "Unexpected script result: %s, Boolean should be returned instead".formatted(result));
}
}
};
}
@Nullable
- private static Object parseToJsonOrReturnNull(JsonSlurper parser, @Nullable String str) {
+ private static Object parseToJsonOrReturnAsIs(JsonSlurper parser, @Nullable String str) {
if (str == null) {
return null;
}
try {
return parser.parseText(str);
} catch (Exception e) {
- return null;
+ return str;
}
}
@@ -86,9 +87,9 @@ private static synchronized GroovyScriptEngineImpl getGroovyEngine() {
return GROOVY_ENGINE;
}
- private static CompiledScript compileScript(String script) {
+ private static CompiledScript compileScript(GroovyScriptEngineImpl engine, String script) {
try {
- return getGroovyEngine().compile(script);
+ return engine.compile(script);
} catch (ScriptException e) {
throw new ValidationException("Script syntax error: " + e.getMessage());
}
| diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/MessageFiltersTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/MessageFiltersTest.java
index 94a377c9c84..4e9f5034cd2 100644
--- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/MessageFiltersTest.java
+++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/emitter/MessageFiltersTest.java
@@ -118,10 +118,18 @@ void canCheckKeyAsJsonObjectIfItCanBeParsedToJson() {
}
@Test
- void keySetToNullIfKeyCantBeParsedToJson() {
- var f = groovyScriptFilter("key == null");
+ void keySetToKeyStringIfCantBeParsedToJson() {
+ var f = groovyScriptFilter("key == \"not json\"");
assertTrue(f.test(msg().key("not json")));
- assertFalse(f.test(msg().key("{ \"k\" : \"v\" }")));
+ }
+
+ @Test
+ void keyAndKeyAsTextSetToNullIfRecordsKeyIsNull() {
+ var f = groovyScriptFilter("key == null");
+ assertTrue(f.test(msg().key(null)));
+
+ f = groovyScriptFilter("keyAsText == null");
+ assertTrue(f.test(msg().key(null)));
}
@Test
@@ -132,10 +140,18 @@ void canCheckValueAsJsonObjectIfItCanBeParsedToJson() {
}
@Test
- void valueSetToNullIfKeyCantBeParsedToJson() {
- var f = groovyScriptFilter("value == null");
+ void valueSetToContentStringIfCantBeParsedToJson() {
+ var f = groovyScriptFilter("value == \"not json\"");
assertTrue(f.test(msg().content("not json")));
- assertFalse(f.test(msg().content("{ \"k\" : \"v\" }")));
+ }
+
+ @Test
+ void valueAndValueAsTextSetToNullIfRecordsContentIsNull() {
+ var f = groovyScriptFilter("value == null");
+ assertTrue(f.test(msg().content(null)));
+
+ f = groovyScriptFilter("valueAsText == null");
+ assertTrue(f.test(msg().content(null)));
}
@Test
@@ -185,4 +201,4 @@ private TopicMessageDTO msg() {
.partition(1);
}
-}
\ No newline at end of file
+}
| train | test | 2023-05-09T11:14:50 | "2023-04-27T13:02:32Z" | michal-cesek | train |
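The change above makes MessageFilters.groovyScriptFilter bind key and value to the raw string when the payload is not valid JSON, so they are null only when the record's key or value is actually null; this removes the false positives described in the issue, while keyAsText / valueAsText keep their old meaning. A short usage sketch in the style of MessageFiltersTest (same package, so the package-private factory and the msg() helper are visible); the "status" field and the payloads are invented for illustration.

```java
// Illustrative only. After this patch:
//  - `value` is the parsed JSON when the payload is valid JSON,
//  - `value` is the raw string when the payload is not valid JSON,
//  - `value` and `valueAsText` are null only when the record value itself is null.
var filter = groovyScriptFilter("partition == 0 && value instanceof Map && value.status == 'FAILED'");

assertTrue(filter.test(msg().partition(0).content("{\"status\":\"FAILED\"}")));  // parsed JSON object
assertFalse(filter.test(msg().partition(0).content("not json")));                // raw string now, not null
assertFalse(filter.test(msg().partition(0).content(null)));                      // genuinely null value
```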
provectus/kafka-ui/3738_3785 | provectus/kafka-ui | provectus/kafka-ui/3738 | provectus/kafka-ui/3785 | [
"connected"
] | 727f38401babcf25d5bb47e675149882ff3ede14 | 97a694b3f04ceec2f103a6c8836d32c686e72c40 | [] | [] | "2023-05-05T12:07:37Z" | [
"good first issue",
"scope/frontend",
"status/accepted",
"type/chore"
] | Messages: Produce pane is too long, the produce button is hidden by default | [
"kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx"
] | [
"kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx"
] | [] | diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
index bacfa76c93f..b7f31a230bd 100644
--- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
+++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx
@@ -210,6 +210,7 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
name={name}
onChange={onChange}
value={value}
+ height="40px"
/>
)}
/>
@@ -225,6 +226,7 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
name={name}
onChange={onChange}
value={value}
+ height="280px"
/>
)}
/>
@@ -242,7 +244,7 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({
defaultValue="{}"
name={name}
onChange={onChange}
- height="200px"
+ height="40px"
/>
)}
/>
| null | train | test | 2023-05-02T14:34:57 | "2023-04-27T00:49:43Z" | Haarolean | train |