Dataset columns (name: dtype, observed min-max lengths or class counts):

instance_id: string (length 17-39)
repo: string (8 classes)
issue_id: string (length 14-34)
pr_id: string (length 14-34)
linking_methods: sequence (length 1-3)
base_commit: string (length 40)
merge_commit: string (length 0-40, nullable)
hints_text: sequence (length 0-106)
resolved_comments: sequence (length 0-119)
created_at: unknown
labeled_as: sequence (length 0-7)
problem_title: string (length 7-174)
problem_statement: string (length 0-55.4k)
gold_files: sequence (length 0-10)
gold_files_postpatch: sequence (length 1-10)
test_files: sequence (length 0-60)
gold_patch: string (length 220-5.83M)
test_patch: string (length 386-194k, nullable)
split_random: string (3 classes)
split_time: string (3 classes)
issue_start_time: timestamp[ns]
issue_created_at: unknown
issue_by_user: string (length 3-21)
split_repo: string (3 classes)

Sample rows follow, one field per line in the column order above.
provectus/kafka-ui/3716_3786
provectus/kafka-ui
provectus/kafka-ui/3716
provectus/kafka-ui/3786
[ "connected" ]
727f38401babcf25d5bb47e675149882ff3ede14
61fb62276e8aee6b7730cd8a76e9a54cb7e76d44
[]
[]
"2023-05-05T13:07:44Z"
[ "good first issue", "scope/frontend", "status/accepted", "type/chore" ]
Serde fallback icon: Add a tooltip
Implement a tooltip saying it's a fallback serde <img width="701" alt="image" src="https://user-images.githubusercontent.com/1494347/234539665-1d2a1ecc-d39c-4812-8307-f1b5e5cd1570.png">
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx index dd5cfae7488..af76db6739f 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Message.tsx @@ -8,6 +8,7 @@ import { formatTimestamp } from 'lib/dateTimeHelpers'; import { JSONPath } from 'jsonpath-plus'; import Ellipsis from 'components/common/Ellipsis/Ellipsis'; import WarningRedIcon from 'components/common/Icons/WarningRedIcon'; +import Tooltip from 'components/common/Tooltip/Tooltip'; import MessageContent from './MessageContent/MessageContent'; import * as S from './MessageContent/MessageContent.styled'; @@ -110,14 +111,26 @@ const Message: React.FC<Props> = ({ </td> <S.DataCell title={key}> <Ellipsis text={renderFilteredJson(key, keyFilters)}> - {keySerde === 'Fallback' && <WarningRedIcon />} + {keySerde === 'Fallback' && ( + <Tooltip + value={<WarningRedIcon />} + content="Fallback serde was used" + placement="left" + /> + )} </Ellipsis> </S.DataCell> <S.DataCell title={content}> <S.Metadata> <S.MetadataValue> <Ellipsis text={renderFilteredJson(content, contentFilters)}> - {valueSerde === 'Fallback' && <WarningRedIcon />} + {valueSerde === 'Fallback' && ( + <Tooltip + value={<WarningRedIcon />} + content="Fallback serde was used" + placement="left" + /> + )} </Ellipsis> </S.MetadataValue> </S.Metadata>
null
test
test
2023-05-02T14:34:57
"2023-04-26T09:50:58Z"
Haarolean
train
provectus/kafka-ui/3654_3787
provectus/kafka-ui
provectus/kafka-ui/3654
provectus/kafka-ui/3787
[ "connected" ]
147b539c376028268d98955e66f0672125cd263b
5e539f1ba825d04d782694e299651560351d6e90
[]
[]
"2023-05-08T06:25:26Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Stop query functionality check
Autotest implementation for: https://app.qase.io/project/KAFKAUI?case=277&previewMode=side&suite=8
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java index 4ce282b6cc5..ff57de39b2f 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/ksqldb/KsqlQueryForm.java @@ -17,11 +17,12 @@ public class KsqlQueryForm extends BasePage { protected SelenideElement clearBtn = $x("//div/button[text()='Clear']"); protected SelenideElement executeBtn = $x("//div/button[text()='Execute']"); - protected SelenideElement stopQueryBtn = $x("//div/button[text()='Stop query']"); protected SelenideElement clearResultsBtn = $x("//div/button[text()='Clear results']"); protected SelenideElement addStreamPropertyBtn = $x("//button[text()='Add Stream Property']"); protected SelenideElement queryAreaValue = $x("//div[@class='ace_content']"); protected SelenideElement queryArea = $x("//div[@id='ksql']/textarea[@class='ace_text-input']"); + protected SelenideElement abortButton = $x("//div[@role='status']/div[text()='Abort']"); + protected SelenideElement cancelledAlert = $x("//div[@role='status'][text()='Cancelled']"); protected ElementsCollection ksqlGridItems = $$x("//tbody//tr"); protected ElementsCollection keyField = $$x("//input[@aria-label='key']"); protected ElementsCollection valueField = $$x("//input[@aria-label='value']"); @@ -48,7 +49,7 @@ public String getEnteredQuery() { public KsqlQueryForm clickExecuteBtn(String query) { clickByActions(executeBtn); if (query.contains("EMIT CHANGES")) { - loadingSpinner.shouldBe(Condition.visible); + abortButton.shouldBe(Condition.visible); } else { waitUntilSpinnerDisappear(); } @@ -56,12 +57,21 @@ public KsqlQueryForm clickExecuteBtn(String query) { } @Step - public KsqlQueryForm clickStopQueryBtn() { - clickByActions(stopQueryBtn); - waitUntilSpinnerDisappear(); + public boolean isAbortBtnVisible() { + return isVisible(abortButton); + } + + @Step + public KsqlQueryForm clickAbortBtn() { + clickByActions(abortButton); return this; } + @Step + public boolean isCancelledAlertVisible() { + return isVisible(cancelledAlert); + } + @Step public KsqlQueryForm clickClearResultsBtn() { clickByActions(clearResultsBtn);
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index 25b95388820..7e663f5893e 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -21,73 +21,66 @@ public class SmokeBacklog extends BaseManualTest { public void testCaseA() { } - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = KSQL_DB_SUITE_ID) - @QaseId(277) - @Test - public void testCaseB() { - } - @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(331) @Test - public void testCaseC() { + public void testCaseB() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(332) @Test - public void testCaseD() { + public void testCaseC() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(335) @Test - public void testCaseE() { + public void testCaseD() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(336) @Test - public void testCaseF() { + public void testCaseE() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(343) @Test - public void testCaseG() { + public void testCaseF() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = KSQL_DB_SUITE_ID) @QaseId(344) @Test - public void testCaseH() { + public void testCaseG() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(345) @Test - public void testCaseI() { + public void testCaseH() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(346) @Test - public void testCaseJ() { + public void testCaseI() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(347) @Test - public void testCaseK() { + public void testCaseJ() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java index 00460da08dd..0504a8a31ab 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/ksqldb/KsqlDbTest.java @@ -1,6 +1,7 @@ package com.provectus.kafka.ui.smokesuite.ksqldb; import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlMenuTabs.STREAMS; +import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SELECT_ALL_FROM; import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_STREAMS; import static com.provectus.kafka.ui.pages.ksqldb.enums.KsqlQueryConfig.SHOW_TABLES; import static com.provectus.kafka.ui.pages.panels.enums.MenuItem.KSQL_DB; @@ -87,7 +88,8 @@ public void checkShowStreamsRequestExecution() { navigateToKsqlDbAndExecuteRequest(SHOW_STREAMS.getQuery()); SoftAssert softly = new SoftAssert(); softly.assertTrue(ksqlQueryForm.areResultsVisible(), "areResultsVisible()"); - softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(), "getItemByName()"); + softly.assertTrue(ksqlQueryForm.getItemByName(DEFAULT_STREAM.getName()).isVisible(), + String.format("getItemByName(%s)", FIRST_TABLE.getName())); softly.assertAll(); } @@ -104,6 +106,16 @@ public void clearResultsForExecutedRequest() { softly.assertAll(); } + @QaseId(277) + @Test(priority = 6) + 
public void stopQueryFunctionalCheck() { + navigateToKsqlDbAndExecuteRequest(String.format(SELECT_ALL_FROM.getQuery(), FIRST_TABLE.getName())); + Assert.assertTrue(ksqlQueryForm.isAbortBtnVisible(), "isAbortBtnVisible()"); + ksqlQueryForm + .clickAbortBtn(); + Assert.assertTrue(ksqlQueryForm.isCancelledAlertVisible(), "isCancelledAlertVisible()"); + } + @AfterClass(alwaysRun = true) public void afterClass() { TOPIC_NAMES_LIST.forEach(topicName -> apiService.deleteTopic(topicName));
val
test
2023-05-06T19:36:29
"2023-04-11T07:37:17Z"
VladSenyuta
train
provectus/kafka-ui/3163_3804
provectus/kafka-ui
provectus/kafka-ui/3163
provectus/kafka-ui/3804
[ "keyword_pr_to_issue" ]
69ebd3d52b2065ef970063d92d36aa2684ecf289
ed9f91fd8ac3c17afa8dd0a5f5b3c0172dd83441
[ "@Haarolean I would like to contribute to this issue." ]
[ "you can actually check the `nameMatch` before doing value check , so it wouldn't do more work if it does not have to.", "Actually i don't get it what you are saying. Do you mean that if nameMatch found then i don't need to check for valueMatch ?", "> Actually i don't get it what you are saying. Do you mean that if nameMatch found then i don't need to check for valueMatch ?\r\n\r\nYep, I think this is correct, and I think the suggestion is to do something like\r\n```\r\n .filter((item) => {\r\n const nameMatch = item.name.toLocaleLowerCase().includes(keyword);\r\n return nameMatch ? true : item.value && item.value.includes(keyword); // try to match the keyword on any of the item.value elements when nameMatch fails but item.value exists\r\n })\r\n```\r\n\r\nor more explicitly:\r\n```\r\n .filter((item) => {\r\n const nameMatch = item.name.toLocaleLowerCase().includes(keyword);\r\n if (nameMatch === true) return nameMatch;\r\n const valueMatch = item.value && item.value.includes(keyword);\r\n return valueMatch;\r\n })\r\n```\r\ninstead of (the current implementation):\r\n```\r\n .filter((item) => {\r\n const nameMatch = item.name.toLocaleLowerCase().includes(keyword);\r\n const valueMatch = item.value && item.value.includes(keyword);\r\n return nameMatch || valueMatch;\r\n })\r\n```\r\n\r\nso we don't need to go through the entire `item.value` array in case we find a `nameMatch` (and `item.value` exists).", "Yup. You are right.", "@malavmevada shouldn't we cast item value to lower case as well?", "@Haarolean Actually, we shouldn't convert the 'item' values to lowercase because if you observe, all the values under the 'name' key are in lowercase strings. However, for the 'item' key, we can have values in uppercase, lowercase, null, and numbers. Due to this diversity, I haven't changed the 'item' values to lowercase.", "@malavmevada sorry for the delay. Isn't this the point? Let's say I have some setting but in order to find it but its value I have to remember the exact case-sensitive value?", "Yes you are absolutely right. Sorry for the mistake. I got your point and I have tested it and yeah it's working fine. I'll change that.", "please cast the keyword to lowercase as well, otherwise it's not a case-insensitive search :)", "Yes @Haarolean. I have updated it." ]
"2023-05-10T09:35:23Z"
[ "type/enhancement", "good first issue", "scope/frontend", "status/accepted" ]
Broker: Config: Implement search by the Value
<!-- Don't forget to check for existing issues/discussions regarding your proposal. We might already have it. https://github.com/provectus/kafka-ui/issues https://github.com/provectus/kafka-ui/discussions --> **Actual behavior** <!--(A clear and concise description of what the bug is.)--> It is not possible to search by the config Value; search only works by the Key now. **Expected behavior** Search by the config Value can be useful for maintenance and troubleshooting. **Set up** <!-- How do you run the app? Please provide as much info as possible: 1. App version (docker image version or check commit hash in the top left corner in UI) 2. Helm chart version, if you use one 3. Any IAAC configs We might close the issue without further explanation if you don't provide such information. --> [f4e6afe](https://github.com/provectus/kafka-ui/commit/f4e6afe) **Steps to Reproduce** <!-- We'd like you to provide an example setup (via docker-compose, helm, etc.) to reproduce the problem, especially with complex setups. --> 1. Log in to Kafka UI 2. Navigate to Brokers 3. Select broker and switch to Configs tab. **Screenshots** <!-- (If applicable, add screenshots to help explain your problem) --> **Additional context** <!-- (Add any other context about the problem here) --> Discussed with @Haarolean to be created as a separate issue from #2651
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java", "kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java", "kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx" ]
[]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java index a9e146002ec..e00e938297a 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java @@ -16,6 +16,8 @@ public class BrokersConfigTab extends BasePage { + protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']"); + protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key or Value']"); protected SelenideElement sourceInfoIcon = $x("//div[text()='Source']/..//div/div[@class]"); protected SelenideElement sourceInfoTooltip = $x("//div[text()='Source']/..//div/div[@style]"); protected ElementsCollection editBtns = $$x("//button[@aria-label='editAction']"); diff --git a/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx b/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx index ad6c5087a0f..5b05b7cc5be 100644 --- a/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx +++ b/kafka-ui-react-app/src/components/Brokers/Broker/Configs/Configs.tsx @@ -34,14 +34,19 @@ const Configs: React.FC = () => { const getData = () => { return data - .filter( - (item) => - item.name.toLocaleLowerCase().indexOf(keyword.toLocaleLowerCase()) > - -1 - ) + .filter((item) => { + const nameMatch = item.name + .toLocaleLowerCase() + .includes(keyword.toLocaleLowerCase()); + return nameMatch + ? true + : item.value && + item.value + .toLocaleLowerCase() + .includes(keyword.toLocaleLowerCase()); // try to match the keyword on any of the item.value elements when nameMatch fails but item.value exists + }) .sort((a, b) => { if (a.source === b.source) return 0; - return a.source === ConfigSource.DYNAMIC_BROKER_CONFIG ? -1 : 1; }); }; @@ -95,7 +100,7 @@ const Configs: React.FC = () => { <S.SearchWrapper> <Search onChange={setKeyword} - placeholder="Search by Key" + placeholder="Search by Key or Value" value={keyword} /> </S.SearchWrapper>
null
train
test
2023-08-03T13:05:09
"2022-12-28T12:21:53Z"
BulatKha
train
provectus/kafka-ui/3720_3805
provectus/kafka-ui
provectus/kafka-ui/3720
provectus/kafka-ui/3805
[ "connected" ]
aa7429eeba2e942846fe071d72d4621dbd2aa70b
52a42e698e4bac85b64a40d8e7e3843070f19ef9
[ "Hello there fl0wx! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Forgot frontend lol", "@David-DB88 \r\n\r\nTODO:\r\nChange RBAC permissions: For `ResourceType.CONNECT` all restart actions should have `Action.RESTART` instead of `Action.EDIT`" ]
[]
"2023-05-10T12:20:25Z"
[ "type/enhancement", "scope/backend", "scope/frontend", "status/accepted", "area/rbac" ]
RBAC: KC: Impl restart permissions
### Issue submitter TODO list - [X] I've searched for already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? _No response_ ### Describe the feature you're interested in I want a new permission for connector / connector task restarts. As I deploy and manage connectors via Strimzi, I don't want the frontend user to edit or remove the tasks. But in case of an issue or tests, it would be helpful if tasks could be restarted via the UI with a separate permission. ### Describe alternatives you've considered _No response_ ### Version you're running 0.6.2 ### Additional context _No response_
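The request boils down to splitting restart rights out of the broader EDIT permission, so a role can restart a connector task without being able to modify or delete it. A toy model of that check, assuming nothing about kafka-ui's real `AccessControlService` beyond the enum shape the patch below introduces:

```java
import java.util.EnumSet;
import java.util.Set;

public class RestartPermissionSketch {

    // Mirrors the enum from the patch below; the check itself is a
    // simplified illustration, not kafka-ui's actual RBAC implementation.
    enum ConnectAction { VIEW, EDIT, CREATE, RESTART }

    static boolean allowed(Set<ConnectAction> granted, ConnectAction... required) {
        return granted.containsAll(Set.of(required));
    }

    public static void main(String[] args) {
        // A role for operators who may restart tasks but not edit connectors.
        Set<ConnectAction> restartOnly = EnumSet.of(ConnectAction.VIEW, ConnectAction.RESTART);

        System.out.println(allowed(restartOnly, ConnectAction.VIEW, ConnectAction.RESTART)); // true
        System.out.println(allowed(restartOnly, ConnectAction.VIEW, ConnectAction.EDIT));    // false
    }
}
```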
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java index 080c6020f9b..d300b930164 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java @@ -211,7 +211,7 @@ public Mono<ResponseEntity<Void>> restartConnectorTask(String clusterName, Strin Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder() .cluster(clusterName) .connect(connectName) - .connectActions(ConnectAction.VIEW, ConnectAction.EDIT) + .connectActions(ConnectAction.VIEW, ConnectAction.RESTART) .build()); return validateAccess.then( diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java index 3db4d4953c4..1c3335e3608 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/permission/ConnectAction.java @@ -7,7 +7,8 @@ public enum ConnectAction implements PermissibleAction { VIEW, EDIT, - CREATE + CREATE, + RESTART ;
null
train
test
2023-05-10T08:45:18
"2023-04-26T13:02:01Z"
fl0wx
train
provectus/kafka-ui/2764_3808
provectus/kafka-ui
provectus/kafka-ui/2764
provectus/kafka-ui/3808
[ "keyword_pr_to_issue" ]
4b724fd852f9814ec3ab9316a739cc2d8a1f282c
c3559556411189bdfa72b67188ee9d49a47fce37
[ "This would be super useful indeed! :)" ]
[]
"2023-05-10T15:40:29Z"
[ "type/enhancement", "scope/backend", "status/accepted", "area/serde" ]
Avro encoding/decoding: Logical types support
Implement Avro logical types encoding & decoding. The list of logical types can be found here: https://avro.apache.org/docs/1.10.2/spec.html#Logical+Types.
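The gold patch below maps each logical type to a pair of conversions (JSON to Avro and back). One representative piece of that arithmetic is decoding `timestamp-micros`: a long counting microseconds since the epoch is split into whole seconds plus a nanosecond adjustment before building an `Instant`. A standalone sketch of just that branch (mirroring the patch's `TIMESTAMP_MICROS` handling; not the full converter, and assuming non-negative values as the patch does):

```java
import java.time.Instant;

public class TimestampMicrosSketch {

    // Split microseconds-since-epoch into epoch seconds + nano adjustment,
    // as the TIMESTAMP_MICROS branch of the patch does.
    static Instant fromMicros(long microsFromEpoch) {
        long epochSeconds = microsFromEpoch / 1_000_000L;
        long nanoAdjustment = (microsFromEpoch % 1_000_000L) * 1_000L;
        return Instant.ofEpochSecond(epochSeconds, nanoAdjustment);
    }

    public static void main(String[] args) {
        Instant original = Instant.parse("2007-12-13T10:15:30.123456Z");
        long micros = original.getEpochSecond() * 1_000_000L + original.getNano() / 1_000L;
        // Round-trips exactly: Instant carries nanosecond precision.
        System.out.println(fromMicros(micros).equals(original)); // true
    }
}
```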
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java new file mode 100644 index 00000000000..5b3910f4cf3 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java @@ -0,0 +1,7 @@ +package com.provectus.kafka.ui.exception; + +public class JsonToAvroConversionException extends ValidationException { + public JsonToAvroConversionException(String message) { + super(message); + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java index 3c4a5008552..164938bfc70 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java @@ -1,12 +1,13 @@ package com.provectus.kafka.ui.serdes.builtin.sr; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.SchemaMetadata; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroSerializer; +import io.confluent.kafka.serializers.KafkaAvroSerializerConfig; import java.util.Map; import org.apache.kafka.common.serialization.Serializer; @@ -25,6 +26,7 @@ protected Serializer<Object> createSerializer(SchemaRegistryClient client) { Map.of( "schema.registry.url", "wontbeused", AbstractKafkaSchemaSerDeConfig.AUTO_REGISTER_SCHEMAS, false, + KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true, AbstractKafkaSchemaSerDeConfig.USE_LATEST_VERSION, true ), isKey @@ -35,7 +37,7 @@ protected Serializer<Object> createSerializer(SchemaRegistryClient client) { @Override protected Object serialize(String value, ParsedSchema schema) { try { - return AvroSchemaUtils.toObject(value, (AvroSchema) schema); + return JsonAvroConversion.convertJsonToAvro(value, ((AvroSchema) schema).rawSchema()); } catch (Throwable e) { throw new RuntimeException("Failed to serialize record for topic " + topic, e); } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java index 52e8c6f33a4..40073d85347 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java @@ -3,9 +3,12 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.protobuf.Message; import com.google.protobuf.util.JsonFormat; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroDeserializer; +import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig; import 
io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer; import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer; import java.util.Map; @@ -28,16 +31,22 @@ class AvroMessageFormatter implements MessageFormatter { AvroMessageFormatter(SchemaRegistryClient client) { this.avroDeserializer = new KafkaAvroDeserializer(client); + this.avroDeserializer.configure( + Map.of( + AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "wontbeused", + KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, false, + KafkaAvroDeserializerConfig.SCHEMA_REFLECTION_CONFIG, false, + KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true + ), + false + ); } @Override - @SneakyThrows public String format(String topic, byte[] value) { - // deserialized will have type, that depends on schema type (record or primitive), - // AvroSchemaUtils.toJson(...) method will take it into account Object deserialized = avroDeserializer.deserialize(topic, value); - byte[] jsonBytes = AvroSchemaUtils.toJson(deserialized); - return new String(jsonBytes); + var schema = AvroSchemaUtils.getSchema(deserialized); + return JsonAvroConversion.convertAvroToJson(deserialized, schema).toString(); } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java index d2cd0e65f39..56737d7a612 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -5,6 +5,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import org.apache.avro.Schema; import reactor.util.function.Tuple2; @@ -40,6 +41,10 @@ private FieldSchema convertField(Schema.Field field, Map<String, FieldSchema> de private FieldSchema convertSchema(Schema schema, Map<String, FieldSchema> definitions, boolean isRoot) { + Optional<FieldSchema> logicalTypeSchema = JsonAvroConversion.LogicalTypeConversion.getJsonSchema(schema); + if (logicalTypeSchema.isPresent()) { + return logicalTypeSchema.get(); + } if (!schema.isUnion()) { JsonType type = convertType(schema); switch (type.getType()) { @@ -66,7 +71,6 @@ private FieldSchema convertSchema(Schema schema, } } - // this method formats json-schema field in a way // to fit avro-> json encoding rules (https://avro.apache.org/docs/1.11.1/specification/_print/#json-encoding) private FieldSchema createUnionSchema(Schema schema, Map<String, FieldSchema> definitions) { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java new file mode 100644 index 00000000000..d2114dd971c --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java @@ -0,0 +1,503 @@ +package com.provectus.kafka.ui.util.jsonschema; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.DecimalNode; +import com.fasterxml.jackson.databind.node.DoubleNode; +import com.fasterxml.jackson.databind.node.FloatNode; +import 
com.fasterxml.jackson.databind.node.IntNode; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.fasterxml.jackson.databind.node.LongNode; +import com.fasterxml.jackson.databind.node.NullNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.TextNode; +import com.google.common.collect.Lists; +import com.provectus.kafka.ui.exception.JsonToAvroConversionException; +import io.confluent.kafka.serializers.AvroData; +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.stream.Stream; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; + +// json <-> avro +public class JsonAvroConversion { + + private static final JsonMapper MAPPER = new JsonMapper(); + + // converts json into Object that is expected input for KafkaAvroSerializer + // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) + @SneakyThrows + public static Object convertJsonToAvro(String jsonString, Schema avroSchema) { + JsonNode rootNode = MAPPER.readTree(jsonString); + return convert(rootNode, avroSchema); + } + + private static Object convert(JsonNode node, Schema avroSchema) { + return switch (avroSchema.getType()) { + case RECORD -> { + assertJsonType(node, JsonNodeType.OBJECT); + var rec = new GenericData.Record(avroSchema); + for (Schema.Field field : avroSchema.getFields()) { + if (node.has(field.name()) && !node.get(field.name()).isNull()) { + rec.put(field.name(), convert(node.get(field.name()), field.schema())); + } + } + yield rec; + } + case MAP -> { + assertJsonType(node, JsonNodeType.OBJECT); + var map = new LinkedHashMap<String, Object>(); + var valueSchema = avroSchema.getValueType(); + node.fields().forEachRemaining(f -> map.put(f.getKey(), convert(f.getValue(), valueSchema))); + yield map; + } + case ARRAY -> { + assertJsonType(node, JsonNodeType.ARRAY); + var lst = new ArrayList<>(); + node.elements().forEachRemaining(e -> lst.add(convert(e, avroSchema.getElementType()))); + yield lst; + } + case ENUM -> { + assertJsonType(node, JsonNodeType.STRING); + String symbol = node.textValue(); + if (!avroSchema.getEnumSymbols().contains(symbol)) { + throw new JsonToAvroConversionException("%s is not a part of enum symbols [%s]" + .formatted(symbol, avroSchema.getEnumSymbols())); + } + yield new GenericData.EnumSymbol(avroSchema, symbol); + } + case UNION -> { + // for types from enum (other than null) payload should be an object with single key == name of type + // ex: schema = [ "null", "int", "string" ], possible payloads = null, { "string": "str" }, { "int": 123 } + if (node.isNull() && avroSchema.getTypes().contains(Schema.create(Schema.Type.NULL))) { + yield null; + } + + assertJsonType(node, JsonNodeType.OBJECT); + var elements = Lists.newArrayList(node.fields()); + if (elements.size() != 1) { + throw new JsonToAvroConversionException( + "UNION field value should be an object with single field == type name"); + } + var typeNameToValue = elements.get(0); + for (Schema unionType : avroSchema.getTypes()) { + if 
(typeNameToValue.getKey().equals(unionType.getFullName())) { + yield convert(typeNameToValue.getValue(), unionType); + } + } + throw new JsonToAvroConversionException( + "json value '%s' is cannot be converted to any of union types [%s]" + .formatted(node, avroSchema.getTypes())); + } + case STRING -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + yield node.textValue(); + } + case LONG -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.LONG, JsonParser.NumberType.INT); + yield node.longValue(); + } + case INT -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.INT); + yield node.intValue(); + } + case FLOAT -> { + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT); + yield node.floatValue(); + } + case DOUBLE -> { + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT); + yield node.doubleValue(); + } + case BOOLEAN -> { + assertJsonType(node, JsonNodeType.BOOLEAN); + yield node.booleanValue(); + } + case NULL -> { + assertJsonType(node, JsonNodeType.NULL); + yield null; + } + case BYTES -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + // logic copied from JsonDecoder::readBytes + yield ByteBuffer.wrap(node.textValue().getBytes(StandardCharsets.ISO_8859_1)); + } + case FIXED -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + byte[] bytes = node.textValue().getBytes(StandardCharsets.ISO_8859_1); + if (bytes.length != avroSchema.getFixedSize()) { + throw new JsonToAvroConversionException( + "Fixed field has unexpected size %d (should be %d)" + .formatted(bytes.length, avroSchema.getFixedSize())); + } + yield new GenericData.Fixed(avroSchema, bytes); + } + }; + } + + // converts output of KafkaAvroDeserializer (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) into json. + // Note: conversion should be compatible with AvroJsonSchemaConverter logic! 
+ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { + if (obj == null) { + return NullNode.getInstance(); + } + return switch (avroSchema.getType()) { + case RECORD -> { + var rec = (GenericData.Record) obj; + ObjectNode node = MAPPER.createObjectNode(); + for (Schema.Field field : avroSchema.getFields()) { + var fieldVal = rec.get(field.name()); + if (fieldVal != null) { + node.set(field.name(), convertAvroToJson(fieldVal, field.schema())); + } + } + yield node; + } + case MAP -> { + ObjectNode node = MAPPER.createObjectNode(); + ((Map) obj).forEach((k, v) -> node.set(k.toString(), convertAvroToJson(v, avroSchema.getValueType()))); + yield node; + } + case ARRAY -> { + var list = (List<Object>) obj; + ArrayNode node = MAPPER.createArrayNode(); + list.forEach(e -> node.add(convertAvroToJson(e, avroSchema.getElementType()))); + yield node; + } + case ENUM -> { + yield new TextNode(obj.toString()); + } + case UNION -> { + ObjectNode node = MAPPER.createObjectNode(); + int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj); + Schema unionType = avroSchema.getTypes().get(unionIdx); + node.set(unionType.getFullName(), convertAvroToJson(obj, unionType)); + yield node; + } + case STRING -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new TextNode(obj.toString()); + } + case LONG -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new LongNode((Long) obj); + } + case INT -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new IntNode((Integer) obj); + } + case FLOAT -> new FloatNode((Float) obj); + case DOUBLE -> new DoubleNode((Double) obj); + case BOOLEAN -> BooleanNode.valueOf((Boolean) obj); + case NULL -> NullNode.getInstance(); + case BYTES -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + ByteBuffer bytes = (ByteBuffer) obj; + //see JsonEncoder::writeByteArray + yield new TextNode(new String(bytes.array(), StandardCharsets.ISO_8859_1)); + } + case FIXED -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + var fixed = (GenericData.Fixed) obj; + yield new TextNode(new String(fixed.bytes(), StandardCharsets.ISO_8859_1)); + } + }; + } + + private static Object processLogicalType(JsonNode node, Schema schema) { + return findConversion(schema) + .map(c -> c.jsonToAvroConversion.apply(node, schema)) + .orElseThrow(() -> + new JsonToAvroConversionException("'%s' logical type is not supported" + .formatted(schema.getLogicalType().getName()))); + } + + private static JsonNode processLogicalType(Object obj, Schema schema) { + return findConversion(schema) + .map(c -> c.avroToJsonConversion.apply(obj, schema)) + .orElseThrow(() -> + new JsonToAvroConversionException("'%s' logical type is not supported" + .formatted(schema.getLogicalType().getName()))); + } + + private static Optional<LogicalTypeConversion> findConversion(Schema schema) { + String logicalTypeName = schema.getLogicalType().getName(); + return Stream.of(LogicalTypeConversion.values()) + .filter(t -> t.name.equalsIgnoreCase(logicalTypeName)) + .findFirst(); + } + + private static boolean isLogicalType(Schema schema) { + return schema.getLogicalType() != null; + } + + private static void assertJsonType(JsonNode node, JsonNodeType... 
allowedTypes) { + if (Stream.of(allowedTypes).noneMatch(t -> node.getNodeType() == t)) { + throw new JsonToAvroConversionException( + "%s node has unexpected type, allowed types %s, actual type %s" + .formatted(node, Arrays.toString(allowedTypes), node.getNodeType())); + } + } + + private static void assertJsonNumberType(JsonNode node, JsonParser.NumberType... allowedTypes) { + if (Stream.of(allowedTypes).noneMatch(t -> node.numberType() == t)) { + throw new JsonToAvroConversionException( + "%s node has unexpected numeric type, allowed types %s, actual type %s" + .formatted(node, Arrays.toString(allowedTypes), node.numberType())); + } + } + + enum LogicalTypeConversion { + + UUID("uuid", + (node, schema) -> { + assertJsonType(node, JsonNodeType.STRING); + return java.util.UUID.fromString(node.asText()); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("uuid")))) + ), + + DECIMAL("decimal", + (node, schema) -> { + if (node.isTextual()) { + return new BigDecimal(node.asText()); + } else if (node.isNumber()) { + return new BigDecimal(node.numberValue().toString()); + } + throw new JsonToAvroConversionException( + "node '%s' can't be converted to decimal logical type" + .formatted(node)); + }, + (obj, schema) -> { + return new DecimalNode((BigDecimal) obj); + }, + new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.NUMBER)) + ), + + DATE("date", + (node, schema) -> { + if (node.isInt()) { + return LocalDate.ofEpochDay(node.intValue()); + } else if (node.isTextual()) { + return LocalDate.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to date logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date")))) + ), + + TIME_MILLIS("time-millis", + (node, schema) -> { + if (node.isIntegralNumber()) { + return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(node.longValue())); + } else if (node.isTextual()) { + return LocalTime.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to time-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("time")))) + ), + + TIME_MICROS("time-micros", + (node, schema) -> { + if (node.isIntegralNumber()) { + return LocalTime.ofNanoOfDay(TimeUnit.MICROSECONDS.toNanos(node.longValue())); + } else if (node.isTextual()) { + return LocalTime.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to time-micros logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("time")))) + ), + + TIMESTAMP_MILLIS("timestamp-millis", + (node, schema) -> { + if (node.isIntegralNumber()) { + return Instant.ofEpochMilli(node.longValue()); + } else if (node.isTextual()) { + return Instant.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to timestamp-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + 
new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + TIMESTAMP_MICROS("timestamp-micros", + (node, schema) -> { + if (node.isIntegralNumber()) { + // TimeConversions.TimestampMicrosConversion for impl + long microsFromEpoch = node.longValue(); + long epochSeconds = microsFromEpoch / (1_000_000L); + long nanoAdjustment = (microsFromEpoch % (1_000_000L)) * 1_000L; + return Instant.ofEpochSecond(epochSeconds, nanoAdjustment); + } else if (node.isTextual()) { + return Instant.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to timestamp-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + LOCAL_TIMESTAMP_MILLIS("local-timestamp-millis", + (node, schema) -> { + if (node.isTextual()) { + return LocalDateTime.parse(node.asText()); + } + // TimeConversions.TimestampMicrosConversion for impl + Instant instant = (Instant) TIMESTAMP_MILLIS.jsonToAvroConversion.apply(node, schema); + return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + LOCAL_TIMESTAMP_MICROS("local-timestamp-micros", + (node, schema) -> { + if (node.isTextual()) { + return LocalDateTime.parse(node.asText()); + } + Instant instant = (Instant) TIMESTAMP_MICROS.jsonToAvroConversion.apply(node, schema); + return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ); + + private final String name; + private final BiFunction<JsonNode, Schema, Object> jsonToAvroConversion; + private final BiFunction<Object, Schema, JsonNode> avroToJsonConversion; + private final FieldSchema jsonSchema; + + LogicalTypeConversion(String name, + BiFunction<JsonNode, Schema, Object> jsonToAvroConversion, + BiFunction<Object, Schema, JsonNode> avroToJsonConversion, + FieldSchema jsonSchema) { + this.name = name; + this.jsonToAvroConversion = jsonToAvroConversion; + this.avroToJsonConversion = avroToJsonConversion; + this.jsonSchema = jsonSchema; + } + + static Optional<FieldSchema> getJsonSchema(Schema schema) { + if (schema.getLogicalType() == null) { + return Optional.empty(); + } + String logicalTypeName = schema.getLogicalType().getName(); + return Stream.of(JsonAvroConversion.LogicalTypeConversion.values()) + .filter(t -> t.name.equalsIgnoreCase(logicalTypeName)) + .map(c -> c.jsonSchema) + .findFirst(); + } + } + + +}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java index 4ea2bf3c2ac..b70450cea5c 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java @@ -2,13 +2,12 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.json.JsonMapper; import com.provectus.kafka.ui.serde.api.DeserializeResult; import com.provectus.kafka.ui.serde.api.SchemaDescription; import com.provectus.kafka.ui.serde.api.Serde; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.avro.AvroSchema; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import java.io.ByteArrayOutputStream; @@ -54,7 +53,8 @@ void returnsSchemaDescriptionIfSchemaRegisteredInSR(String topic, String subject SchemaDescription schemaDescription = schemaOptional.get(); assertThat(schemaDescription.getSchema()) - .contains("{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}"); + .contains( + "{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}"); assertThat(schemaDescription.getAdditionalProperties()) .containsOnlyKeys("subject", "schemaId", "latestVersion", "type") .containsEntry("subject", subject) @@ -189,7 +189,8 @@ void canSerializeReturnsFalseIfSubjectDoesNotExist() { assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse(); } - private void assertJsonsEqual(String expected, String actual) throws JsonProcessingException { + @SneakyThrows + private void assertJsonsEqual(String expected, String actual) { var mapper = new JsonMapper(); assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected)); } @@ -211,9 +212,174 @@ private byte[] jsonToAvro(String json, AvroSchema schema) { GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema.rawSchema()); ByteArrayOutputStream output = new ByteArrayOutputStream(); Encoder encoder = EncoderFactory.get().binaryEncoder(output, null); - writer.write(AvroSchemaUtils.toObject(json, schema), encoder); + writer.write(JsonAvroConversion.convertJsonToAvro(json, schema.rawSchema()), encoder); encoder.flush(); return output.toByteArray(); } + @Test + void avroFieldsRepresentationIsConsistentForSerializationAndDeserialization() throws Exception { + AvroSchema schema = new AvroSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name": "f_map", + "type": { + "type": "map", + "values" : "string", + "default": {} + } + }, + { + "name": "f_union", + "type": ["null", "string", "int" ] + }, + { + "name": 
"f_optional_to_test_not_filled_case", + "type": [ "null", "string"] + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": "string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_map": { "k1": "string value" }, + "f_union": { "int": 123 }, + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + registryClient.register("test-value", schema); + assertSerdeCycle("test", jsonPayload); + } + + @Test + void avroLogicalTypesRepresentationIsConsistentForSerializationAndDeserialization() throws Exception { + AvroSchema schema = new AvroSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + String jsonPayload = """ + { + "lt_date":"1991-08-14", + "lt_decimal": 2.1617413862327545E11, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-03T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-03T10:15:30.123456" + } + """; + + registryClient.register("test-value", schema); + assertSerdeCycle("test", jsonPayload); + } + + // 1. serialize input json to binary + // 2. deserialize from binary + // 3. 
check that deserialized version equal to input + void assertSerdeCycle(String topic, String jsonInput) { + byte[] serializedBytes = serde.serializer(topic, Serde.Target.VALUE).serialize(jsonInput); + var deserializedJson = serde.deserializer(topic, Serde.Target.VALUE) + .deserialize(null, serializedBytes) + .getResult(); + assertJsonsEqual(jsonInput, deserializedJson); + } + } diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java new file mode 100644 index 00000000000..0e9c291707e --- /dev/null +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java @@ -0,0 +1,621 @@ +package com.provectus.kafka.ui.util.jsonschema; + +import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertAvroToJson; +import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertJsonToAvro; +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.DoubleNode; +import com.fasterxml.jackson.databind.node.FloatNode; +import com.fasterxml.jackson.databind.node.IntNode; +import com.fasterxml.jackson.databind.node.LongNode; +import com.fasterxml.jackson.databind.node.TextNode; +import com.google.common.primitives.Longs; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +class JsonAvroConversionTest { + + // checking conversion from json to KafkaAvroSerializer-compatible avro objects + @Nested + class FromJsonToAvro { + + @Test + void primitiveRoot() { + assertThat(convertJsonToAvro("\"str\"", createSchema("\"string\""))) + .isEqualTo("str"); + + assertThat(convertJsonToAvro("123", createSchema("\"int\""))) + .isEqualTo(123); + + assertThat(convertJsonToAvro("123", createSchema("\"long\""))) + .isEqualTo(123L); + + assertThat(convertJsonToAvro("123.123", createSchema("\"float\""))) + .isEqualTo(123.123F); + + assertThat(convertJsonToAvro("12345.12345", createSchema("\"double\""))) + .isEqualTo(12345.12345); + } + + @Test + void primitiveTypedFields() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": 
"string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + var converted = convertJsonToAvro(jsonPayload, schema); + assertThat(converted).isInstanceOf(GenericData.Record.class); + + var record = (GenericData.Record) converted; + assertThat(record.get("f_int")).isEqualTo(123); + assertThat(record.get("f_long")).isEqualTo(4294967294L); + assertThat(record.get("f_string")).isEqualTo("string here"); + assertThat(record.get("f_boolean")).isEqualTo(true); + assertThat(record.get("f_float")).isEqualTo(123.1f); + assertThat(record.get("f_double")).isEqualTo(123456.123456); + assertThat(record.get("f_enum")) + .isEqualTo( + new GenericData.EnumSymbol( + schema.getField("f_enum").schema(), + "SPADES" + ) + ); + assertThat(((GenericData.Fixed) record.get("f_fixed")).bytes()).isEqualTo(Longs.toByteArray(1234L)); + assertThat(((ByteBuffer) record.get("f_bytes")).array()).isEqualTo(Longs.toByteArray(2345L)); + } + + @Test + void unionRoot() { + var schema = createSchema("[ \"null\", \"string\", \"int\" ]"); + + var converted = convertJsonToAvro("{\"string\":\"string here\"}", schema); + assertThat(converted).isEqualTo("string here"); + + converted = convertJsonToAvro("{\"int\": 123}", schema); + assertThat(converted).isEqualTo(123); + + converted = convertJsonToAvro("null", schema); + assertThat(converted).isEqualTo(null); + } + + @Test + void unionField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_union", + "type": [ "null", "int", "TestAvroRecord"] + } + ] + }""" + ); + + String jsonPayload = "{ \"f_union\": null }"; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isNull(); + + jsonPayload = "{ \"f_union\": { \"int\": 123 } }"; + record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isEqualTo(123); + + //inner-record's name should be fully-qualified! 
+ jsonPayload = "{ \"f_union\": { \"com.test.TestAvroRecord\": { \"f_union\": { \"int\": 123 } } } }"; + record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class); + var innerRec = (GenericData.Record) record.get("f_union"); + assertThat(innerRec.get("f_union")).isEqualTo(123); + } + + @Test + void mapField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "long_map", + "type": { + "type": "map", + "values" : "long", + "default": {} + } + }, + { + "name": "string_map", + "type": { + "type": "map", + "values" : "string", + "default": {} + } + }, + { + "name": "self_ref_map", + "type": { + "type": "map", + "values" : "TestAvroRecord", + "default": {} + } + } + ] + }""" + ); + + String jsonPayload = """ + { + "long_map": { + "k1": 123, + "k2": 456 + }, + "string_map": { + "k3": "s1", + "k4": "s2" + }, + "self_ref_map": { + "k5" : { + "long_map": { "_k1": 222 }, + "string_map": { "_k2": "_s1" } + } + } + } + """; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("long_map")) + .isEqualTo(Map.of("k1", 123L, "k2", 456L)); + assertThat(record.get("string_map")) + .isEqualTo(Map.of("k3", "s1", "k4", "s2")); + assertThat(record.get("self_ref_map")) + .isNotNull(); + + Map<String, Object> selfRefMapField = (Map<String, Object>) record.get("self_ref_map"); + assertThat(selfRefMapField) + .hasSize(1) + .hasEntrySatisfying("k5", v -> { + assertThat(v).isInstanceOf(GenericData.Record.class); + var innerRec = (GenericData.Record) v; + assertThat(innerRec.get("long_map")) + .isEqualTo(Map.of("_k1", 222L)); + assertThat(innerRec.get("string_map")) + .isEqualTo(Map.of("_k2", "_s1")); + }); + } + + @Test + void arrayField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_array", + "type": { + "type": "array", + "items" : "string", + "default": [] + } + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_array": [ "e1", "e2" ] + } + """; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_array")).isEqualTo(List.of("e1", "e2")); + } + + @Test + void logicalTypesField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + String jsonPayload = """ + { + "lt_date":"1991-08-14", + "lt_decimal": 2.1617413862327545E11, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_uuid": 
"a37b75ca-097c-5d46-6119-f0637922e908", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456" + } + """; + + var converted = convertJsonToAvro(jsonPayload, schema); + assertThat(converted).isInstanceOf(GenericData.Record.class); + + var record = (GenericData.Record) converted; + + assertThat(record.get("lt_date")) + .isEqualTo(LocalDate.of(1991, 8, 14)); + assertThat(record.get("lt_decimal")) + .isEqualTo(new BigDecimal("2.1617413862327545E11")); + assertThat(record.get("lt_time_millis")) + .isEqualTo(LocalTime.parse("10:15:30.001")); + assertThat(record.get("lt_time_micros")) + .isEqualTo(LocalTime.parse("10:15:30.123456")); + assertThat(record.get("lt_timestamp_millis")) + .isEqualTo(Instant.parse("2007-12-03T10:15:30.123Z")); + assertThat(record.get("lt_timestamp_micros")) + .isEqualTo(Instant.parse("2007-12-13T10:15:30.123456Z")); + assertThat(record.get("lt_local_timestamp_millis")) + .isEqualTo(LocalDateTime.parse("2017-12-03T10:15:30.123")); + assertThat(record.get("lt_local_timestamp_micros")) + .isEqualTo(LocalDateTime.parse("2017-12-13T10:15:30.123456")); + } + } + + // checking conversion of KafkaAvroDeserializer output to JsonNode + @Nested + class FromAvroToJson { + + @Test + void primitiveRoot() { + assertThat(convertAvroToJson("str", createSchema("\"string\""))) + .isEqualTo(new TextNode("str")); + + assertThat(convertAvroToJson(123, createSchema("\"int\""))) + .isEqualTo(new IntNode(123)); + + assertThat(convertAvroToJson(123L, createSchema("\"long\""))) + .isEqualTo(new LongNode(123)); + + assertThat(convertAvroToJson(123.1F, createSchema("\"float\""))) + .isEqualTo(new FloatNode(123.1F)); + + assertThat(convertAvroToJson(123.1, createSchema("\"double\""))) + .isEqualTo(new DoubleNode(123.1)); + + assertThat(convertAvroToJson(true, createSchema("\"boolean\""))) + .isEqualTo(BooleanNode.valueOf(true)); + + assertThat(convertAvroToJson(ByteBuffer.wrap(Longs.toByteArray(123L)), createSchema("\"bytes\""))) + .isEqualTo(new TextNode(new String(Longs.toByteArray(123L), StandardCharsets.ISO_8859_1))); + } + + @SneakyThrows + @Test + void primitiveTypedFields() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + byte[] fixedFieldValue = Longs.toByteArray(1234L); + byte[] bytesFieldValue = Longs.toByteArray(2345L); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("f_int", 123); + inputRecord.put("f_long", 4294967294L); + inputRecord.put("f_string", "string here"); + inputRecord.put("f_boolean", true); + inputRecord.put("f_float", 123.1f); + inputRecord.put("f_double", 123456.123456); + inputRecord.put("f_enum", new GenericData.EnumSymbol(schema.getField("f_enum").schema(), "SPADES")); + inputRecord.put("f_fixed", new 
GenericData.Fixed(schema.getField("f_fixed").schema(), fixedFieldValue)); + inputRecord.put("f_bytes", ByteBuffer.wrap(bytesFieldValue)); + + String expectedJson = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": "string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema)); + } + + @Test + void logicalTypesField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("lt_date", LocalDate.of(1991, 8, 14)); + inputRecord.put("lt_uuid", UUID.fromString("a37b75ca-097c-5d46-6119-f0637922e908")); + inputRecord.put("lt_decimal", new BigDecimal("2.16")); + inputRecord.put("lt_time_millis", LocalTime.parse("10:15:30.001")); + inputRecord.put("lt_time_micros", LocalTime.parse("10:15:30.123456")); + inputRecord.put("lt_timestamp_millis", Instant.parse("2007-12-03T10:15:30.123Z")); + inputRecord.put("lt_timestamp_micros", Instant.parse("2007-12-13T10:15:30.123456Z")); + inputRecord.put("lt_local_timestamp_millis", LocalDateTime.parse("2017-12-03T10:15:30.123")); + inputRecord.put("lt_local_timestamp_micros", LocalDateTime.parse("2017-12-13T10:15:30.123456")); + + String expectedJson = """ + { + "lt_date":"1991-08-14", + "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908", + "lt_decimal": 2.16, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456" + } + """; + + assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema)); + } + + @Test + void unionField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_union", + "type": [ "null", "int", "TestAvroRecord"] + } + ] + }""" + ); + + var r = new GenericData.Record(schema); + r.put("f_union", null); + assertJsonsEqual(" {}", convertAvroToJson(r, schema)); + + r = new GenericData.Record(schema); + r.put("f_union", 123); + assertJsonsEqual(" { \"f_union\" : { \"int\" : 123 } }", convertAvroToJson(r, schema)); + + + r = new GenericData.Record(schema); + var innerRec = new 
GenericData.Record(schema); + innerRec.put("f_union", 123); + r.put("f_union", innerRec); + assertJsonsEqual( + " { \"f_union\" : { \"com.test.TestAvroRecord\" : { \"f_union\" : { \"int\" : 123 } } } }", + convertAvroToJson(r, schema) + ); + } + + } + + private Schema createSchema(String schema) { + return new AvroSchema(schema).rawSchema(); + } + + @SneakyThrows + private void assertJsonsEqual(String expectedJson, JsonNode actual) { + var mapper = new JsonMapper(); + assertThat(actual.toPrettyString()) + .isEqualTo(mapper.readTree(expectedJson).toPrettyString()); + } + +}
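The union tests above rely on Avro's JSON encoding rule that a non-null union value is wrapped in an object whose single key is the branch's (fully qualified) type name. A minimal sketch of that encoding with plain Avro; the class name is ours:

```java
import java.io.ByteArrayOutputStream;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.avro.io.JsonEncoder;

// Shows why the tests expect {"int": 123} rather than a bare 123 for union values.
public class UnionJsonEncodingSketch {
  public static void main(String[] args) throws Exception {
    Schema union = new Schema.Parser().parse("[\"null\", \"int\", \"string\"]");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    JsonEncoder encoder = EncoderFactory.get().jsonEncoder(union, out);
    new GenericDatumWriter<Object>(union).write(123, encoder); // branch resolves to "int"
    encoder.flush();
    System.out.println(out); // {"int":123}
  }
}
```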
train
test
2023-06-06T12:57:58
"2022-10-17T12:07:36Z"
iliax
train
provectus/kafka-ui/2943_3808
provectus/kafka-ui
provectus/kafka-ui/2943
provectus/kafka-ui/3808
[ "connected" ]
4b724fd852f9814ec3ab9316a739cc2d8a1f282c
c3559556411189bdfa72b67188ee9d49a47fce37
[ "Thanks for raising an issue", "You are missing config setting on the kafka avro deserializer. \r\n\r\nSpecifically AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG should be set to true\r\n\r\n ` \r\n protected void configure(KafkaAvroDeserializerConfig config) {\r\n configureClientProperties(config, new AvroSchemaProvider());\r\n useSpecificAvroReader = config\r\n .getBoolean(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG);\r\n avroReflectionAllowNull = config\r\n .getBoolean(KafkaAvroDeserializerConfig.AVRO_REFLECTION_ALLOW_NULL_CONFIG);\r\n avroUseLogicalTypeConverters = config\r\n .getBoolean(KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG);\r\n }\r\n `", "It will also fix https://github.com/provectus/kafka-ui/issues/2764", "@tkaszuba thank you for suggestion, but unfortunately it is not so simple (we know about that properties).\r\n1. we need to implement json string -> avro record translation to support serialization. Currently we use ` AvroSchemaUtils.toObject(json, schema)` , but that does not support logical types.\r\n2. we need to support avro schema -> json schema translation (used for samples generation in UI). And that code is now written from scratch (`AvroJsonSchemaConverter`), so it also need to support logical types. \r\n\r\nSo, there is some non-trivial things to be done πŸ˜ƒ", "ok, I have it working so I guess we'll just fork it for the time being\nwhile we wait for a proper fix.\n\nEither way I'm sending over my patch, perhaps it will come in handy.\n\nbtw: Thank you for this wonderful product! It's way better than confluent\ncontrol center! Congrats!\n\n\nOn Fri, 21 Apr 2023 at 18:54, Ilya Kuramshin ***@***.***>\nwrote:\n\n> @tkaszuba <https://github.com/tkaszuba> thank you for suggestion, but\n> unfortunately it is not so simple (we know about that properties).\n>\n> 1. we need to implement json string -> avro record translation to\n> support serialization. Currently we use AvroSchemaUtils.toObject(json,\n> schema) , but that does not support logical types.\n> 2. we need to support avro schema -> json schema translation (used for\n> samples generation in UI). And that code is now written from scratch (\n> AvroJsonSchemaConverter), so it also need to support logical types.\n>\n> So, there is some non-trivial things to be done πŸ˜ƒ\n>\n> β€”\n> Reply to this email directly, view it on GitHub\n> <https://github.com/provectus/kafka-ui/issues/2943#issuecomment-1518097703>,\n> or unsubscribe\n> <https://github.com/notifications/unsubscribe-auth/AG2T7OV3WW2X3PCXKDPKXP3XCK3UVANCNFSM6AAAAAASBGBLJE>\n> .\n> You are receiving this because you were mentioned.Message ID:\n> ***@***.***>\n>\n" ]
[]
"2023-05-10T15:40:29Z"
[ "type/bug", "scope/backend", "status/accepted", "status/confirmed", "area/serde" ]
Fields with the decimal logical type display unicode values instead of the original numeric values.
<!-- Don't forget to check for existing issues/discussions regarding your proposal. We might already have it. https://github.com/provectus/kafka-ui/issues https://github.com/provectus/kafka-ui/discussions --> **Describe the bug** <!--(A clear and concise description of what the bug is.)--> Fields with logical type as decimal are not showing correct numeric values from topics with schema registry. details for fields display value and schema registry definition is given below in addl context. **Set up** <!-- How do you run the app? Please provide as much info as possible: 1. App version (docker image version or check commit hash in the top left corner in UI) 2. Helm chart version, if you use one 3. Any IAAC configs We might close the issue without further explanation if you don't provide such information. --> 11.15.2022 07:05:35 ( [5900f8e](https://github.com/provectus/kafka-ui/commit/5900f8e) ) **Steps to Reproduce** <!-- We'd like you to provide an example setup (via docker-compose, helm, etc.) to reproduce the problem, especially with a complex setups. --> Steps to reproduce the behavior: 1. **Expected behavior** <!-- (A clear and concise description of what you expected to happen) --> **Screenshots** <!-- (If applicable, add screenshots to help explain your problem) --> **Additional context** <!-- (Add any other context about the problem here) --> <html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:x="urn:schemas-microsoft-com:office:excel" xmlns="http://www.w3.org/TR/REC-html40"> <head> <meta name=ProgId content=Excel.Sheet> <meta name=Generator content="Microsoft Excel 15"> <link id=Main-File rel=Main-File href="file:///C:/Users/nigupta/AppData/Local/Temp/msohtmlclip1/01/clip.htm"> <link rel=File-List href="file:///C:/Users/nigupta/AppData/Local/Temp/msohtmlclip1/01/clip_filelist.xml"> <style> <!--table {mso-displayed-decimal-separator:"\."; mso-displayed-thousand-separator:"\,";} @page {margin:.75in .7in .75in .7in; mso-header-margin:.3in; mso-footer-margin:.3in;} tr {mso-height-source:auto;} col {mso-width-source:auto;} br {mso-data-placement:same-cell;} td {padding-top:1px; padding-right:1px; padding-left:1px; mso-ignore:padding; color:black; font-size:11.0pt; font-weight:400; font-style:normal; text-decoration:none; font-family:Calibri, sans-serif; mso-font-charset:0; mso-number-format:General; text-align:general; vertical-align:bottom; border:none; mso-background-source:auto; mso-pattern:auto; mso-protection:locked visible; white-space:nowrap; mso-rotate:0;} .xl63 {white-space:normal;} .xl64 {vertical-align:top; white-space:normal;} .xl65 {vertical-align:top;} --> </style> </head> <body link="#0563C1" vlink="#954F72"> Data | SchemaRegistry -- | -- "field1": { Β "bytes": "#†òoÁ\u0000\u0000" }, | { Β Β Β Β Β  "name": "field1", Β Β Β Β Β  "type": [ Β Β Β Β Β Β  "null", Β Β Β Β Β Β  { Β Β Β Β Β Β Β  "type": "bytes", Β Β Β Β Β Β Β  "logicalType": "decimal", Β Β Β Β Β Β Β  "precision": 24, Β Β Β Β Β Β Β  "scale": 16 Β Β Β Β Β Β  } Β Β Β Β Β  ], Β Β Β Β Β  "doc": "this is field1.", Β Β Β Β Β  "default": null Β Β Β Β  }, "field2": { Β "bytes": "#†òoÁ\u0000\u0000" }, | { Β Β Β Β Β  "name": "field2", Β Β Β Β Β  "type": [ Β Β Β Β Β Β  "null", Β Β Β Β Β Β  { Β Β Β Β Β Β Β  "type": "bytes", Β Β Β Β Β Β Β  "logicalType": "decimal", Β Β Β Β Β Β Β  "precision": 24, Β Β Β Β Β Β Β  "scale": 16 Β Β Β Β Β Β  } Β Β Β Β Β  ], Β Β Β Β Β  "doc": "this is field2.", Β Β Β Β Β  "default": null Β Β Β Β  }, "field3": { Β "string": "L" }, | Β  "field4": { Β 
"string": "0" }, | Β  "field5": { Β "boolean": false }, | Β  "field6": { Β "com.avro.schema.common.BigAmount_30_10": { Β  "amount": "\u000e3Γ’\"\u0000", Β  "currency": "USD" Β } }, | { Β Β Β Β Β  "name": "field6", Β Β Β Β Β  "type": [ Β Β Β Β Β Β  "null", Β Β Β Β Β Β  { Β Β Β Β Β Β Β  "type": "record", Β Β Β Β Β Β Β  "name": "BigAmount_30_10", Β Β Β Β Β Β Β  "namespace": "com.avro.schema.common", Β Β Β Β Β Β Β  "fields": [ Β Β Β Β Β Β Β Β  { Β Β Β Β Β Β Β Β Β  "name": "amount", Β Β Β Β Β Β Β Β Β  "type": { Β Β Β Β Β Β Β Β Β Β  "type": "bytes", Β Β Β Β Β Β Β Β Β Β  "logicalType": "decimal", Β Β Β Β Β Β Β Β Β Β  "precision": 30, Β Β Β Β Β Β Β Β Β Β  "scale": 10 Β Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β Β  }, Β Β Β Β Β Β Β Β  { Β Β Β Β Β Β Β Β Β  "name": "currency", Β Β Β Β Β Β Β Β Β  "type": { Β Β Β Β Β Β Β Β Β Β  "type": "string", Β Β Β Β Β Β Β Β Β Β  "avro.java.string": "String" Β Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β  ] Β Β Β Β Β Β  } Β Β Β Β Β  ], Β Β Β Β Β  "doc": "this is field6.", Β Β Β Β Β  "default": null Β Β Β Β  }, "field7": { Β "com.avro.schema.common.BigAmount_20_8": { Β  "amount": "$[ΓœΒ€", Β  "currency": "USD" Β } }, | { Β Β Β Β Β  "name": "field7", Β Β Β Β Β  "type": [ Β Β Β Β Β Β  "null", Β Β Β Β Β Β  { Β Β Β Β Β Β Β  "type": "record", Β Β Β Β Β Β Β  "name": "BigAmount_20_8", Β Β Β Β Β Β Β  "namespace": "com.avro.schema.common", Β Β Β Β Β Β Β  "fields": [ Β Β Β Β Β Β Β Β  { Β Β Β Β Β Β Β Β Β  "name": "amount", Β Β Β Β Β Β Β Β Β  "type": { Β Β Β Β Β Β Β Β Β Β  "type": "bytes", Β Β Β Β Β Β Β Β Β Β  "logicalType": "decimal", Β Β Β Β Β Β Β Β Β Β  "precision": 20, Β Β Β Β Β Β Β Β Β Β  "scale": 8 Β Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β Β  }, Β Β Β Β Β Β Β Β  { Β Β Β Β Β Β Β Β Β  "name": "currency", Β Β Β Β Β Β Β Β Β  "type": { Β Β Β Β Β Β Β Β Β Β  "type": "string", Β Β Β Β Β Β Β Β Β Β  "avro.java.string": "String" Β Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β Β  } Β Β Β Β Β Β Β  ] Β Β Β Β Β Β  } Β Β Β Β Β  ], Β Β Β Β Β  "doc": "this is field7.", Β Β Β Β Β  "default": null Β Β Β Β  }, "field8": { Β "com.avro.schema.common.BigAmount_30_10": { Β  "amount": "\u000e3Γ’\"\u0000", Β  "currency": "USD" Β } }, | { Β Β Β Β Β  "name": "field8", Β Β Β Β Β  "type": [ Β Β Β Β Β Β  "null", Β Β Β Β Β Β  "com.avro.schema.common.BigAmount_30_10" Β Β Β Β Β  ], Β Β Β Β Β  "doc": "this is field8.", Β Β Β Β Β  "default": null Β Β Β Β  }, </body> </html>
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java new file mode 100644 index 00000000000..5b3910f4cf3 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java @@ -0,0 +1,7 @@ +package com.provectus.kafka.ui.exception; + +public class JsonToAvroConversionException extends ValidationException { + public JsonToAvroConversionException(String message) { + super(message); + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java index 3c4a5008552..164938bfc70 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/AvroSchemaRegistrySerializer.java @@ -1,12 +1,13 @@ package com.provectus.kafka.ui.serdes.builtin.sr; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.ParsedSchema; import io.confluent.kafka.schemaregistry.avro.AvroSchema; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.SchemaMetadata; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroSerializer; +import io.confluent.kafka.serializers.KafkaAvroSerializerConfig; import java.util.Map; import org.apache.kafka.common.serialization.Serializer; @@ -25,6 +26,7 @@ protected Serializer<Object> createSerializer(SchemaRegistryClient client) { Map.of( "schema.registry.url", "wontbeused", AbstractKafkaSchemaSerDeConfig.AUTO_REGISTER_SCHEMAS, false, + KafkaAvroSerializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true, AbstractKafkaSchemaSerDeConfig.USE_LATEST_VERSION, true ), isKey @@ -35,7 +37,7 @@ protected Serializer<Object> createSerializer(SchemaRegistryClient client) { @Override protected Object serialize(String value, ParsedSchema schema) { try { - return AvroSchemaUtils.toObject(value, (AvroSchema) schema); + return JsonAvroConversion.convertJsonToAvro(value, ((AvroSchema) schema).rawSchema()); } catch (Throwable e) { throw new RuntimeException("Failed to serialize record for topic " + topic, e); } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java index 52e8c6f33a4..40073d85347 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/sr/MessageFormatter.java @@ -3,9 +3,12 @@ import com.fasterxml.jackson.databind.JsonNode; import com.google.protobuf.Message; import com.google.protobuf.util.JsonFormat; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient; +import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import io.confluent.kafka.serializers.KafkaAvroDeserializer; +import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig; import 
io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer; import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer; import java.util.Map; @@ -28,16 +31,22 @@ class AvroMessageFormatter implements MessageFormatter { AvroMessageFormatter(SchemaRegistryClient client) { this.avroDeserializer = new KafkaAvroDeserializer(client); + this.avroDeserializer.configure( + Map.of( + AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "wontbeused", + KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, false, + KafkaAvroDeserializerConfig.SCHEMA_REFLECTION_CONFIG, false, + KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true + ), + false + ); } @Override - @SneakyThrows public String format(String topic, byte[] value) { - // deserialized will have type, that depends on schema type (record or primitive), - // AvroSchemaUtils.toJson(...) method will take it into account Object deserialized = avroDeserializer.deserialize(topic, value); - byte[] jsonBytes = AvroSchemaUtils.toJson(deserialized); - return new String(jsonBytes); + var schema = AvroSchemaUtils.getSchema(deserialized); + return JsonAvroConversion.convertAvroToJson(deserialized, schema).toString(); } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java index d2cd0e65f39..56737d7a612 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -5,6 +5,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import org.apache.avro.Schema; import reactor.util.function.Tuple2; @@ -40,6 +41,10 @@ private FieldSchema convertField(Schema.Field field, Map<String, FieldSchema> de private FieldSchema convertSchema(Schema schema, Map<String, FieldSchema> definitions, boolean isRoot) { + Optional<FieldSchema> logicalTypeSchema = JsonAvroConversion.LogicalTypeConversion.getJsonSchema(schema); + if (logicalTypeSchema.isPresent()) { + return logicalTypeSchema.get(); + } if (!schema.isUnion()) { JsonType type = convertType(schema); switch (type.getType()) { @@ -66,7 +71,6 @@ private FieldSchema convertSchema(Schema schema, } } - // this method formats json-schema field in a way // to fit avro-> json encoding rules (https://avro.apache.org/docs/1.11.1/specification/_print/#json-encoding) private FieldSchema createUnionSchema(Schema schema, Map<String, FieldSchema> definitions) { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java new file mode 100644 index 00000000000..d2114dd971c --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java @@ -0,0 +1,503 @@ +package com.provectus.kafka.ui.util.jsonschema; + +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.DecimalNode; +import com.fasterxml.jackson.databind.node.DoubleNode; +import com.fasterxml.jackson.databind.node.FloatNode; +import 
com.fasterxml.jackson.databind.node.IntNode; +import com.fasterxml.jackson.databind.node.JsonNodeType; +import com.fasterxml.jackson.databind.node.LongNode; +import com.fasterxml.jackson.databind.node.NullNode; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.fasterxml.jackson.databind.node.TextNode; +import com.google.common.collect.Lists; +import com.provectus.kafka.ui.exception.JsonToAvroConversionException; +import io.confluent.kafka.serializers.AvroData; +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.ZoneOffset; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.stream.Stream; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; + +// json <-> avro +public class JsonAvroConversion { + + private static final JsonMapper MAPPER = new JsonMapper(); + + // converts json into Object that is expected input for KafkaAvroSerializer + // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) + @SneakyThrows + public static Object convertJsonToAvro(String jsonString, Schema avroSchema) { + JsonNode rootNode = MAPPER.readTree(jsonString); + return convert(rootNode, avroSchema); + } + + private static Object convert(JsonNode node, Schema avroSchema) { + return switch (avroSchema.getType()) { + case RECORD -> { + assertJsonType(node, JsonNodeType.OBJECT); + var rec = new GenericData.Record(avroSchema); + for (Schema.Field field : avroSchema.getFields()) { + if (node.has(field.name()) && !node.get(field.name()).isNull()) { + rec.put(field.name(), convert(node.get(field.name()), field.schema())); + } + } + yield rec; + } + case MAP -> { + assertJsonType(node, JsonNodeType.OBJECT); + var map = new LinkedHashMap<String, Object>(); + var valueSchema = avroSchema.getValueType(); + node.fields().forEachRemaining(f -> map.put(f.getKey(), convert(f.getValue(), valueSchema))); + yield map; + } + case ARRAY -> { + assertJsonType(node, JsonNodeType.ARRAY); + var lst = new ArrayList<>(); + node.elements().forEachRemaining(e -> lst.add(convert(e, avroSchema.getElementType()))); + yield lst; + } + case ENUM -> { + assertJsonType(node, JsonNodeType.STRING); + String symbol = node.textValue(); + if (!avroSchema.getEnumSymbols().contains(symbol)) { + throw new JsonToAvroConversionException("%s is not a part of enum symbols [%s]" + .formatted(symbol, avroSchema.getEnumSymbols())); + } + yield new GenericData.EnumSymbol(avroSchema, symbol); + } + case UNION -> { + // for types from enum (other than null) payload should be an object with single key == name of type + // ex: schema = [ "null", "int", "string" ], possible payloads = null, { "string": "str" }, { "int": 123 } + if (node.isNull() && avroSchema.getTypes().contains(Schema.create(Schema.Type.NULL))) { + yield null; + } + + assertJsonType(node, JsonNodeType.OBJECT); + var elements = Lists.newArrayList(node.fields()); + if (elements.size() != 1) { + throw new JsonToAvroConversionException( + "UNION field value should be an object with single field == type name"); + } + var typeNameToValue = elements.get(0); + for (Schema unionType : avroSchema.getTypes()) { + if 
(typeNameToValue.getKey().equals(unionType.getFullName())) { + yield convert(typeNameToValue.getValue(), unionType); + } + } + throw new JsonToAvroConversionException( + "json value '%s' is cannot be converted to any of union types [%s]" + .formatted(node, avroSchema.getTypes())); + } + case STRING -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + yield node.textValue(); + } + case LONG -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.LONG, JsonParser.NumberType.INT); + yield node.longValue(); + } + case INT -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.INT); + yield node.intValue(); + } + case FLOAT -> { + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT); + yield node.floatValue(); + } + case DOUBLE -> { + assertJsonType(node, JsonNodeType.NUMBER); + assertJsonNumberType(node, JsonParser.NumberType.DOUBLE, JsonParser.NumberType.FLOAT); + yield node.doubleValue(); + } + case BOOLEAN -> { + assertJsonType(node, JsonNodeType.BOOLEAN); + yield node.booleanValue(); + } + case NULL -> { + assertJsonType(node, JsonNodeType.NULL); + yield null; + } + case BYTES -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + // logic copied from JsonDecoder::readBytes + yield ByteBuffer.wrap(node.textValue().getBytes(StandardCharsets.ISO_8859_1)); + } + case FIXED -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(node, avroSchema); + } + assertJsonType(node, JsonNodeType.STRING); + byte[] bytes = node.textValue().getBytes(StandardCharsets.ISO_8859_1); + if (bytes.length != avroSchema.getFixedSize()) { + throw new JsonToAvroConversionException( + "Fixed field has unexpected size %d (should be %d)" + .formatted(bytes.length, avroSchema.getFixedSize())); + } + yield new GenericData.Fixed(avroSchema, bytes); + } + }; + } + + // converts output of KafkaAvroDeserializer (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) into json. + // Note: conversion should be compatible with AvroJsonSchemaConverter logic! 
+ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { + if (obj == null) { + return NullNode.getInstance(); + } + return switch (avroSchema.getType()) { + case RECORD -> { + var rec = (GenericData.Record) obj; + ObjectNode node = MAPPER.createObjectNode(); + for (Schema.Field field : avroSchema.getFields()) { + var fieldVal = rec.get(field.name()); + if (fieldVal != null) { + node.set(field.name(), convertAvroToJson(fieldVal, field.schema())); + } + } + yield node; + } + case MAP -> { + ObjectNode node = MAPPER.createObjectNode(); + ((Map) obj).forEach((k, v) -> node.set(k.toString(), convertAvroToJson(v, avroSchema.getValueType()))); + yield node; + } + case ARRAY -> { + var list = (List<Object>) obj; + ArrayNode node = MAPPER.createArrayNode(); + list.forEach(e -> node.add(convertAvroToJson(e, avroSchema.getElementType()))); + yield node; + } + case ENUM -> { + yield new TextNode(obj.toString()); + } + case UNION -> { + ObjectNode node = MAPPER.createObjectNode(); + int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj); + Schema unionType = avroSchema.getTypes().get(unionIdx); + node.set(unionType.getFullName(), convertAvroToJson(obj, unionType)); + yield node; + } + case STRING -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new TextNode(obj.toString()); + } + case LONG -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new LongNode((Long) obj); + } + case INT -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + yield new IntNode((Integer) obj); + } + case FLOAT -> new FloatNode((Float) obj); + case DOUBLE -> new DoubleNode((Double) obj); + case BOOLEAN -> BooleanNode.valueOf((Boolean) obj); + case NULL -> NullNode.getInstance(); + case BYTES -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + ByteBuffer bytes = (ByteBuffer) obj; + //see JsonEncoder::writeByteArray + yield new TextNode(new String(bytes.array(), StandardCharsets.ISO_8859_1)); + } + case FIXED -> { + if (isLogicalType(avroSchema)) { + yield processLogicalType(obj, avroSchema); + } + var fixed = (GenericData.Fixed) obj; + yield new TextNode(new String(fixed.bytes(), StandardCharsets.ISO_8859_1)); + } + }; + } + + private static Object processLogicalType(JsonNode node, Schema schema) { + return findConversion(schema) + .map(c -> c.jsonToAvroConversion.apply(node, schema)) + .orElseThrow(() -> + new JsonToAvroConversionException("'%s' logical type is not supported" + .formatted(schema.getLogicalType().getName()))); + } + + private static JsonNode processLogicalType(Object obj, Schema schema) { + return findConversion(schema) + .map(c -> c.avroToJsonConversion.apply(obj, schema)) + .orElseThrow(() -> + new JsonToAvroConversionException("'%s' logical type is not supported" + .formatted(schema.getLogicalType().getName()))); + } + + private static Optional<LogicalTypeConversion> findConversion(Schema schema) { + String logicalTypeName = schema.getLogicalType().getName(); + return Stream.of(LogicalTypeConversion.values()) + .filter(t -> t.name.equalsIgnoreCase(logicalTypeName)) + .findFirst(); + } + + private static boolean isLogicalType(Schema schema) { + return schema.getLogicalType() != null; + } + + private static void assertJsonType(JsonNode node, JsonNodeType... 
allowedTypes) { + if (Stream.of(allowedTypes).noneMatch(t -> node.getNodeType() == t)) { + throw new JsonToAvroConversionException( + "%s node has unexpected type, allowed types %s, actual type %s" + .formatted(node, Arrays.toString(allowedTypes), node.getNodeType())); + } + } + + private static void assertJsonNumberType(JsonNode node, JsonParser.NumberType... allowedTypes) { + if (Stream.of(allowedTypes).noneMatch(t -> node.numberType() == t)) { + throw new JsonToAvroConversionException( + "%s node has unexpected numeric type, allowed types %s, actual type %s" + .formatted(node, Arrays.toString(allowedTypes), node.numberType())); + } + } + + enum LogicalTypeConversion { + + UUID("uuid", + (node, schema) -> { + assertJsonType(node, JsonNodeType.STRING); + return java.util.UUID.fromString(node.asText()); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("uuid")))) + ), + + DECIMAL("decimal", + (node, schema) -> { + if (node.isTextual()) { + return new BigDecimal(node.asText()); + } else if (node.isNumber()) { + return new BigDecimal(node.numberValue().toString()); + } + throw new JsonToAvroConversionException( + "node '%s' can't be converted to decimal logical type" + .formatted(node)); + }, + (obj, schema) -> { + return new DecimalNode((BigDecimal) obj); + }, + new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.NUMBER)) + ), + + DATE("date", + (node, schema) -> { + if (node.isInt()) { + return LocalDate.ofEpochDay(node.intValue()); + } else if (node.isTextual()) { + return LocalDate.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to date logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date")))) + ), + + TIME_MILLIS("time-millis", + (node, schema) -> { + if (node.isIntegralNumber()) { + return LocalTime.ofNanoOfDay(TimeUnit.MILLISECONDS.toNanos(node.longValue())); + } else if (node.isTextual()) { + return LocalTime.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to time-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("time")))) + ), + + TIME_MICROS("time-micros", + (node, schema) -> { + if (node.isIntegralNumber()) { + return LocalTime.ofNanoOfDay(TimeUnit.MICROSECONDS.toNanos(node.longValue())); + } else if (node.isTextual()) { + return LocalTime.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to time-micros logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("time")))) + ), + + TIMESTAMP_MILLIS("timestamp-millis", + (node, schema) -> { + if (node.isIntegralNumber()) { + return Instant.ofEpochMilli(node.longValue()); + } else if (node.isTextual()) { + return Instant.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to timestamp-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + 
new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + TIMESTAMP_MICROS("timestamp-micros", + (node, schema) -> { + if (node.isIntegralNumber()) { + // TimeConversions.TimestampMicrosConversion for impl + long microsFromEpoch = node.longValue(); + long epochSeconds = microsFromEpoch / (1_000_000L); + long nanoAdjustment = (microsFromEpoch % (1_000_000L)) * 1_000L; + return Instant.ofEpochSecond(epochSeconds, nanoAdjustment); + } else if (node.isTextual()) { + return Instant.parse(node.asText()); + } else { + throw new JsonToAvroConversionException( + "node '%s' can't be converted to timestamp-millis logical type" + .formatted(node)); + } + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + LOCAL_TIMESTAMP_MILLIS("local-timestamp-millis", + (node, schema) -> { + if (node.isTextual()) { + return LocalDateTime.parse(node.asText()); + } + // TimeConversions.TimestampMicrosConversion for impl + Instant instant = (Instant) TIMESTAMP_MILLIS.jsonToAvroConversion.apply(node, schema); + return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ), + + LOCAL_TIMESTAMP_MICROS("local-timestamp-micros", + (node, schema) -> { + if (node.isTextual()) { + return LocalDateTime.parse(node.asText()); + } + Instant instant = (Instant) TIMESTAMP_MICROS.jsonToAvroConversion.apply(node, schema); + return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); + }, + (obj, schema) -> { + return new TextNode(obj.toString()); + }, + new SimpleFieldSchema( + new SimpleJsonType( + JsonType.Type.STRING, + Map.of("format", new TextNode("date-time")))) + ); + + private final String name; + private final BiFunction<JsonNode, Schema, Object> jsonToAvroConversion; + private final BiFunction<Object, Schema, JsonNode> avroToJsonConversion; + private final FieldSchema jsonSchema; + + LogicalTypeConversion(String name, + BiFunction<JsonNode, Schema, Object> jsonToAvroConversion, + BiFunction<Object, Schema, JsonNode> avroToJsonConversion, + FieldSchema jsonSchema) { + this.name = name; + this.jsonToAvroConversion = jsonToAvroConversion; + this.avroToJsonConversion = avroToJsonConversion; + this.jsonSchema = jsonSchema; + } + + static Optional<FieldSchema> getJsonSchema(Schema schema) { + if (schema.getLogicalType() == null) { + return Optional.empty(); + } + String logicalTypeName = schema.getLogicalType().getName(); + return Stream.of(JsonAvroConversion.LogicalTypeConversion.values()) + .filter(t -> t.name.equalsIgnoreCase(logicalTypeName)) + .map(c -> c.jsonSchema) + .findFirst(); + } + } + + +}
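For reference, a round trip through the helper added by this patch would look roughly like this; the schema literal and class name are ours, and the behavior is inferred from the patch above:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion;
import org.apache.avro.Schema;

class JsonAvroRoundTripSketch {
  public static void main(String[] args) {
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"bytes\",\"logicalType\":\"decimal\",\"precision\":24,\"scale\":16}");
    // JSON number -> BigDecimal (DECIMAL branch of LogicalTypeConversion)
    Object avro = JsonAvroConversion.convertJsonToAvro("4.15", schema);
    // BigDecimal -> numeric JSON node, instead of an ISO-8859-1 byte string
    JsonNode json = JsonAvroConversion.convertAvroToJson(avro, schema);
    System.out.println(json); // 4.15
  }
}
```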
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java index 4ea2bf3c2ac..b70450cea5c 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java @@ -2,13 +2,12 @@ import static org.assertj.core.api.Assertions.assertThat; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.json.JsonMapper; import com.provectus.kafka.ui.serde.api.DeserializeResult; import com.provectus.kafka.ui.serde.api.SchemaDescription; import com.provectus.kafka.ui.serde.api.Serde; +import com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion; import io.confluent.kafka.schemaregistry.avro.AvroSchema; -import io.confluent.kafka.schemaregistry.avro.AvroSchemaUtils; import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient; import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException; import java.io.ByteArrayOutputStream; @@ -54,7 +53,8 @@ void returnsSchemaDescriptionIfSchemaRegisteredInSR(String topic, String subject SchemaDescription schemaDescription = schemaOptional.get(); assertThat(schemaDescription.getSchema()) - .contains("{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}"); + .contains( + "{\"$id\":\"int\",\"$schema\":\"https://json-schema.org/draft/2020-12/schema\",\"type\":\"integer\"}"); assertThat(schemaDescription.getAdditionalProperties()) .containsOnlyKeys("subject", "schemaId", "latestVersion", "type") .containsEntry("subject", subject) @@ -189,7 +189,8 @@ void canSerializeReturnsFalseIfSubjectDoesNotExist() { assertThat(serde.canSerialize(topic, Serde.Target.VALUE)).isFalse(); } - private void assertJsonsEqual(String expected, String actual) throws JsonProcessingException { + @SneakyThrows + private void assertJsonsEqual(String expected, String actual) { var mapper = new JsonMapper(); assertThat(mapper.readTree(actual)).isEqualTo(mapper.readTree(expected)); } @@ -211,9 +212,174 @@ private byte[] jsonToAvro(String json, AvroSchema schema) { GenericDatumWriter<Object> writer = new GenericDatumWriter<>(schema.rawSchema()); ByteArrayOutputStream output = new ByteArrayOutputStream(); Encoder encoder = EncoderFactory.get().binaryEncoder(output, null); - writer.write(AvroSchemaUtils.toObject(json, schema), encoder); + writer.write(JsonAvroConversion.convertJsonToAvro(json, schema.rawSchema()), encoder); encoder.flush(); return output.toByteArray(); } + @Test + void avroFieldsRepresentationIsConsistentForSerializationAndDeserialization() throws Exception { + AvroSchema schema = new AvroSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name": "f_map", + "type": { + "type": "map", + "values" : "string", + "default": {} + } + }, + { + "name": "f_union", + "type": ["null", "string", "int" ] + }, + { + "name": 
"f_optional_to_test_not_filled_case", + "type": [ "null", "string"] + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": "string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_map": { "k1": "string value" }, + "f_union": { "int": 123 }, + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + registryClient.register("test-value", schema); + assertSerdeCycle("test", jsonPayload); + } + + @Test + void avroLogicalTypesRepresentationIsConsistentForSerializationAndDeserialization() throws Exception { + AvroSchema schema = new AvroSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + String jsonPayload = """ + { + "lt_date":"1991-08-14", + "lt_decimal": 2.1617413862327545E11, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-03T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-03T10:15:30.123456" + } + """; + + registryClient.register("test-value", schema); + assertSerdeCycle("test", jsonPayload); + } + + // 1. serialize input json to binary + // 2. deserialize from binary + // 3. 
check that deserialized version equal to input + void assertSerdeCycle(String topic, String jsonInput) { + byte[] serializedBytes = serde.serializer(topic, Serde.Target.VALUE).serialize(jsonInput); + var deserializedJson = serde.deserializer(topic, Serde.Target.VALUE) + .deserialize(null, serializedBytes) + .getResult(); + assertJsonsEqual(jsonInput, deserializedJson); + } + } diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java new file mode 100644 index 00000000000..0e9c291707e --- /dev/null +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java @@ -0,0 +1,621 @@ +package com.provectus.kafka.ui.util.jsonschema; + +import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertAvroToJson; +import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertJsonToAvro; +import static org.assertj.core.api.Assertions.assertThat; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.json.JsonMapper; +import com.fasterxml.jackson.databind.node.BooleanNode; +import com.fasterxml.jackson.databind.node.DoubleNode; +import com.fasterxml.jackson.databind.node.FloatNode; +import com.fasterxml.jackson.databind.node.IntNode; +import com.fasterxml.jackson.databind.node.LongNode; +import com.fasterxml.jackson.databind.node.TextNode; +import com.google.common.primitives.Longs; +import io.confluent.kafka.schemaregistry.avro.AvroSchema; +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import lombok.SneakyThrows; +import org.apache.avro.Schema; +import org.apache.avro.generic.GenericData; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +class JsonAvroConversionTest { + + // checking conversion from json to KafkaAvroSerializer-compatible avro objects + @Nested + class FromJsonToAvro { + + @Test + void primitiveRoot() { + assertThat(convertJsonToAvro("\"str\"", createSchema("\"string\""))) + .isEqualTo("str"); + + assertThat(convertJsonToAvro("123", createSchema("\"int\""))) + .isEqualTo(123); + + assertThat(convertJsonToAvro("123", createSchema("\"long\""))) + .isEqualTo(123L); + + assertThat(convertJsonToAvro("123.123", createSchema("\"float\""))) + .isEqualTo(123.123F); + + assertThat(convertJsonToAvro("12345.12345", createSchema("\"double\""))) + .isEqualTo(12345.12345); + } + + @Test + void primitiveTypedFields() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": 
"string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + var converted = convertJsonToAvro(jsonPayload, schema); + assertThat(converted).isInstanceOf(GenericData.Record.class); + + var record = (GenericData.Record) converted; + assertThat(record.get("f_int")).isEqualTo(123); + assertThat(record.get("f_long")).isEqualTo(4294967294L); + assertThat(record.get("f_string")).isEqualTo("string here"); + assertThat(record.get("f_boolean")).isEqualTo(true); + assertThat(record.get("f_float")).isEqualTo(123.1f); + assertThat(record.get("f_double")).isEqualTo(123456.123456); + assertThat(record.get("f_enum")) + .isEqualTo( + new GenericData.EnumSymbol( + schema.getField("f_enum").schema(), + "SPADES" + ) + ); + assertThat(((GenericData.Fixed) record.get("f_fixed")).bytes()).isEqualTo(Longs.toByteArray(1234L)); + assertThat(((ByteBuffer) record.get("f_bytes")).array()).isEqualTo(Longs.toByteArray(2345L)); + } + + @Test + void unionRoot() { + var schema = createSchema("[ \"null\", \"string\", \"int\" ]"); + + var converted = convertJsonToAvro("{\"string\":\"string here\"}", schema); + assertThat(converted).isEqualTo("string here"); + + converted = convertJsonToAvro("{\"int\": 123}", schema); + assertThat(converted).isEqualTo(123); + + converted = convertJsonToAvro("null", schema); + assertThat(converted).isEqualTo(null); + } + + @Test + void unionField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_union", + "type": [ "null", "int", "TestAvroRecord"] + } + ] + }""" + ); + + String jsonPayload = "{ \"f_union\": null }"; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isNull(); + + jsonPayload = "{ \"f_union\": { \"int\": 123 } }"; + record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isEqualTo(123); + + //inner-record's name should be fully-qualified! 
+ jsonPayload = "{ \"f_union\": { \"com.test.TestAvroRecord\": { \"f_union\": { \"int\": 123 } } } }"; + record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class); + var innerRec = (GenericData.Record) record.get("f_union"); + assertThat(innerRec.get("f_union")).isEqualTo(123); + } + + @Test + void mapField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "long_map", + "type": { + "type": "map", + "values" : "long", + "default": {} + } + }, + { + "name": "string_map", + "type": { + "type": "map", + "values" : "string", + "default": {} + } + }, + { + "name": "self_ref_map", + "type": { + "type": "map", + "values" : "TestAvroRecord", + "default": {} + } + } + ] + }""" + ); + + String jsonPayload = """ + { + "long_map": { + "k1": 123, + "k2": 456 + }, + "string_map": { + "k3": "s1", + "k4": "s2" + }, + "self_ref_map": { + "k5" : { + "long_map": { "_k1": 222 }, + "string_map": { "_k2": "_s1" } + } + } + } + """; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("long_map")) + .isEqualTo(Map.of("k1", 123L, "k2", 456L)); + assertThat(record.get("string_map")) + .isEqualTo(Map.of("k3", "s1", "k4", "s2")); + assertThat(record.get("self_ref_map")) + .isNotNull(); + + Map<String, Object> selfRefMapField = (Map<String, Object>) record.get("self_ref_map"); + assertThat(selfRefMapField) + .hasSize(1) + .hasEntrySatisfying("k5", v -> { + assertThat(v).isInstanceOf(GenericData.Record.class); + var innerRec = (GenericData.Record) v; + assertThat(innerRec.get("long_map")) + .isEqualTo(Map.of("_k1", 222L)); + assertThat(innerRec.get("string_map")) + .isEqualTo(Map.of("_k2", "_s1")); + }); + } + + @Test + void arrayField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_array", + "type": { + "type": "array", + "items" : "string", + "default": [] + } + } + ] + }""" + ); + + String jsonPayload = """ + { + "f_array": [ "e1", "e2" ] + } + """; + + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_array")).isEqualTo(List.of("e1", "e2")); + } + + @Test + void logicalTypesField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + String jsonPayload = """ + { + "lt_date":"1991-08-14", + "lt_decimal": 2.1617413862327545E11, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_uuid": 
"a37b75ca-097c-5d46-6119-f0637922e908", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456" + } + """; + + var converted = convertJsonToAvro(jsonPayload, schema); + assertThat(converted).isInstanceOf(GenericData.Record.class); + + var record = (GenericData.Record) converted; + + assertThat(record.get("lt_date")) + .isEqualTo(LocalDate.of(1991, 8, 14)); + assertThat(record.get("lt_decimal")) + .isEqualTo(new BigDecimal("2.1617413862327545E11")); + assertThat(record.get("lt_time_millis")) + .isEqualTo(LocalTime.parse("10:15:30.001")); + assertThat(record.get("lt_time_micros")) + .isEqualTo(LocalTime.parse("10:15:30.123456")); + assertThat(record.get("lt_timestamp_millis")) + .isEqualTo(Instant.parse("2007-12-03T10:15:30.123Z")); + assertThat(record.get("lt_timestamp_micros")) + .isEqualTo(Instant.parse("2007-12-13T10:15:30.123456Z")); + assertThat(record.get("lt_local_timestamp_millis")) + .isEqualTo(LocalDateTime.parse("2017-12-03T10:15:30.123")); + assertThat(record.get("lt_local_timestamp_micros")) + .isEqualTo(LocalDateTime.parse("2017-12-13T10:15:30.123456")); + } + } + + // checking conversion of KafkaAvroDeserializer output to JsonNode + @Nested + class FromAvroToJson { + + @Test + void primitiveRoot() { + assertThat(convertAvroToJson("str", createSchema("\"string\""))) + .isEqualTo(new TextNode("str")); + + assertThat(convertAvroToJson(123, createSchema("\"int\""))) + .isEqualTo(new IntNode(123)); + + assertThat(convertAvroToJson(123L, createSchema("\"long\""))) + .isEqualTo(new LongNode(123)); + + assertThat(convertAvroToJson(123.1F, createSchema("\"float\""))) + .isEqualTo(new FloatNode(123.1F)); + + assertThat(convertAvroToJson(123.1, createSchema("\"double\""))) + .isEqualTo(new DoubleNode(123.1)); + + assertThat(convertAvroToJson(true, createSchema("\"boolean\""))) + .isEqualTo(BooleanNode.valueOf(true)); + + assertThat(convertAvroToJson(ByteBuffer.wrap(Longs.toByteArray(123L)), createSchema("\"bytes\""))) + .isEqualTo(new TextNode(new String(Longs.toByteArray(123L), StandardCharsets.ISO_8859_1))); + } + + @SneakyThrows + @Test + void primitiveTypedFields() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_int", + "type": "int" + }, + { + "name": "f_long", + "type": "long" + }, + { + "name": "f_string", + "type": "string" + }, + { + "name": "f_boolean", + "type": "boolean" + }, + { + "name": "f_float", + "type": "float" + }, + { + "name": "f_double", + "type": "double" + }, + { + "name": "f_enum", + "type" : { + "type": "enum", + "name": "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } + }, + { + "name" : "f_fixed", + "type" : { "type" : "fixed" ,"size" : 8, "name": "long_encoded" } + }, + { + "name" : "f_bytes", + "type": "bytes" + } + ] + }""" + ); + + byte[] fixedFieldValue = Longs.toByteArray(1234L); + byte[] bytesFieldValue = Longs.toByteArray(2345L); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("f_int", 123); + inputRecord.put("f_long", 4294967294L); + inputRecord.put("f_string", "string here"); + inputRecord.put("f_boolean", true); + inputRecord.put("f_float", 123.1f); + inputRecord.put("f_double", 123456.123456); + inputRecord.put("f_enum", new GenericData.EnumSymbol(schema.getField("f_enum").schema(), "SPADES")); + inputRecord.put("f_fixed", new 
GenericData.Fixed(schema.getField("f_fixed").schema(), fixedFieldValue)); + inputRecord.put("f_bytes", ByteBuffer.wrap(bytesFieldValue)); + + String expectedJson = """ + { + "f_int": 123, + "f_long": 4294967294, + "f_string": "string here", + "f_boolean": true, + "f_float": 123.1, + "f_double": 123456.123456, + "f_enum": "SPADES", + "f_fixed": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\u0004Γ’", + "f_bytes": "\\u0000\\u0000\\u0000\\u0000\\u0000\\u0000\\t)" + } + """; + + assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema)); + } + + @Test + void logicalTypesField() { + var schema = createSchema( + """ + { + "type": "record", + "name": "TestAvroRecord", + "fields": [ + { + "name": "lt_date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "lt_uuid", + "type": { "type": "string", "logicalType": "uuid" } + }, + { + "name": "lt_decimal", + "type": { "type": "bytes", "logicalType": "decimal", "precision": 22, "scale":10 } + }, + { + "name": "lt_time_millis", + "type": { "type": "int", "logicalType": "time-millis"} + }, + { + "name": "lt_time_micros", + "type": { "type": "long", "logicalType": "time-micros"} + }, + { + "name": "lt_timestamp_millis", + "type": { "type": "long", "logicalType": "timestamp-millis" } + }, + { + "name": "lt_timestamp_micros", + "type": { "type": "long", "logicalType": "timestamp-micros" } + }, + { + "name": "lt_local_timestamp_millis", + "type": { "type": "long", "logicalType": "local-timestamp-millis" } + }, + { + "name": "lt_local_timestamp_micros", + "type": { "type": "long", "logicalType": "local-timestamp-micros" } + } + ] + }""" + ); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("lt_date", LocalDate.of(1991, 8, 14)); + inputRecord.put("lt_uuid", UUID.fromString("a37b75ca-097c-5d46-6119-f0637922e908")); + inputRecord.put("lt_decimal", new BigDecimal("2.16")); + inputRecord.put("lt_time_millis", LocalTime.parse("10:15:30.001")); + inputRecord.put("lt_time_micros", LocalTime.parse("10:15:30.123456")); + inputRecord.put("lt_timestamp_millis", Instant.parse("2007-12-03T10:15:30.123Z")); + inputRecord.put("lt_timestamp_micros", Instant.parse("2007-12-13T10:15:30.123456Z")); + inputRecord.put("lt_local_timestamp_millis", LocalDateTime.parse("2017-12-03T10:15:30.123")); + inputRecord.put("lt_local_timestamp_micros", LocalDateTime.parse("2017-12-13T10:15:30.123456")); + + String expectedJson = """ + { + "lt_date":"1991-08-14", + "lt_uuid": "a37b75ca-097c-5d46-6119-f0637922e908", + "lt_decimal": 2.16, + "lt_time_millis": "10:15:30.001", + "lt_time_micros": "10:15:30.123456", + "lt_timestamp_millis": "2007-12-03T10:15:30.123Z", + "lt_timestamp_micros": "2007-12-13T10:15:30.123456Z", + "lt_local_timestamp_millis": "2017-12-03T10:15:30.123", + "lt_local_timestamp_micros": "2017-12-13T10:15:30.123456" + } + """; + + assertJsonsEqual(expectedJson, convertAvroToJson(inputRecord, schema)); + } + + @Test + void unionField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "f_union", + "type": [ "null", "int", "TestAvroRecord"] + } + ] + }""" + ); + + var r = new GenericData.Record(schema); + r.put("f_union", null); + assertJsonsEqual(" {}", convertAvroToJson(r, schema)); + + r = new GenericData.Record(schema); + r.put("f_union", 123); + assertJsonsEqual(" { \"f_union\" : { \"int\" : 123 } }", convertAvroToJson(r, schema)); + + + r = new GenericData.Record(schema); + var innerRec = new 
GenericData.Record(schema); + innerRec.put("f_union", 123); + r.put("f_union", innerRec); + assertJsonsEqual( + " { \"f_union\" : { \"com.test.TestAvroRecord\" : { \"f_union\" : { \"int\" : 123 } } } }", + convertAvroToJson(r, schema) + ); + } + + } + + private Schema createSchema(String schema) { + return new AvroSchema(schema).rawSchema(); + } + + @SneakyThrows + private void assertJsonsEqual(String expectedJson, JsonNode actual) { + var mapper = new JsonMapper(); + assertThat(actual.toPrettyString()) + .isEqualTo(mapper.readTree(expectedJson).toPrettyString()); + } + +}
test
test
2023-06-06T12:57:58
"2022-11-15T17:35:22Z"
nitin1677
train
provectus/kafka-ui/3720_3810
provectus/kafka-ui
provectus/kafka-ui/3720
provectus/kafka-ui/3810
[ "keyword_pr_to_issue" ]
e7429ce6c6e93497ec4c5c8ae530236e5207f630
fdd9ad94c11d44259ef26bf4b2dc9a8bd139f607
[ "Hello there fl0wx! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Forgot frontend lol", "@David-DB88 \r\n\r\nTODO:\r\nChange RBAC permissions: For `ResourceType.CONNECT` all restart actions should have `Action.RESTART` instead of `Action.EDIT`" ]
[ "pls move dto creation to `.map()` method where `setUserInfo ` is done" ]
"2023-05-11T08:37:29Z"
[ "type/enhancement", "scope/backend", "scope/frontend", "status/accepted", "area/rbac" ]
RBAC: KC: Impl restart permissions
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? _No response_ ### Describe the feature you're interested in I do want a new permission for connector / connector task restarts. As I deploy and manage connectors via Strimzi, I don't want the frontend user to edit or remove the tasks. But in case of an issue / tests it would be helpful if tasks could be restarted via the UI with a separate permission. ### Describe alternatives you've considered _No response_ ### Version you're running 0.6.2 ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java index a86b6db5a07..b6522647b45 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/AccessController.java @@ -12,8 +12,11 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; import org.springframework.http.ResponseEntity; import org.springframework.security.core.context.ReactiveSecurityContextHolder; import org.springframework.security.core.context.SecurityContext; @@ -23,15 +26,12 @@ @RestController @RequiredArgsConstructor +@Slf4j public class AccessController implements AuthorizationApi { private final AccessControlService accessControlService; public Mono<ResponseEntity<AuthenticationInfoDTO>> getUserAuthInfo(ServerWebExchange exchange) { - AuthenticationInfoDTO dto = new AuthenticationInfoDTO(); - dto.setRbacEnabled(accessControlService.isRbacEnabled()); - UserInfoDTO userInfo = new UserInfoDTO(); - Mono<List<UserPermissionDTO>> permissions = accessControlService.getUser() .map(user -> accessControlService.getRoles() .stream() @@ -49,13 +49,11 @@ public Mono<ResponseEntity<AuthenticationInfoDTO>> getUserAuthInfo(ServerWebExch return userName .zipWith(permissions) .map(data -> { - userInfo.setUsername(data.getT1()); - userInfo.setPermissions(data.getT2()); - - dto.setUserInfo(userInfo); + var dto = new AuthenticationInfoDTO(accessControlService.isRbacEnabled()); + dto.setUserInfo(new UserInfoDTO(data.getT1(), data.getT2())); return dto; }) - .switchIfEmpty(Mono.just(dto)) + .switchIfEmpty(Mono.just(new AuthenticationInfoDTO(accessControlService.isRbacEnabled()))) .map(ResponseEntity::ok); } @@ -70,11 +68,22 @@ private List<UserPermissionDTO> mapPermissions(List<Permission> permissions, Lis dto.setActions(permission.getActions() .stream() .map(String::toUpperCase) - .map(ActionDTO::valueOf) + .map(this::mapAction) + .filter(Objects::nonNull) .collect(Collectors.toList())); return dto; }) .collect(Collectors.toList()); } + @Nullable + private ActionDTO mapAction(String name) { + try { + return ActionDTO.fromValue(name); + } catch (IllegalArgumentException e) { + log.warn("Unknown Action [{}], skipping", name); + return null; + } + } + } diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index b89f8d09635..4aa79cff1d1 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -3452,6 +3452,7 @@ components: - MESSAGES_READ - MESSAGES_PRODUCE - MESSAGES_DELETE + - RESTART ResourceType: type: string
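A role granting restart-but-not-edit access on connectors would then look roughly like the sketch below. This is an illustration only, assuming the YAML layout documented for kafka-ui's RBAC config; the role name, cluster, and subject values are placeholders, and only the `restart` action comes from the patch above (which uppercases action strings before mapping them).

```yaml
rbac:
  roles:
    - name: "connect-operators"        # hypothetical role name
      clusters:
        - local                        # placeholder cluster name
      subjects:
        - provider: ldap               # any supported provider
          type: group
          value: "kafka-ops"           # placeholder group
      permissions:
        - resource: connect
          value: ".*"                  # all connects/connectors
          actions: [ "view", "restart" ]   # restart allowed, edit withheld
```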
null
test
test
2023-05-11T11:13:09
"2023-04-26T13:02:01Z"
fl0wx
train
provectus/kafka-ui/2893_3825
provectus/kafka-ui
provectus/kafka-ui/2893
provectus/kafka-ui/3825
[ "connected" ]
4b724fd852f9814ec3ab9316a739cc2d8a1f282c
742e6eed3eba0b2b351dfb25aec5b4fb63f2e64f
[ "@Haarolean What's the deprecation policy in Kafka UI?\r\n\r\nShould this be done within the 0.5 milestone or one version after?", "@joschi in 0.6 :) ", "@joschi wanna nuke it? " ]
[]
"2023-05-15T13:44:00Z"
[ "good first issue", "scope/backend", "status/accepted", "type/chore" ]
Drop deprecated protobuf single file property
TODO drop deprecated protobuf single file property _Originally posted by @Haarolean in https://github.com/provectus/kafka-ui/pull/2874#discussion_r1015295797_
[ ".gitignore", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java" ]
[ ".gitignore", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java" ]
diff --git a/.gitignore b/.gitignore index 55b770349f8..a12e7753760 100644 --- a/.gitignore +++ b/.gitignore @@ -31,6 +31,9 @@ build/ .vscode/ /kafka-ui-api/app/node +### SDKMAN ### +.sdkmanrc + .DS_Store *.code-workspace diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java index 8a4c28a320a..05809e26912 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerde.java @@ -50,7 +50,6 @@ import java.io.ByteArrayInputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.nio.file.Paths; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -204,17 +203,13 @@ record Configuration(@Nullable Descriptor defaultMessageDescriptor, Map<String, Descriptor> keyMessageDescriptorMap) { static boolean canBeAutoConfigured(PropertyResolver kafkaClusterProperties) { - Optional<String> protobufFile = kafkaClusterProperties.getProperty("protobufFile", String.class); Optional<List<String>> protobufFiles = kafkaClusterProperties.getListProperty("protobufFiles", String.class); Optional<String> protobufFilesDir = kafkaClusterProperties.getProperty("protobufFilesDir", String.class); - return protobufFilesDir.isPresent() - || protobufFile.isPresent() - || protobufFiles.filter(files -> !files.isEmpty()).isPresent(); + return protobufFilesDir.isPresent() || protobufFiles.filter(files -> !files.isEmpty()).isPresent(); } static Configuration create(PropertyResolver properties) { var protobufSchemas = loadSchemas( - properties.getProperty("protobufFile", String.class), properties.getListProperty("protobufFiles", String.class), properties.getProperty("protobufFilesDir", String.class) ); @@ -272,12 +267,11 @@ private static Map<String, Descriptor> populateDescriptors(Map<String, Descripto } @VisibleForTesting - static Map<Path, ProtobufSchema> loadSchemas(Optional<String> protobufFile, - Optional<List<String>> protobufFiles, + static Map<Path, ProtobufSchema> loadSchemas(Optional<List<String>> protobufFiles, Optional<String> protobufFilesDir) { if (protobufFilesDir.isPresent()) { - if (protobufFile.isPresent() || protobufFiles.isPresent()) { - log.warn("protobufFile and protobufFiles properties will be ignored, since protobufFilesDir provided"); + if (protobufFiles.isPresent()) { + log.warn("protobufFiles properties will be ignored, since protobufFilesDir provided"); } List<ProtoFile> loadedFiles = new ProtoSchemaLoader(protobufFilesDir.get()).load(); Map<String, ProtoFileElement> allPaths = loadedFiles.stream() @@ -288,10 +282,8 @@ static Map<Path, ProtobufSchema> loadSchemas(Optional<String> protobufFile, f -> new ProtobufSchema(f.toElement(), List.of(), allPaths))); } //Supporting for backward-compatibility. Normally, protobufFilesDir setting should be used - return Stream.concat( - protobufFile.stream(), - protobufFiles.stream().flatMap(Collection::stream) - ) + return protobufFiles.stream() + .flatMap(Collection::stream) .distinct() .map(Path::of) .collect(Collectors.toMap(path -> path, path -> new ProtobufSchema(readFileAsString(path))));
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java index ab99df74de6..1b295dc77a4 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/ProtobufFileSerdeTest.java @@ -47,7 +47,6 @@ class ProtobufFileSerdeTest { @BeforeEach void setUp() throws Exception { Map<Path, ProtobufSchema> files = ProtobufFileSerde.Configuration.loadSchemas( - Optional.empty(), Optional.empty(), Optional.of(protoFilesDir()) ); @@ -107,15 +106,6 @@ void canBeAutoConfiguredReturnsNoProtoPropertiesProvided() { .isFalse(); } - @Test - void canBeAutoConfiguredReturnsTrueIfNoProtoFileHasBeenProvided() { - PropertyResolver resolver = mock(PropertyResolver.class); - when(resolver.getProperty("protobufFile", String.class)) - .thenReturn(Optional.of("file.proto")); - assertThat(Configuration.canBeAutoConfigured(resolver)) - .isTrue(); - } - @Test void canBeAutoConfiguredReturnsTrueIfProtoFilesHasBeenProvided() { PropertyResolver resolver = mock(PropertyResolver.class); @@ -193,13 +183,10 @@ void unknownSchemaAsTopicSchemaForKeyThrowsException() { @Test void createConfigureFillsDescriptorMappingsWhenProtoFilesListProvided() throws Exception { PropertyResolver resolver = mock(PropertyResolver.class); - when(resolver.getProperty("protobufFile", String.class)) - .thenReturn(Optional.of( - ResourceUtils.getFile("classpath:protobuf-serde/sensor.proto").getPath())); - when(resolver.getListProperty("protobufFiles", String.class)) .thenReturn(Optional.of( List.of( + ResourceUtils.getFile("classpath:protobuf-serde/sensor.proto").getPath(), ResourceUtils.getFile("classpath:protobuf-serde/address-book.proto").getPath()))); when(resolver.getProperty("protobufMessageName", String.class))
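Migration note (an illustration, not part of the patch): setups still using the removed single-file property have to switch to one of the remaining options. A minimal sketch of the cluster-level config this serde auto-configures from, with placeholder paths and message names; per the code above, `protobufFilesDir` takes precedence and `protobufFiles` is ignored when both are set.

```yaml
kafka:
  clusters:
    - name: local
      # before (removed by this change):
      # protobufFile: /protos/sensor.proto
      # after, either enumerate files...
      protobufFiles:
        - /protos/sensor.proto         # placeholder path
        - /protos/address-book.proto
      # ...or point at a directory (preferred; wins over protobufFiles)
      protobufFilesDir: /protos
      protobufMessageNameByTopic:
        my-topic: test.Sensor          # placeholder topic -> message mapping
```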
val
test
2023-06-06T12:57:58
"2022-11-07T11:12:35Z"
Haarolean
train
provectus/kafka-ui/2753_3827
provectus/kafka-ui
provectus/kafka-ui/2753
provectus/kafka-ui/3827
[ "connected" ]
62bee1ced8f8dc4184c22024fca77eae2de8a2b9
ab9d0e2b3f6fc54344ecb1912d974f139020ca48
[ "Hello there LaurentDanti! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "@LaurentDanti thank you for the issue!\r\n\r\n@Haarolean I think we already have issue created for fonts, maybe we add github call handling to that issue? ", "hey @LaurentDanti, thanks for reaching out.\r\nI know already at least a few cases in which people successfully use the app in the \"air gap\" environment (e.g. #1390).\r\nWe already have the fonts issue in progress (#2728), we'll keep this open for considering opting out of the version check.", "@Haarolean hello, I also have the fonts issue, UI doesn't work in isolated network, would you give workaround ?", "@Fuou we're working on it (#2372)", " @Haarolean what's the status here?", "@cassanellicarlo it's up for grabs for the 3rd-party contributors since we don't have enough capacity to address this issue.", "Hey πŸ™ what is the status woth the opt out option?", "Frontend implementation is invalid.\r\n\r\nTagged versions before:\r\n<img width=\"361\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/c44b0863-63dd-446f-a25a-a5d816843a60\">\r\nTagged versions now are missing the tag and have a commit displayed anyway:\r\n<img width=\"408\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/ed5fb888-52fa-4f8a-aa18-9d0d768d1402\">\r\n\r\nif version == versionTag display version rather than commitId\r\n<img width=\"766\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/66a9a277-d452-46df-ac17-2696fa8bba87\">\r\n\r\n@David-DB88 \r\n\r\n\r\n", "ok\r\n" ]
[ "```suggestion\r\n const currentVersion = isLatestRelease && version?.match(versionTag)\r\n```", "ΠΎΠΊ" ]
"2023-05-16T10:51:03Z"
[ "type/enhancement", "scope/backend", "scope/frontend", "status/pending-frontend" ]
Air Gap: Enhancement: Opt out of version check
Hello, I'm using your great UI for our Kafka cluster instance and it's perfect: functionality, rapidity, installation in the k8s world with Helm. But I'm trying to install this UI in an isolated network without internet access, so github.com is not allowed. Is it possible to disable the call from the UI to https://api.github.com/repos/provectus/kafka-ui/releases/latest and load the font inside the image? That way the UI could be used in an isolated network.
[ "kafka-ui-react-app/src/components/Version/Version.tsx" ]
[ "kafka-ui-react-app/src/components/Version/Version.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Version/Version.tsx b/kafka-ui-react-app/src/components/Version/Version.tsx index 6788605d92b..0d0fdcff4b6 100644 --- a/kafka-ui-react-app/src/components/Version/Version.tsx +++ b/kafka-ui-react-app/src/components/Version/Version.tsx @@ -8,9 +8,15 @@ import * as S from './Version.styled'; const Version: React.FC = () => { const { data: latestVersionInfo = {} } = useLatestVersion(); - const { buildTime, commitId, isLatestRelease } = latestVersionInfo.build; + const { buildTime, commitId, isLatestRelease, version } = + latestVersionInfo.build; const { versionTag } = latestVersionInfo?.latestRelease || ''; + const currentVersion = + isLatestRelease && version?.match(versionTag) + ? versionTag + : formatTimestamp(buildTime); + return ( <S.Wrapper> {!isLatestRelease && ( @@ -32,7 +38,7 @@ const Version: React.FC = () => { </S.CurrentCommitLink> </div> )} - <S.CurrentVersion>{formatTimestamp(buildTime)}</S.CurrentVersion> + <S.CurrentVersion>{currentVersion}</S.CurrentVersion> </S.Wrapper> ); };
null
train
test
2023-05-17T11:34:12
"2022-10-14T10:49:29Z"
LaurentDanti
train
provectus/kafka-ui/3824_3829
provectus/kafka-ui
provectus/kafka-ui/3824
provectus/kafka-ui/3829
[ "connected" ]
baeb494f532bdca103e7f8d8e8f627a921d76bb0
62bee1ced8f8dc4184c22024fca77eae2de8a2b9
[ "The only thing worth paying attention here is this:\r\n\r\n```\r\n11:13:13,458 |-WARN in ch.qos.logback.core.ConsoleAppender[STDOUT] - This appender no longer admits a layout as a sub-component, set an encoder instead.\r\n11:13:13,458 |-WARN in ch.qos.logback.core.ConsoleAppender[STDOUT] - To ensure compatibility, wrapping your layout in LayoutWrappingEncoder.\r\n```\r\n\r\n@iliax guess we might need to update the appender within logback's xml? Might be related to spring version bump." ]
[]
"2023-05-16T15:46:48Z"
[ "good first issue", "scope/backend", "type/chore" ]
Log warnings after upgrading from version 0.6.2 to 0.7.0
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) I use 'persistent' version of KafkaUI without Docker and provide all parameters as Env-vars in my shell-script. After upgrading from version 0.6.2 to 0.7.0 I see in the KafkaUI log-file messages 'Could NOT find resource [logback.xml]'. I also see messages about the absence of some parameters for starting KafkaUI, although I have them set: 2023-05-15 11:13:13,619 INFO [main] c.p.k.u.KafkaUiApplication: No active profile set, falling back to 1 default profile: "default" 2023-05-15 11:13:16,761 DEBUG [main] c.p.k.u.s.SerdesInitializer: Configuring serdes for cluster KAFKATS_TEST 2023-05-15 11:13:17,380 INFO [main] o.s.b.a.e.w.EndpointLinksResolver: Exposing 2 endpoint(s) beneath base path '/actuator' 2023-05-15 11:13:17,420 INFO [main] o.s.s.l.u.DefaultLdapAuthoritiesPopulator: Will not perform group search since groupSearchBase is null. 2023-05-15 11:13:17,562 INFO [main] c.p.k.u.c.a.LdapSecurityConfig: Configuring LDAP authentication. 2023-05-15 11:13:17,659 INFO [main] o.s.l.c.s.AbstractContextSource: Property 'userDn' not set - anonymous context will be used for read-write operations 2 ### Expected behavior The provided warnings are absent ### Your installation details KafkaUI - [fdd9ad9](https://github.com/provectus/kafka-ui/commit/fdd9ad9) 11.05.2023, 17:02:15 KafkaUI startup script with configuration parameters: export DYNAMIC_CONFIG_ENABLED='false' export SECURITY_BASIC_ENABLED='false' export SERVER_PORT='8080' export SERVER_SSL_ENABLED='true' export SERVER_SSL_KEY_STORE_TYPE='JKS' export SERVER_SSL_KEY_STORE='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export SERVER_SSL_KEY_STORE_PASSWORD='***' export AUTH_TYPE='LDAP' export SPRING_LDAP_URLS='ldaps://****.**.*.com:3269 ldaps://****.**.*.com:3269' export SPRING_LDAP_BASE='DC=\**,DC=\*,DC=com' export SPRING_LDAP_USER_FILTER_SEARCH_BASE='DC=\**,DC=\*,DC=com' export SPRING_LDAP_USER_FILTER_SEARCH_FILTER='(&(sAMAccountName={0})(|(memberOf=CN=kafka-admin,OU=Service,DC=**,DC=*,DC=com)(memberOf=CN=admin,OU=Service,DC=\**,DC=\*,DC=com)))' export SPRING_LDAP_ADMIN_USER='CN=ldap-user,OU=Service,DC=\**,DC=\*,DC=com' export SPRING_LDAP_ADMIN_PASSWORD='***' export KAFKA_CLUSTERS_0_METRICS_PORT='9094' export KAFKA_CLUSTERS_0_METRICS_SSL='false' export KAFKA_CLUSTERS_0_METRICS_TYPE='false' export KAFKA_CLUSTERS_0_NAME='KAFKATS_TEST' export KAFKA_CLUSTERS_0_READONLY='false' export KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS='kafka-tst.**.*.com:9093' export KAFKA_CLUSTERS_0_ZOOKEEPER='kafka-tst.**.*.com:2182' export KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD='***' export KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL='SASL_SSL' export KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM='GSSAPI' export KAFKA_CLUSTERS_0_PROPERTIES_SASL_KERBEROS_SERVICE_NAME='kafkats' export KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG='com.sun.security.auth.module.Krb5LoginModule required serviceName="kafkats" useKeyTab=true storeKey=true keyTab="/disk01/kafka-ui-api-v0.7.0/kafka-tst.keytab" 
principal="kafkats/kafka-tst.**.*.com@**.*.COM";' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_TYPE='JKS' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD='***' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEY_PASSWORD='***' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_TYPE='JKS' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD='***' export KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM='' export KAFKA_CLUSTERS_0_SCHEMAREGISTRY='https://kafka-tst.**.*.com:8081' export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD='***' export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYPASSWORD='***' export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks' export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD='***' export JAVA_OPTS=" -Dzookeeper.client.secure=false -Dzookeeper.ssl.client.enable=false -Djavax.net.ssl.keyStoreType=jks -Djavax.net.ssl.keyStore=/disk01/kafka-ui-api-v0.7.0/keystore.jks -Djavax.net.ssl.keyStorePassword=*** -Djavax.net.ssl.trustStoreType=jks -Djavax.net.ssl.trustStore=/disk01/kafka-ui-api-v0.7.0/keystore.jks -Djavax.net.ssl.trustStorePassword==***" export JAVA_OPTS="$JAVA_OPTS -Xms2g -Xmx4g -Djava.awt.headless=true --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED" cd /disk01/kafka-ui-api-v0.7.0 nohup /opt/java/jdk-17.0.3.1/bin/java $JAVA_OPTS -jar /disk01/kafka-ui-api-v0.7.0/kafka-ui-api-v0.7.0.jar>/disk01/kafka-ui-api-v0.7.0/kafkaui-console.log 2>&1 & ### Steps to reproduce Start KafkaUI without dokcer with similar startup parameters as my ### Screenshots ![image](https://github.com/provectus/kafka-ui/assets/117813220/07e48221-23e6-404d-bc50-3ff672e7c89d) ### Logs 11:13:12,650 |-INFO in ch.qos.logback.classic.LoggerContext[default] - This is logback-classic version 1.4.6 11:13:12,684 |-INFO in ch.qos.logback.classic.LoggerContext[default] - Could NOT find resource [logback-test.xml] 11:13:12,685 |-INFO in ch.qos.logback.classic.LoggerContext[default] - Could NOT find resource [logback.xml] 11:13:12,693 |-INFO in ch.qos.logback.classic.BasicConfigurator@1ffe63b9 - Setting up default configuration. 11:13:13,307 |-INFO in ch.qos.logback.core.joran.spi.ConfigurationWatchList@51e5fc98 - URL [jar:file:/disk01/kafka-ui-api-v0.7.0/kafka-ui-api-v0.7.0.jar!/BOOT-INF/classes!/logback-spring.xml] is not of type file 11:13:13,420 |-INFO in ch.qos.logback.core.model.processor.AppenderModelHandler - Processing appender named [STDOUT] 11:13:13,421 |-INFO in ch.qos.logback.core.model.processor.AppenderModelHandler - About to instantiate appender of type [ch.qos.logback.core.ConsoleAppender] 11:13:13,458 |-WARN in ch.qos.logback.core.ConsoleAppender[STDOUT] - This appender no longer admits a layout as a sub-component, set an encoder instead. 11:13:13,458 |-WARN in ch.qos.logback.core.ConsoleAppender[STDOUT] - To ensure compatibility, wrapping your layout in LayoutWrappingEncoder. 
11:13:13,458 |-WARN in ch.qos.logback.core.ConsoleAppender[STDOUT] - See also http://logback.qos.ch/codes.html#layoutInsteadOfEncoder for details 11:13:13,458 |-INFO in ch.qos.logback.classic.model.processor.RootLoggerModelHandler - Setting level of ROOT logger to INFO 11:13:13,458 |-INFO in ch.qos.logback.classic.jul.LevelChangePropagator@7c469c48 - Propagating INFO level on Logger[ROOT] onto the JUL framework 11:13:13,459 |-INFO in ch.qos.logback.core.model.processor.AppenderRefModelHandler - Attaching appender named [STDOUT] to Logger[ROOT] 11:13:13,459 |-INFO in ch.qos.logback.core.model.processor.DefaultProcessor@12e61fe6 - End of configuration. 11:13:13,459 |-INFO in org.springframework.boot.logging.logback.SpringBootJoranConfigurator@7ee955a8 - Registering current configuration as safe fallback point _ _ ___ __ _ _ _ __ __ _ | | | |_ _| / _|___ _ _ /_\ _ __ __ _ __| |_ ___ | |/ /__ _ / _| |_____ | |_| || | | _/ _ | '_| / _ \| '_ / _` / _| ' \/ -_) | ' </ _` | _| / / _`| \___/|___| |_| \___|_| /_/ \_| .__\__,_\__|_||_\___| |_|\_\__,_|_| |_\_\__,| |_| 2023-05-15 11:13:13,542 INFO [background-preinit] o.h.v.i.u.Version: HV000001: Hibernate Validator 8.0.0.Final 2023-05-15 11:13:13,618 INFO [main] c.p.k.u.KafkaUiApplication: Starting KafkaUiApplication using Java 17.0.3.1 with PID 148550 (/disk01/kafka-ui-api-v0.7.0/kafka-ui-api-v0.7.0.jar started by root in /disk01/kafka-ui-api-v0.7.0) 2023-05-15 11:13:13,618 DEBUG [main] c.p.k.u.KafkaUiApplication: Running with Spring Boot v3.0.5, Spring v6.0.7 2023-05-15 11:13:13,619 INFO [main] c.p.k.u.KafkaUiApplication: No active profile set, falling back to 1 default profile: "default" 2023-05-15 11:13:16,761 DEBUG [main] c.p.k.u.s.SerdesInitializer: Configuring serdes for cluster KAFKATS_TEST 2023-05-15 11:13:17,380 INFO [main] o.s.b.a.e.w.EndpointLinksResolver: Exposing 2 endpoint(s) beneath base path '/actuator' 2023-05-15 11:13:17,420 INFO [main] o.s.s.l.u.DefaultLdapAuthoritiesPopulator: Will not perform group search since groupSearchBase is null. 2023-05-15 11:13:17,562 INFO [main] c.p.k.u.c.a.LdapSecurityConfig: Configuring LDAP authentication. 2023-05-15 11:13:17,659 INFO [main] o.s.l.c.s.AbstractContextSource: Property 'userDn' not set - anonymous context will be used for read-write operations 2023-05-15 11:13:17,948 INFO [main] o.s.b.w.e.n.NettyWebServer: Netty started on port 8080 2023-05-15 11:13:17,966 INFO [main] c.p.k.u.KafkaUiApplication: Started KafkaUiApplication in 5.086 seconds (process running for 5.766) 2023-05-15 11:13:20,012 DEBUG [parallel-3] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: KAFKATS_TEST ### Additional context _No response_
[ "kafka-ui-api/src/main/resources/application.yml", "kafka-ui-api/src/main/resources/logback-spring.xml" ]
[ "kafka-ui-api/src/main/resources/application.yml", "kafka-ui-api/src/main/resources/logback-spring.xml" ]
[]
diff --git a/kafka-ui-api/src/main/resources/application.yml b/kafka-ui-api/src/main/resources/application.yml index 864289c2d9a..71af4e8d925 100644 --- a/kafka-ui-api/src/main/resources/application.yml +++ b/kafka-ui-api/src/main/resources/application.yml @@ -17,4 +17,5 @@ logging: root: INFO com.provectus: DEBUG reactor.netty.http.server.AccessLog: INFO + org.hibernate.validator: WARN diff --git a/kafka-ui-api/src/main/resources/logback-spring.xml b/kafka-ui-api/src/main/resources/logback-spring.xml index a0705128172..a33692146bb 100644 --- a/kafka-ui-api/src/main/resources/logback-spring.xml +++ b/kafka-ui-api/src/main/resources/logback-spring.xml @@ -1,17 +1,14 @@ <?xml version="1.0" encoding="UTF-8"?> <configuration> - <appender name="STDOUT" - class="ch.qos.logback.core.ConsoleAppender"> - <layout class="ch.qos.logback.classic.PatternLayout"> - <Pattern> - %black(%d{ISO8601}) %highlight(%-5level) [%blue(%t)] %yellow(%c{1}): %msg%n%throwable - </Pattern> - </layout> + <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> + <encoder> + <pattern>%black(%d{ISO8601}) %highlight(%-5level) [%blue(%t)] %yellow(%c{1}): %msg%n%throwable</pattern> + </encoder> </appender> <root level="info"> - <appender-ref ref="STDOUT" /> + <appender-ref ref="STDOUT"/> </root> </configuration>
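Since the reporter wires everything through environment variables, it may be worth noting that the same logger levels can be tuned without touching the XML at all, via Spring Boot's relaxed binding. A hypothetical addition to the startup script from the report (not part of the patch):

```sh
# Maps to logging.level.root / logging.level.com.provectus in application.yml
export LOGGING_LEVEL_ROOT='INFO'
export LOGGING_LEVEL_COM_PROVECTUS='DEBUG'
```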
null
train
test
2023-05-15T15:57:32
"2023-05-15T11:09:21Z"
sm-shevchenko
train
provectus/kafka-ui/3818_3833
provectus/kafka-ui
provectus/kafka-ui/3818
provectus/kafka-ui/3833
[ "connected" ]
62bee1ced8f8dc4184c22024fca77eae2de8a2b9
f22c910f5cb19dffc74d997cb7b668d4ab4be65e
[]
[]
"2023-05-17T09:31:39Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
FE: Latest version is null
![image](https://github.com/provectus/kafka-ui/assets/1494347/9e9d7e84-8bf8-4093-bf60-8f0357de7d53) 1. Display "Latest version is UNKNOWN" in case of nulls 2. Replace "Current latest" with "latest"
[ "kafka-ui-react-app/src/components/Version/Version.tsx" ]
[ "kafka-ui-react-app/src/components/Version/Version.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Version/Version.tsx b/kafka-ui-react-app/src/components/Version/Version.tsx index 6788605d92b..3fc1f05774f 100644 --- a/kafka-ui-react-app/src/components/Version/Version.tsx +++ b/kafka-ui-react-app/src/components/Version/Version.tsx @@ -15,7 +15,9 @@ const Version: React.FC = () => { <S.Wrapper> {!isLatestRelease && ( <S.OutdatedWarning - title={`Your app version is outdated. Current latest version is ${versionTag}`} + title={`Your app version is outdated. Latest version is ${ + versionTag || 'UNKNOWN' + }`} > <WarningIcon /> </S.OutdatedWarning>
null
train
test
2023-05-17T11:34:12
"2023-05-15T07:13:28Z"
Haarolean
train
provectus/kafka-ui/3812_3840
provectus/kafka-ui
provectus/kafka-ui/3812
provectus/kafka-ui/3840
[ "keyword_pr_to_issue" ]
62bee1ced8f8dc4184c22024fca77eae2de8a2b9
f7d85d86e6016f284e0192eba2844c0a89190344
[ "@Haarolean Glad you're looking into CVE issues. I ran [grype](https://github.com/anchore/grype) against the latest image locally, and got the list below of CVE vulnerabilities. Are you going to be addressing them as well?\r\n```\r\nlibcrypto1.1 1.1.1t-r3 apk CVE-2023-0466 Medium\r\nlibssl1.1 1.1.1t-r3 apk CVE-2023-0466 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2014-3488 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2015-2156 High\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2019-16869 High\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2019-20444 Critical dzr libclc pocketbase virtualfish\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2019-20445 Critical\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-21290 Medium llamachat lo-rain uhk-agent\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-21295 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-21409 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-37136 High\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-37137 High\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2021-43797 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2022-24823 Medium\r\nnetty-reactive-streams 2.0.5 java-archive CVE-2022-41881 High\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2014-3488 Medium\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2015-2156 High\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2019-16869 High\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2019-20444 Critical\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2019-20445 Critical\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-21290 Medium\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-21295 Medium\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-21409 Medium\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-37136 High\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-37137 High\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2021-43797 Medium\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2022-24823 Medium rwin_arm64.tar.gz\r\nnetty-reactive-streams-http 2.0.5 java-archive CVE-2022-41881 High -2e65be/267054247/d4576751-827c-4f81-a9ab-61f248ef6c76?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIWNJYAX4CSVEH53A%2F20230517%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-Date=20\r\nreactor-netty-core 1.1.5 java-archive CVE-2014-3488 Medium ############################################################################################################################################################### 100.0%\r\nreactor-netty-core 1.1.5 java-archive CVE-2015-2156 High\r\nreactor-netty-core 1.1.5 java-archive CVE-2019-16869 High\r\nreactor-netty-core 1.1.5 java-archive CVE-2019-20444 Critical\r\nreactor-netty-core 1.1.5 java-archive CVE-2019-20445 Critical\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-21290 Medium\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-21295 Medium\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-21409 Medium\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-37136 High\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-37137 High\r\nreactor-netty-core 1.1.5 java-archive CVE-2021-43797 Medium\r\nreactor-netty-core 1.1.5 java-archive CVE-2022-24823 Medium 20 MB]\r\nreactor-netty-core 1.1.5 java-archive CVE-2022-41881 High\r\nreactor-netty-http 1.1.5 java-archive CVE-2014-3488 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2015-2156 High\r\nreactor-netty-http 1.1.5 java-archive CVE-2019-16869 
High\r\nreactor-netty-http 1.1.5 java-archive CVE-2019-20444 Critical\r\nreactor-netty-http 1.1.5 java-archive CVE-2019-20445 Critical\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-21290 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-21295 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-21409 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-37136 High\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-37137 High\r\nreactor-netty-http 1.1.5 java-archive CVE-2021-43797 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2022-24823 Medium\r\nreactor-netty-http 1.1.5 java-archive CVE-2022-41881 High\r\n```" ]
[]
"2023-05-18T09:00:55Z"
[ "scope/backend", "type/security" ]
CVE fixes, May 23
https://github.com/provectus/kafka-ui/actions/runs/4946294159/jobs/8844174426
[ "kafka-ui-api/pom.xml", "pom.xml" ]
[ "kafka-ui-api/pom.xml", "pom.xml" ]
[]
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml index 5ebefe31dfe..dc5dad08e2b 100644 --- a/kafka-ui-api/pom.xml +++ b/kafka-ui-api/pom.xml @@ -21,12 +21,6 @@ </properties> <dependencies> - <dependency> - <!--TODO: remove, when spring-boot fixed dependency to 6.0.8+ (6.0.7 has CVE) --> - <groupId>org.springframework</groupId> - <artifactId>spring-core</artifactId> - <version>6.0.8</version> - </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-webflux</artifactId> @@ -61,7 +55,7 @@ <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-lang3</artifactId> - <version>3.9</version> + <version>3.12.0</version> </dependency> <dependency> <groupId>org.projectlombok</groupId> @@ -97,7 +91,7 @@ <dependency> <groupId>software.amazon.msk</groupId> <artifactId>aws-msk-iam-auth</artifactId> - <version>1.1.5</version> + <version>1.1.6</version> </dependency> <dependency> @@ -115,7 +109,6 @@ <groupId>io.projectreactor.addons</groupId> <artifactId>reactor-extra</artifactId> </dependency> -<!-- https://github.com/provectus/kafka-ui/pull/3693 --> <dependency> <groupId>org.json</groupId> <artifactId>json</artifactId> diff --git a/pom.xml b/pom.xml index a5a450d294b..048f2370f03 100644 --- a/pom.xml +++ b/pom.xml @@ -36,7 +36,7 @@ <protobuf-java.version>3.21.9</protobuf-java.version> <scala-lang.library.version>2.13.9</scala-lang.library.version> <snakeyaml.version>2.0</snakeyaml.version> - <spring-boot.version>3.0.5</spring-boot.version> + <spring-boot.version>3.0.6</spring-boot.version> <kafka-ui-serde-api.version>1.0.0</kafka-ui-serde-api.version> <odd-oddrn-generator.version>0.1.15</odd-oddrn-generator.version> <odd-oddrn-client.version>0.1.23</odd-oddrn-client.version>
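To reproduce the kind of scan that motivated these bumps, one option is grype against the published image, as the commenter above did; an OWASP scan of the Maven tree is another. Both commands below are sketches and assume the tools are installed or fetchable:

```sh
# Scan the published image, as in the linked grype output
grype provectuslabs/kafka-ui:latest

# Scan the Maven dependency tree (plugin is resolved on demand)
mvn org.owasp:dependency-check-maven:check
```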
null
train
test
2023-05-17T11:34:12
"2023-05-11T09:34:43Z"
Haarolean
train
provectus/kafka-ui/3452_3849
provectus/kafka-ui
provectus/kafka-ui/3452
provectus/kafka-ui/3849
[ "connected" ]
17ea464ec173cb7f60f221fa0f928b661a08a209
63f71b8a052c3540a8f0018027afab59556f419c
[]
[]
"2023-05-22T06:31:19Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Checking Broker's config search
Autotest implementation for: https://app.qase.io/case/KAFKAUI-330 Description: The purpose of this case is to make sure that Broker config search works on all pages Pre-conditions: Not set Post-conditions: Not set Steps: - Login to kafka-ui - Navigate to Brokers - Select the Broker - Turn to the Configs tab - Turn to the next page - Search for a config existing on the first page. Expected results: - Check the configs on the first page - The searched config should be displayed
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java index 8bd7901a635..d2e201191ab 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java @@ -67,6 +67,14 @@ protected void clickSubmitBtn() { clickByJavaScript(submitBtn); } + protected void clickNextBtn() { + clickByJavaScript(nextBtn); + } + + protected void clickBackBtn() { + clickByJavaScript(backBtn); + } + protected void setJsonInputValue(SelenideElement jsonInput, String jsonConfig) { sendKeysByActions(jsonInput, jsonConfig.replace(" ", "")); new Actions(WebDriverRunner.getWebDriver()) diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java index f358614dc87..a3756d3bdfb 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java @@ -3,29 +3,37 @@ import static com.codeborne.selenide.Selenide.$$x; import static com.codeborne.selenide.Selenide.$x; +import com.codeborne.selenide.CollectionCondition; import com.codeborne.selenide.Condition; +import com.codeborne.selenide.ElementsCollection; import com.codeborne.selenide.SelenideElement; import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; +import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; public class BrokersConfigTab extends BasePage { - protected List<SelenideElement> editBtn = $$x("//button[@aria-label='editAction']"); - protected SelenideElement searchByKeyField = $x("//input[@placeholder='Search by Key']"); + protected ElementsCollection editBtns = $$x("//button[@aria-label='editAction']"); @Step public BrokersConfigTab waitUntilScreenReady() { waitUntilSpinnerDisappear(); - searchByKeyField.shouldBe(Condition.visible); + searchFld.shouldBe(Condition.visible); return this; } @Step public boolean isSearchByKeyVisible() { - return isVisible(searchByKeyField); + return isVisible(searchFld); + } + + @Step + public BrokersConfigTab searchConfig(String key) { + searchItem(key); + return this; } public List<SelenideElement> getColumnHeaders() { @@ -35,6 +43,61 @@ public List<SelenideElement> getColumnHeaders() { } public List<SelenideElement> getEditButtons() { - return editBtn; + return editBtns; + } + + @Step + public BrokersConfigTab clickNextButton() { + clickNextBtn(); + waitUntilSpinnerDisappear(1); + return this; + } + + private List<BrokersConfigTab.BrokersConfigItem> initGridItems() { + List<BrokersConfigTab.BrokersConfigItem> gridItemList = new ArrayList<>(); + gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) + .forEach(item -> gridItemList.add(new BrokersConfigTab.BrokersConfigItem(item))); + return gridItemList; + } + + @Step + public BrokersConfigTab.BrokersConfigItem getConfig(String key) { + return initGridItems().stream() + .filter(e -> e.getKey().equals(key)) + .findFirst().orElseThrow(); + } + + @Step + public List<BrokersConfigTab.BrokersConfigItem> getAllConfigs() { + return initGridItems(); + } + + public static class BrokersConfigItem extends BasePage { + + private final SelenideElement element; + + public BrokersConfigItem(SelenideElement element) { 
+ this.element = element; + } + + @Step + public String getKey() { + return element.$x("./td[1]").getText().trim(); + } + + @Step + public String getValue() { + return element.$x("./td[2]//span").getText().trim(); + } + + @Step + public void edit() { + element.$x("./td[2]//button").shouldBe(Condition.enabled).click(); + } + + @Step + public String getSource() { + return element.$x("./td[3]").getText().trim(); + } } } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java index 4eca65f1f47..05d571116ea 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersDetails.java @@ -1,6 +1,5 @@ package com.provectus.kafka.ui.pages.brokers; -import static com.codeborne.selenide.Selenide.$; import static com.codeborne.selenide.Selenide.$x; import com.codeborne.selenide.Condition; @@ -8,28 +7,24 @@ import com.provectus.kafka.ui.pages.BasePage; import io.qameta.allure.Step; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; -import org.openqa.selenium.By; public class BrokersDetails extends BasePage { - protected SelenideElement logDirectoriesTab = $x("//a[text()='Log directories']"); - protected SelenideElement metricsTab = $x("//a[text()='Metrics']"); protected String brokersTabLocator = "//a[text()='%s']"; @Step public BrokersDetails waitUntilScreenReady() { waitUntilSpinnerDisappear(); - Arrays.asList(logDirectoriesTab, metricsTab).forEach(element -> element.shouldBe(Condition.visible)); + $x(String.format(brokersTabLocator, DetailsTab.LOG_DIRECTORIES)).shouldBe(Condition.visible); return this; } @Step public BrokersDetails openDetailsTab(DetailsTab menu) { - $(By.linkText(menu.toString())).shouldBe(Condition.enabled).click(); + $x(String.format(brokersTabLocator, menu.toString())).shouldBe(Condition.enabled).click(); waitUntilSpinnerDisappear(); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java index 9e81a0795cc..9e0741fe747 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersList.java @@ -24,7 +24,7 @@ public BrokersList waitUntilScreenReady() { @Step public BrokersList openBroker(int brokerId) { - getBrokerItem(brokerId).openItem(); + getBroker(brokerId).openItem(); return this; } @@ -59,30 +59,30 @@ public List<SelenideElement> getAllEnabledElements() { return getEnabledColumnHeaders(); } - private List<BrokersList.BrokerGridItem> initGridItems() { - List<BrokersList.BrokerGridItem> gridItemList = new ArrayList<>(); + private List<BrokersGridItem> initGridItems() { + List<BrokersGridItem> gridItemList = new ArrayList<>(); gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0)) - .forEach(item -> gridItemList.add(new BrokersList.BrokerGridItem(item))); + .forEach(item -> gridItemList.add(new BrokersGridItem(item))); return gridItemList; } @Step - public BrokerGridItem getBrokerItem(int id) { + public BrokersGridItem getBroker(int id) { return initGridItems().stream() .filter(e -> e.getId() == id) .findFirst().orElseThrow(); } @Step - public 
List<BrokerGridItem> getAllBrokers() { + public List<BrokersGridItem> getAllBrokers() { return initGridItems(); } - public static class BrokerGridItem extends BasePage { + public static class BrokersGridItem extends BasePage { private final SelenideElement element; - public BrokerGridItem(SelenideElement element) { + public BrokersGridItem(SelenideElement element) { this.element = element; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java index b7d03dcf3a7..8341450c6cb 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicDetails.java @@ -186,7 +186,7 @@ public TopicDetails clickMessagesAddFiltersBtn() { @Step public TopicDetails clickNextButton() { - nextBtn.shouldBe(Condition.enabled).click(); + clickNextBtn(); waitUntilSpinnerDisappear(); return this; } diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java index 184178423d4..c3ef098ff0f 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/topics/TopicsList.java @@ -68,6 +68,17 @@ public TopicsList setShowInternalRadioButton(boolean select) { return this; } + @Step + public TopicsList goToLastPage() { + if (nextBtn.exists()) { + while (nextBtn.isEnabled()) { + clickNextBtn(); + waitUntilSpinnerDisappear(1); + } + } + return this; + } + @Step public TopicsList openTopic(String topicName) { getTopicItem(topicName).openItem();
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index 593cb82c65d..f96be5deecc 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -15,94 +15,87 @@ public class SmokeBacklog extends BaseManualTest { - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = BROKERS_SUITE_ID) - @QaseId(330) - @Test - public void testCaseA() { - } - @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(331) @Test - public void testCaseB() { + public void testCaseA() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(332) @Test - public void testCaseC() { + public void testCaseB() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(335) @Test - public void testCaseD() { + public void testCaseC() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(336) @Test - public void testCaseE() { + public void testCaseD() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(343) @Test - public void testCaseF() { + public void testCaseE() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(345) @Test - public void testCaseG() { + public void testCaseF() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(346) @Test - public void testCaseH() { + public void testCaseG() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(347) @Test - public void testCaseI() { + public void testCaseH() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(348) @Test - public void testCaseJ() { + public void testCaseI() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(350) @Test - public void testCaseK() { + public void testCaseJ() { } @Automation(state = NOT_AUTOMATED) @Suite(id = TOPICS_SUITE_ID) @QaseId(50) @Test - public void testCaseL() { + public void testCaseK() { } @Automation(state = NOT_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(351) @Test - public void testCaseM() { + public void testCaseL() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java index 8fd1ffde159..b58e0f678c3 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java @@ -4,12 +4,17 @@ import com.codeborne.selenide.Condition; import com.provectus.kafka.ui.BaseTest; +import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab; +import io.qameta.allure.Issue; import io.qase.api.annotation.QaseId; import org.testng.Assert; +import org.testng.annotations.Ignore; import org.testng.annotations.Test; public class BrokersTest extends BaseTest { + public static final int DEFAULT_BROKER_ID = 1; + @QaseId(1) @Test public void checkBrokersOverview() { @@ -25,7 +30,7 @@ public void checkExistingBrokersInCluster() { navigateToBrokers(); Assert.assertTrue(brokersList.getAllBrokers().size() > 0, "getAllBrokers()"); brokersList - .openBroker(1); + .openBroker(DEFAULT_BROKER_ID); 
brokersDetails .waitUntilScreenReady(); verifyElementsCondition(brokersDetails.getAllVisibleElements(), Condition.visible); @@ -38,4 +43,30 @@ public void checkExistingBrokersInCluster() { verifyElementsCondition(brokersConfigTab.getEditButtons(), Condition.enabled); Assert.assertTrue(brokersConfigTab.isSearchByKeyVisible(), "isSearchByKeyVisible()"); } + + @Ignore + @Issue("https://github.com/provectus/kafka-ui/issues/3347") + @QaseId(330) + @Test + public void brokersConfigSearchCheck() { + navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); + brokersDetails + .openDetailsTab(CONFIGS); + String anyConfigKey = brokersConfigTab + .getAllConfigs().stream() + .findAny().orElseThrow() + .getKey(); + brokersConfigTab + .clickNextButton(); + Assert.assertFalse(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + .toList().contains(anyConfigKey), + String.format("getAllConfigs().contains(%s)", anyConfigKey)); + brokersConfigTab + .searchConfig(anyConfigKey); + Assert.assertTrue(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + .toList().contains(anyConfigKey), + String.format("getAllConfigs().contains(%s)", anyConfigKey)); + } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java index bad6a9fcde0..f9e79b0a723 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/topics/TopicsTest.java @@ -365,12 +365,13 @@ public void checkShowInternalTopicsButton() { navigateToTopics(); topicsList .setShowInternalRadioButton(true); - SoftAssert softly = new SoftAssert(); - softly.assertTrue(topicsList.getInternalTopics().size() > 0, "getInternalTopics()"); - softly.assertTrue(topicsList.getNonInternalTopics().size() > 0, "getNonInternalTopics()"); - softly.assertAll(); + Assert.assertTrue(topicsList.getInternalTopics().size() > 0, "getInternalTopics()"); + topicsList + .goToLastPage(); + Assert.assertTrue(topicsList.getNonInternalTopics().size() > 0, "getNonInternalTopics()"); topicsList .setShowInternalRadioButton(false); + SoftAssert softly = new SoftAssert(); softly.assertEquals(topicsList.getInternalTopics().size(), 0, "getInternalTopics()"); softly.assertTrue(topicsList.getNonInternalTopics().size() > 0, "getNonInternalTopics()"); softly.assertAll();
val
test
2023-05-19T12:46:26
"2023-03-07T08:50:57Z"
ArthurNiedial
train
provectus/kafka-ui/3625_3861
provectus/kafka-ui
provectus/kafka-ui/3625
provectus/kafka-ui/3861
[ "connected" ]
0099169a2be914ccbb8e9d8f0c7e308fbb7446dc
73bd6ca3a5ae599a986b1268239221526e63deaf
[ "Hello there Dugong42! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, thanks for bringing this to our attention. \r\n\r\nWe'll def look into this shortly. ", "Hello @Dugong42 \r\nCan you please provide your docker version?", "Hi. It's Docker `1.13`", "Hi, would it possible to have an ETA for this fix ?\r\nThx :)", "https://github.com/provectus/kafka-ui/pull/3861#issuecomment-1601009788", "The one left workflow fixed within #3986\r\nThe rest is our private ECR" ]
[]
"2023-05-24T12:56:54Z"
[ "type/bug", "status/accepted", "scope/infrastructure" ]
Cannot pull image of v0.6.1 from non OCI-compliant runtimes
**Describe the bug** (Actual behavior) Hi, the update of [docker/build-push-action](https://github.com/docker/build-push-action) to `v4.0.0` (introduced by #3488) seems to cause issues with some container runtimes that are not OCI-compliant. Docker fails when pulling the `v0.6.1` image from non OCI-compliant runtimes (older Openshift platform). `Failed to pull image "<dockerproxy>/provectuslabs/kafka-ui:v0.6.1": rpc error: code = Unknown desc = missing signature key ` The changelog of docker/build-push-action states that it could introduce issues with some runtimes. > ## v4.0.0 > **Note** > Buildx v0.10 enables support for a minimal [SLSA Provenance](https://slsa.dev/provenance/) attestation, which requires support for [OCI-compliant](https://github.com/opencontainers/image-spec) multi-platform images. This may introduce issues with registry and runtime support (e.g. [Google Cloud Run and AWS Lambda](https://redirect.github.com/docker/buildx/issues/1533)). You can optionally disable the default provenance attestation functionality using `provenance: false`. This could probably be fixed by using `provenance: false` as the changelog above suggests **Expected behavior** Can pull on older docker runtimes. **Set up** I realize this is a niche issue. In my case I got it when trying to pull the image in `v0.6.1` in an older Openshift platform, and I'm guessing that the problem comes from the OCI compliance. **Steps to Reproduce** Hard to test. Maybe by installing an older docker version and pulling v0.6.1. I could potentially test an image with the suggested fix and see if it works. **Additional information** The manifest of the image previously contained (version <= `v0.6.0`): ` "mediaType": "application/vnd.docker.distribution.manifest.list.v2+json",` From `v0.6.1` it became: ` "mediaType": "application/vnd.oci.image.index.v1+json",`
[ ".github/workflows/release.yaml" ]
[ ".github/workflows/release.yaml" ]
[]
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 19128550c08..7f18f994e3c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -77,6 +77,7 @@ jobs: builder: ${{ steps.buildx.outputs.name }} context: kafka-ui-api platforms: linux/amd64,linux/arm64 + provenance: false push: true tags: | provectuslabs/kafka-ui:${{ steps.build.outputs.version }}
null
val
test
2023-05-24T14:23:48
"2023-04-05T14:56:26Z"
Dugong42
train
provectus/kafka-ui/3835_3862
provectus/kafka-ui
provectus/kafka-ui/3835
provectus/kafka-ui/3862
[ "keyword_pr_to_issue" ]
0099169a2be914ccbb8e9d8f0c7e308fbb7446dc
71a7a1ec84442e2d797d867fa5222c5a9e2f4393
[]
[]
"2023-05-24T14:47:44Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed", "type/regression", "severity/high", "area/rbac" ]
FE: Regression: Configure cluster buttons are disabled with RBAC tooltip even if there's no RBAC enabled
Reproduce: 1. docker run -it -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui 2. Observe "You don't have a required permission to perform this action" Regression of #3646
[ "kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx", "kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx" ]
[ "kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx", "kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx b/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx index 19fefd784ce..946f8b9ddaf 100644 --- a/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx +++ b/kafka-ui-react-app/src/components/Dashboard/ClusterTableActionsCell.tsx @@ -11,7 +11,8 @@ const ClusterTableActionsCell: React.FC<Props> = ({ row }) => { const { name } = row.original; const { data } = useGetUserInfo(); - const isApplicationConfig = useMemo(() => { + const hasPermissions = useMemo(() => { + if (!data?.rbacEnabled) return true; return !!data?.userInfo?.permissions.some( (permission) => permission.resource === ResourceType.APPLICATIONCONFIG ); @@ -22,7 +23,7 @@ const ClusterTableActionsCell: React.FC<Props> = ({ row }) => { buttonType="secondary" buttonSize="S" to={clusterConfigPath(name)} - canDoAction={isApplicationConfig} + canDoAction={hasPermissions} > Configure </ActionCanButton> diff --git a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx index d7ace183692..1fb8076fd48 100644 --- a/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx +++ b/kafka-ui-react-app/src/components/Dashboard/Dashboard.tsx @@ -57,7 +57,8 @@ const Dashboard: React.FC = () => { return initialColumns; }, []); - const isApplicationConfig = useMemo(() => { + const hasPermissions = useMemo(() => { + if (!data?.rbacEnabled) return true; return !!data?.userInfo?.permissions.some( (permission) => permission.resource === ResourceType.APPLICATIONCONFIG ); @@ -91,7 +92,7 @@ const Dashboard: React.FC = () => { buttonType="primary" buttonSize="M" to={clusterNewConfigPath} - canDoAction={isApplicationConfig} + canDoAction={hasPermissions} > Configure new cluster </ActionCanButton>
null
val
test
2023-05-24T14:23:48
"2023-05-17T12:24:37Z"
Haarolean
train
provectus/kafka-ui/3852_3864
provectus/kafka-ui
provectus/kafka-ui/3852
provectus/kafka-ui/3864
[ "keyword_pr_to_issue" ]
71a7a1ec84442e2d797d867fa5222c5a9e2f4393
80b748b02e60261fea687f63265cefdb1ce73902
[ "Hello there seono! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hi, has this happened to you?\r\n\r\nWith over ten years of using github I don't remember encountering a single user with over than 15 organizations, that's half of 30.\r\n\r\nUnless there are users affected by this already, this is a low priority issue.", "@Haarolean thanks for reply.\r\n\r\nI agree with you that a single user has under 15 organizations in github.\r\nIt happened to me in my company's github enterprise.\r\n\r\nSo if you don't mind, can i try this issue? I want fix this myself.\r\n" ]
[ "please add comment that it is a max value for github api / make a constant with descriptive name", "@iliax thanks for review!\r\nI make constant and add comment.\r\nplease let me know if comment is too summarized." ]
"2023-05-25T05:05:07Z"
[ "type/bug", "good first issue", "scope/backend", "status/accepted", "severity/low", "area/rbac" ]
RBAC : Github Organizations could be more than 30
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? I think organizations for authenticated user can be more than 30. https://github.com/provectus/kafka-ui/blob/ab9d0e2b3f6fc54344ecb1912d974f139020ca48/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java#L84-L94 https://docs.github.com/en/[email protected]/rest/orgs/orgs#list-organizations-for-the-authenticated-user It response only 30 organizations if you request without `per_page` parameter. So, if i use 31st organization for rbac it will not work. ### Describe the feature you're interested in I want RBAC with `oauth_github` provider and `organization` type works even if there are more than 30 organizations for authenticated user. ### Describe alternatives you've considered _No response_ ### Version you're running 902f11a ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java index 3cc33035e88..654654a05dd 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java @@ -28,6 +28,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor { private static final String ORGANIZATION_NAME = "login"; private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json"; private static final String DUMMY = "dummy"; + // The number of results (max 100) per page of list organizations for authenticated user. + private static final Integer ORGANIZATIONS_PER_PAGE = 100; @Override public boolean isApplicable(String provider, Map<String, String> customParams) { @@ -83,7 +85,9 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str final Mono<List<Map<String, Object>>> userOrganizations = webClient .get() - .uri("/orgs") + .uri(uriBuilder -> uriBuilder.path("/orgs") + .queryParam("per_page", ORGANIZATIONS_PER_PAGE) + .build()) .headers(headers -> { headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER); OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request");
null
val
test
2023-05-25T07:36:58
"2023-05-23T02:29:04Z"
seono
train
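The fix in the record above raises the page size to GitHub's documented maximum of 100 (the default is 30), so a user in more than 100 organizations would still be truncated. A minimal sketch of full pagination over the `page` parameter, assuming a Spring `WebClient` preconfigured with the GitHub base URL and auth headers; the `/user/orgs` path and class name are illustrative, not the extractor's exact wiring:

```java
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.springframework.core.ParameterizedTypeReference;
import org.springframework.web.reactive.function.client.WebClient;

public class GithubOrgPager {

  // GitHub's documented per-page maximum for list endpoints.
  private static final int PER_PAGE = 100;

  // Walks the page parameter until a short or empty page is returned.
  static List<Map<String, Object>> fetchAllOrgs(WebClient github) {
    List<Map<String, Object>> all = new ArrayList<>();
    for (int page = 1; ; page++) {
      final int current = page; // effectively final copy for the lambda
      List<Map<String, Object>> batch = github.get()
          .uri(b -> b.path("/user/orgs")
              .queryParam("per_page", PER_PAGE)
              .queryParam("page", current)
              .build())
          .retrieve()
          .bodyToFlux(new ParameterizedTypeReference<Map<String, Object>>() {})
          .collectList()
          .block(); // blocking only to keep the sketch short
      if (batch == null || batch.isEmpty()) {
        return all;
      }
      all.addAll(batch);
      if (batch.size() < PER_PAGE) {
        return all; // a short page means the listing is exhausted
      }
    }
  }
}
```

The loop stops on the first short or empty page, which is how GitHub's list endpoints signal the end of a listing.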
provectus/kafka-ui/3729_3871
provectus/kafka-ui
provectus/kafka-ui/3729
provectus/kafka-ui/3871
[ "keyword_pr_to_issue" ]
8337c9c183d632ea27b7c253d776fdfda4b19840
216c87670d8f286314559348e77f378cb9a76896
[]
[ "can you add `as const` so this `CONSUMER_GROUP_STATE_TOOLTIPS` will be readonly , and then you can harness the full power of typescript. ", "@Mgrdich \r\nhi, thanks for your review. \r\nI added `as const`" ]
"2023-05-27T08:50:45Z"
[ "good first issue", "scope/frontend", "status/accepted", "type/chore", "area/consumers" ]
Consumers: Add a tooltip for state
Empty: The group exists but has no members Stable: Consumers are happily consuming and have assigned partitions. PreparingRebalance: Something has changed, and the reassignment of partitions is required. CompletingRebalance: Partition reassignment is in progress. Dead: The group is going to be removed. It might be due to the inactivity, or the group is being migrated to different group coordinator.
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/List.tsx", "kafka-ui-react-app/src/lib/constants.ts" ]
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/List.tsx", "kafka-ui-react-app/src/lib/constants.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx index b8fac73dc1e..f5cae44df72 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx @@ -16,13 +16,15 @@ import { Table } from 'components/common/table/Table/Table.styled'; import getTagColor from 'components/common/Tag/getTagColor'; import { Dropdown } from 'components/common/Dropdown'; import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel.styled'; -import { Action, ResourceType } from 'generated-sources'; +import { Action, ConsumerGroupState, ResourceType } from 'generated-sources'; import { ActionDropdownItem } from 'components/common/ActionComponent'; import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell'; import { useConsumerGroupDetails, useDeleteConsumerGroupMutation, } from 'lib/hooks/api/consumers'; +import Tooltip from 'components/common/Tooltip/Tooltip'; +import { CONSUMER_GROUP_STATE_TOOLTIPS } from 'lib/constants'; import ListItem from './ListItem'; @@ -96,9 +98,19 @@ const Details: React.FC = () => { <Metrics.Wrapper> <Metrics.Section> <Metrics.Indicator label="State"> - <Tag color={getTagColor(consumerGroup.data?.state)}> - {consumerGroup.data?.state} - </Tag> + <Tooltip + value={ + <Tag color={getTagColor(consumerGroup.data?.state)}> + {consumerGroup.data?.state} + </Tag> + } + content={ + CONSUMER_GROUP_STATE_TOOLTIPS[ + consumerGroup.data?.state || ConsumerGroupState.UNKNOWN + ] + } + placement="bottom-start" + /> </Metrics.Indicator> <Metrics.Indicator label="Members"> {consumerGroup.data?.members} diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx index 4d51b492485..247a18b0bd8 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx @@ -5,15 +5,17 @@ import { ControlPanelWrapper } from 'components/common/ControlPanel/ControlPanel import { ConsumerGroupDetails, ConsumerGroupOrdering, + ConsumerGroupState, SortOrder, } from 'generated-sources'; import useAppParams from 'lib/hooks/useAppParams'; import { clusterConsumerGroupDetailsPath, ClusterNameRoute } from 'lib/paths'; import { ColumnDef } from '@tanstack/react-table'; -import Table, { TagCell, LinkCell } from 'components/common/NewTable'; +import Table, { LinkCell, TagCell } from 'components/common/NewTable'; import { useNavigate, useSearchParams } from 'react-router-dom'; -import { PER_PAGE } from 'lib/constants'; +import { CONSUMER_GROUP_STATE_TOOLTIPS, PER_PAGE } from 'lib/constants'; import { useConsumerGroups } from 'lib/hooks/api/consumers'; +import Tooltip from 'components/common/Tooltip/Tooltip'; const List = () => { const { clusterName } = useAppParams<ClusterNameRoute>(); @@ -69,7 +71,17 @@ const List = () => { id: ConsumerGroupOrdering.STATE, header: 'State', accessorKey: 'state', - cell: TagCell, + // eslint-disable-next-line react/no-unstable-nested-components + cell: (args) => { + const value = args.getValue() as ConsumerGroupState; + return ( + <Tooltip + value={<TagCell {...args} />} + content={CONSUMER_GROUP_STATE_TOOLTIPS[value]} + placement="bottom-end" + /> + ); + }, }, ], [] diff --git a/kafka-ui-react-app/src/lib/constants.ts b/kafka-ui-react-app/src/lib/constants.ts index 8c9c6deb613..a3e622550b8 100644 --- 
a/kafka-ui-react-app/src/lib/constants.ts +++ b/kafka-ui-react-app/src/lib/constants.ts @@ -1,5 +1,5 @@ import { SelectOption } from 'components/common/Select/Select'; -import { ConfigurationParameters } from 'generated-sources'; +import { ConfigurationParameters, ConsumerGroupState } from 'generated-sources'; declare global { interface Window { @@ -96,3 +96,14 @@ export const METRICS_OPTIONS: SelectOption[] = [ { value: 'JMX', label: 'JMX' }, { value: 'PROMETHEUS', label: 'PROMETHEUS' }, ]; + +export const CONSUMER_GROUP_STATE_TOOLTIPS: Record<ConsumerGroupState, string> = + { + EMPTY: 'The group exists but has no members.', + STABLE: 'Consumers are happily consuming and have assigned partitions.', + PREPARING_REBALANCE: + 'Something has changed, and the reassignment of partitions is required.', + COMPLETING_REBALANCE: 'Partition reassignment is in progress.', + DEAD: 'The group is going to be removed. It might be due to the inactivity, or the group is being migrated to different group coordinator.', + UNKNOWN: '', + } as const;
null
val
test
2023-06-22T10:46:11
"2023-04-27T00:39:59Z"
Haarolean
train
provectus/kafka-ui/3649_3873
provectus/kafka-ui
provectus/kafka-ui/3649
provectus/kafka-ui/3873
[ "connected" ]
80b748b02e60261fea687f63265cefdb1ce73902
004de798e47d00092a1ff7ac61e655e6596f777f
[]
[]
"2023-05-29T07:30:03Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Checking Broker's Source information
Autotest implementation for: https://app.qase.io/project/KAFKAUI?case=331&previewMode=modal&suite=1
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java index a3756d3bdfb..11489304795 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java @@ -16,6 +16,8 @@ public class BrokersConfigTab extends BasePage { + protected SelenideElement sourceInfoIcon = $x("//div[text()='Source']/..//div/div[@class]"); + protected SelenideElement sourceInfoTooltip = $x("//div[text()='Source']/..//div/div[@style]"); protected ElementsCollection editBtns = $$x("//button[@aria-label='editAction']"); @Step @@ -25,6 +27,17 @@ public BrokersConfigTab waitUntilScreenReady() { return this; } + @Step + public BrokersConfigTab hoverOnSourceInfoIcon() { + sourceInfoIcon.shouldBe(Condition.visible).hover(); + return this; + } + + @Step + public String getSourceInfoTooltipText() { + return sourceInfoTooltip.shouldBe(Condition.visible).getText().trim(); + } + @Step public boolean isSearchByKeyVisible() { return isVisible(searchFld); diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java new file mode 100644 index 00000000000..3e5b7726116 --- /dev/null +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/variables/Expected.java @@ -0,0 +1,15 @@ +package com.provectus.kafka.ui.variables; + +public interface Expected { + + String BROKER_SOURCE_INFO_TOOLTIP = + "DYNAMIC_TOPIC_CONFIG = dynamic topic config that is configured for a specific topic\n" + + "DYNAMIC_BROKER_LOGGER_CONFIG = dynamic broker logger config that is configured for a specific broker\n" + + "DYNAMIC_BROKER_CONFIG = dynamic broker config that is configured for a specific broker\n" + + "DYNAMIC_DEFAULT_BROKER_CONFIG = dynamic broker config that is configured as default " + + "for all brokers in the cluster\n" + + "STATIC_BROKER_CONFIG = static broker config provided as broker properties at start up " + + "(e.g. server.properties file)\n" + + "DEFAULT_CONFIG = built-in default configuration for configs that have a default value\n" + + "UNKNOWN = source unknown e.g. in the ConfigEntry used for alter requests where source is not set"; +}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index f96be5deecc..f94d4356e98 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -15,87 +15,80 @@ public class SmokeBacklog extends BaseManualTest { - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = BROKERS_SUITE_ID) - @QaseId(331) - @Test - public void testCaseA() { - } - @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(332) @Test - public void testCaseB() { + public void testCaseA() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(335) @Test - public void testCaseC() { + public void testCaseB() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(336) @Test - public void testCaseD() { + public void testCaseC() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(343) @Test - public void testCaseE() { + public void testCaseD() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(345) @Test - public void testCaseF() { + public void testCaseE() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(346) @Test - public void testCaseG() { + public void testCaseF() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(347) @Test - public void testCaseH() { + public void testCaseG() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(348) @Test - public void testCaseI() { + public void testCaseH() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(350) @Test - public void testCaseJ() { + public void testCaseI() { } @Automation(state = NOT_AUTOMATED) @Suite(id = TOPICS_SUITE_ID) @QaseId(50) @Test - public void testCaseK() { + public void testCaseJ() { } @Automation(state = NOT_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(351) @Test - public void testCaseL() { + public void testCaseK() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java index b58e0f678c3..010ab5769a1 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java @@ -1,6 +1,7 @@ package com.provectus.kafka.ui.smokesuite.brokers; import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS; +import static com.provectus.kafka.ui.variables.Expected.BROKER_SOURCE_INFO_TOOLTIP; import com.codeborne.selenide.Condition; import com.provectus.kafka.ui.BaseTest; @@ -69,4 +70,16 @@ public void brokersConfigSearchCheck() { .toList().contains(anyConfigKey), String.format("getAllConfigs().contains(%s)", anyConfigKey)); } + + @QaseId(331) + @Test + public void brokersSourceInfoCheck() { + navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); + brokersDetails + .openDetailsTab(CONFIGS); + String sourceInfoTooltip = brokersConfigTab + .hoverOnSourceInfoIcon() + .getSourceInfoTooltipText(); + Assert.assertEquals(sourceInfoTooltip, BROKER_SOURCE_INFO_TOOLTIP, "brokerSourceInfoTooltip"); + } }
train
test
2023-05-25T13:12:59
"2023-04-11T07:30:36Z"
VladSenyuta
train
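The hover-then-read flow added to the page object in the record above is a generic Selenide pattern: hover the trigger element, then wait for the tooltip node to become visible before reading it. A standalone sketch with illustrative XPath locators (the real locators are in the gold patch):

```java
import static com.codeborne.selenide.Selenide.$x;

import com.codeborne.selenide.Condition;
import com.codeborne.selenide.SelenideElement;

public class TooltipSketch {

  // Hovers the info icon next to a labeled cell and returns the tooltip text.
  static String readTooltip(String labelText) {
    SelenideElement icon =
        $x(String.format("//div[text()='%s']/..//div/div[@class]", labelText));
    SelenideElement tooltip =
        $x(String.format("//div[text()='%s']/..//div/div[@style]", labelText));
    icon.shouldBe(Condition.visible).hover(); // tooltip only renders on hover
    return tooltip.shouldBe(Condition.visible).getText().trim();
  }
}
```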
provectus/kafka-ui/3868_3881
provectus/kafka-ui
provectus/kafka-ui/3868
provectus/kafka-ui/3881
[ "keyword_pr_to_issue", "keyword_issue_to_pr" ]
61bf71f9b7a730180559cd23b321823e640bf128
f19abb203624fbb30820605200e321745f869a0e
[ "Hello there ben-voss! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hi, thanks for reporting this. Will be fixed within #3881 (to be used with java args via `-DproxyHost`)" ]
[ "let use `new WebClientConfigurator().build()` here" ]
"2023-05-30T10:38:11Z"
[ "type/bug", "scope/backend", "status/accepted" ]
Version Check Doesn't Use Proxy Settings
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) The version check fails and displays a 'null' version in the UI when the server is running in an environment that requires a proxy for internet access. Attempts to configure proxy settings using HTTP_PROXY/HTTPS_PROXY environment variables or using http.proxyHost/https.proxyHost properties do not appear to work. ### Expected behavior Proxy settings should be used in the WebClient that connects to github to obtain the version information and that should then be displayed in the UI ### Your installation details App is deployed in a Kubernetes cluster running in AWS behind a web proxy for internet access. App Version: 0.7 - fdd9ad9 Helm Chart Version: 0.7.0 Application config is sensitive and contains nothing but a broker URL. These are the environment variables used to configure the proxy with fake setting values: ``` - name: JAVA_OPTS value: "-Dhttp.proxyHost=http://example.com -Dhttp.proxyPort:8080 -Dhttps.proxyHost=http://example.com -Dhttps.proxyPort:8080" - name: HTTP_PROXY value: http://example.com:8080 - name: HTTPS_PROXY value: http://example.com:8080 - name: http_proxy value: http://example.com:8080 - name: https_proxy value: http://example.com:8080 ``` ### Steps to reproduce Configure as per above and navigate to the application. Hover the cursor over the version check which displays a 'null' version value. No errors in the logs. ### Screenshots _No response_ ### Logs _No response_ ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java index 2ad0c9c399b..26afaa82a2e 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/GithubReleaseInfo.java @@ -3,7 +3,6 @@ import com.google.common.annotations.VisibleForTesting; import java.time.Duration; import lombok.extern.slf4j.Slf4j; -import org.springframework.web.reactive.function.client.WebClient; import reactor.core.publisher.Mono; @Slf4j @@ -31,7 +30,7 @@ public GithubReleaseInfo() { @VisibleForTesting GithubReleaseInfo(String url) { - this.refreshMono = WebClient.create() + this.refreshMono = new WebClientConfigurator().build() .get() .uri(url) .exchangeToMono(resp -> resp.bodyToMono(GithubReleaseDto.class)) diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java index fe2240bd6aa..e0815828e23 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/WebClientConfigurator.java @@ -5,11 +5,8 @@ import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import com.provectus.kafka.ui.config.ClustersProperties; import com.provectus.kafka.ui.exception.ValidationException; -import io.netty.buffer.ByteBufAllocator; -import io.netty.handler.ssl.JdkSslContext; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslContextBuilder; -import io.netty.handler.ssl.SslProvider; import java.io.FileInputStream; import java.security.KeyStore; import java.util.function.Consumer; @@ -93,7 +90,12 @@ private WebClientConfigurator configureSsl( // Create webclient SslContext context = contextBuilder.build(); - builder.clientConnector(new ReactorClientHttpConnector(HttpClient.create().secure(t -> t.sslContext(context)))); + var httpClient = HttpClient + .create() + .secure(t -> t.sslContext(context)) + .proxyWithSystemProperties(); + + builder.clientConnector(new ReactorClientHttpConnector(httpClient)); return this; }
null
test
test
2023-05-30T12:41:03
"2023-05-26T08:19:19Z"
ben-voss
train
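The one-line mechanism behind the fix above is reactor-netty's system-property proxy support. A minimal sketch of a proxy-aware `WebClient`, with the proxy values left to the JVM flags:

```java
import org.springframework.http.client.reactive.ReactorClientHttpConnector;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.netty.http.client.HttpClient;

public class ProxyAwareClient {

  public static WebClient build() {
    // Honors -Dhttp.proxyHost / -Dhttp.proxyPort / -Dhttps.proxyHost /
    // -Dhttps.proxyPort / -Dhttp.nonProxyHosts when set on the JVM.
    HttpClient httpClient = HttpClient.create().proxyWithSystemProperties();
    return WebClient.builder()
        .clientConnector(new ReactorClientHttpConnector(httpClient))
        .build();
  }
}
```

With this connector, launching the JVM with e.g. `-Dhttps.proxyHost=proxy.example.com -Dhttps.proxyPort=8080` (placeholder values) routes the outbound call through the proxy; plain `HTTP_PROXY`/`HTTPS_PROXY` environment variables are not read by the Java HTTP stack, which is why they had no effect in the report above.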
provectus/kafka-ui/3564_3889
provectus/kafka-ui
provectus/kafka-ui/3564
provectus/kafka-ui/3889
[ "keyword_pr_to_issue" ]
29d91bca4b8c313e23278e126dad1020a87386e0
fdd4947142ec71be5628378a1e3b27f32d82520d
[]
[]
"2023-06-01T06:27:20Z"
[ "scope/infrastructure" ]
Post feature testing link as a check, not a comment
[ ".github/workflows/branch-deploy.yml", ".github/workflows/branch-remove.yml", ".github/workflows/build-public-image.yml" ]
[ ".github/workflows/branch-deploy.yml", ".github/workflows/branch-remove.yml", ".github/workflows/build-public-image.yml" ]
[]
diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml index a7f655e719d..880633f44e0 100644 --- a/.github/workflows/branch-deploy.yml +++ b/.github/workflows/branch-deploy.yml @@ -84,18 +84,22 @@ jobs: git add ../kafka-ui-from-branch/ git commit -m "added env:${{ needs.build.outputs.deploy }}" && git push || true - - name: make comment with private deployment link + - name: update status check for private deployment if: ${{ github.event.label.name == 'status/feature_testing' }} - uses: peter-evans/create-or-update-comment@v3 + uses: Sibz/[email protected] with: - issue-number: ${{ github.event.pull_request.number }} - body: | - Custom deployment will be available at http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io + authToken: ${{secrets.GITHUB_TOKEN}} + context: "Click Details button to open custom deployment page" + state: "success" + sha: ${{ github.event.pull_request.head.sha || github.sha }} + target_url: "http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io" - - name: make comment with public deployment link + - name: update status check for public deployment if: ${{ github.event.label.name == 'status/feature_testing_public' }} - uses: peter-evans/create-or-update-comment@v3 + uses: Sibz/[email protected] with: - issue-number: ${{ github.event.pull_request.number }} - body: | - Custom deployment will be available at http://${{ needs.build.outputs.tag }}.kafka-ui.provectus.io in 5 minutes + authToken: ${{secrets.GITHUB_TOKEN}} + context: "Click Details button to open custom deployment page" + state: "success" + sha: ${{ github.event.pull_request.head.sha || github.sha }} + target_url: "http://${{ needs.build.outputs.tag }}.internal.kafka-ui.provectus.io" diff --git a/.github/workflows/branch-remove.yml b/.github/workflows/branch-remove.yml index 9a65064c862..a891232fad2 100644 --- a/.github/workflows/branch-remove.yml +++ b/.github/workflows/branch-remove.yml @@ -20,9 +20,3 @@ jobs: git config --global user.name "infra-tech" git add ../kafka-ui-from-branch/ git commit -m "removed env:${{ needs.build.outputs.deploy }}" && git push || true - - name: make comment with deployment link - uses: peter-evans/create-or-update-comment@v3 - with: - issue-number: ${{ github.event.pull_request.number }} - body: | - Custom deployment removed diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml index c4698b062cf..9f6a3b902b2 100644 --- a/.github/workflows/build-public-image.yml +++ b/.github/workflows/build-public-image.yml @@ -64,12 +64,14 @@ jobs: JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache - - name: make comment with private deployment link - uses: peter-evans/create-or-update-comment@v3 + - name: update status check + uses: Sibz/[email protected] with: - issue-number: ${{ github.event.pull_request.number }} - body: | - Image published at public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }} + authToken: ${{secrets.GITHUB_TOKEN}} + context: "Image published at" + state: "success" + sha: ${{ github.event.pull_request.head.sha || github.sha }} + target_url: "public.ecr.aws/provectus/kafka-ui-custom-build:${{ steps.extract_branch.outputs.tag }}" outputs: tag: ${{ steps.extract_branch.outputs.tag }}
null
train
test
2023-05-31T17:50:40
"2023-03-27T18:22:11Z"
Haarolean
train
provectus/kafka-ui/3720_3891
provectus/kafka-ui
provectus/kafka-ui/3720
provectus/kafka-ui/3891
[ "keyword_pr_to_issue" ]
29d91bca4b8c313e23278e126dad1020a87386e0
8a68ba0778b7709f553a5987ddde505c5ebe42bb
[ "Hello there fl0wx! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Forgot frontend lol", "@David-DB88 \r\n\r\nTODO:\r\nChange RBAC permissions: For `ResourceType.CONNECT` all restart actions should have `Action.RESTART` instead of `Action.EDIT`" ]
[ "Set is more appropriate here, imo", "sure, fixed" ]
"2023-06-01T11:48:55Z"
[ "type/enhancement", "scope/backend", "scope/frontend", "status/accepted", "area/rbac" ]
RBAC: KC: Impl restart permissions
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? _No response_ ### Describe the feature you're interested in I do want a new permission for connector / connector task restarts. As i deploy and manage connectors via strimzi, i dont want the frontend user to edit or remove the tasks. But in case of an issue / tests it would be helpful if tasks could be restarted via UI with a separate permission. ### Describe alternatives you've considered _No response_ ### Version you're running 0.6.2 ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java", "kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx", "kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx", "kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx", "kafka-ui-react-app/src/components/Connect/New/New.tsx" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java", "kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx", "kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx", "kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx", "kafka-ui-react-app/src/components/Connect/New/New.tsx" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java index d300b930164..d6de3210b68 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/KafkaConnectController.java @@ -1,5 +1,9 @@ package com.provectus.kafka.ui.controller; +import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART; +import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART_ALL_TASKS; +import static com.provectus.kafka.ui.model.ConnectorActionDTO.RESTART_FAILED_TASKS; + import com.provectus.kafka.ui.api.KafkaConnectApi; import com.provectus.kafka.ui.model.ConnectDTO; import com.provectus.kafka.ui.model.ConnectorActionDTO; @@ -17,6 +21,7 @@ import com.provectus.kafka.ui.service.rbac.AccessControlService; import java.util.Comparator; import java.util.Map; +import java.util.Set; import javax.validation.Valid; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -30,6 +35,8 @@ @RequiredArgsConstructor @Slf4j public class KafkaConnectController extends AbstractController implements KafkaConnectApi { + private static final Set<ConnectorActionDTO> RESTART_ACTIONS + = Set.of(RESTART, RESTART_FAILED_TASKS, RESTART_ALL_TASKS); private final KafkaConnectService kafkaConnectService; private final AccessControlService accessControlService; @@ -172,10 +179,17 @@ public Mono<ResponseEntity<Void>> updateConnectorState(String clusterName, Strin ConnectorActionDTO action, ServerWebExchange exchange) { + ConnectAction[] connectActions; + if (RESTART_ACTIONS.contains(action)) { + connectActions = new ConnectAction[] {ConnectAction.VIEW, ConnectAction.RESTART}; + } else { + connectActions = new ConnectAction[] {ConnectAction.VIEW, ConnectAction.EDIT}; + } + Mono<Void> validateAccess = accessControlService.validateAccess(AccessContext.builder() .cluster(clusterName) .connect(connectName) - .connectActions(ConnectAction.VIEW, ConnectAction.EDIT) + .connectActions(connectActions) .build()); return validateAccess.then( @@ -253,16 +267,11 @@ private Comparator<FullConnectorInfoDTO> getConnectorsComparator(ConnectorColumn if (orderBy == null) { return defaultComparator; } - switch (orderBy) { - case CONNECT: - return Comparator.comparing(FullConnectorInfoDTO::getConnect); - case TYPE: - return Comparator.comparing(FullConnectorInfoDTO::getType); - case STATUS: - return Comparator.comparing(fullConnectorInfoDTO -> fullConnectorInfoDTO.getStatus().getState()); - case NAME: - default: - return defaultComparator; - } + return switch (orderBy) { + case CONNECT -> Comparator.comparing(FullConnectorInfoDTO::getConnect); + case TYPE -> Comparator.comparing(FullConnectorInfoDTO::getType); + case STATUS -> Comparator.comparing(fullConnectorInfoDTO -> fullConnectorInfoDTO.getStatus().getState()); + default -> defaultComparator; + }; } } diff --git a/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx b/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx index 530f0db1471..cc3755e19b8 100644 --- a/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx +++ b/kafka-ui-react-app/src/components/Connect/Details/Actions/Actions.tsx @@ -102,7 +102,7 @@ const Actions: React.FC = () => { disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: 
routerProps.connectorName, }} > @@ -113,7 +113,7 @@ const Actions: React.FC = () => { disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: routerProps.connectorName, }} > @@ -124,7 +124,7 @@ const Actions: React.FC = () => { disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: routerProps.connectorName, }} > diff --git a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx index 6d2cf845e1b..01e0b7b800f 100644 --- a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx +++ b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ActionsCellTasks.tsx @@ -1,9 +1,10 @@ import React from 'react'; -import { Task } from 'generated-sources'; +import { Action, ResourceType, Task } from 'generated-sources'; import { CellContext } from '@tanstack/react-table'; import useAppParams from 'lib/hooks/useAppParams'; import { useRestartConnectorTask } from 'lib/hooks/api/kafkaConnect'; -import { Dropdown, DropdownItem } from 'components/common/Dropdown'; +import { Dropdown } from 'components/common/Dropdown'; +import { ActionDropdownItem } from 'components/common/ActionComponent'; import { RouterParamsClusterConnectConnector } from 'lib/paths'; const ActionsCellTasks: React.FC<CellContext<Task, unknown>> = ({ row }) => { @@ -18,13 +19,18 @@ const ActionsCellTasks: React.FC<CellContext<Task, unknown>> = ({ row }) => { return ( <Dropdown> - <DropdownItem + <ActionDropdownItem onClick={() => restartTaskHandler(id?.task)} danger confirm="Are you sure you want to restart the task?" + permission={{ + resource: ResourceType.CONNECT, + action: Action.RESTART, + value: routerProps.connectorName, + }} > <span>Restart task</span> - </DropdownItem> + </ActionDropdownItem> </Dropdown> ); }; diff --git a/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx b/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx index 5b3a24cdb70..246ad332c65 100644 --- a/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx +++ b/kafka-ui-react-app/src/components/Connect/List/ActionsCell.tsx @@ -78,7 +78,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({ disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: name, }} > @@ -89,7 +89,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({ disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: name, }} > @@ -100,7 +100,7 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({ disabled={isMutating} permission={{ resource: ResourceType.CONNECT, - action: Action.EDIT, + action: Action.RESTART, value: name, }} > diff --git a/kafka-ui-react-app/src/components/Connect/New/New.tsx b/kafka-ui-react-app/src/components/Connect/New/New.tsx index 8040267661f..bf285838d38 100644 --- a/kafka-ui-react-app/src/components/Connect/New/New.tsx +++ b/kafka-ui-react-app/src/components/Connect/New/New.tsx @@ -38,7 +38,7 @@ const New: React.FC = () => { const { clusterName } = useAppParams<ClusterNameRoute>(); const navigate = useNavigate(); - const { data: connects } = useConnects(clusterName); + const { data: connects = [] } = useConnects(clusterName); const mutation = useCreateConnector(clusterName); const 
methods = useForm<FormValues>({ @@ -88,10 +88,6 @@ const New: React.FC = () => { } }; - if (!connects || connects.length === 0) { - return null; - } - const connectOptions = connects.map(({ name: connectName }) => ({ value: connectName, label: connectName, @@ -108,10 +104,10 @@ const New: React.FC = () => { onSubmit={handleSubmit(onSubmit)} aria-label="Create connect form" > - <S.Filed $hidden={connects.length <= 1}> + <S.Filed $hidden={connects?.length <= 1}> <Heading level={3}>Connect *</Heading> <Controller - defaultValue={connectOptions[0].value} + defaultValue={connectOptions[0]?.value} control={control} name="connectName" render={({ field: { name, onChange } }) => ( @@ -120,7 +116,7 @@ const New: React.FC = () => { name={name} disabled={isSubmitting} onChange={onChange} - value={connectOptions[0].value} + value={connectOptions[0]?.value} minWidth="100%" options={connectOptions} />
null
train
test
2023-05-31T17:50:40
"2023-04-26T13:02:01Z"
fl0wx
train
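The backend change in the record above routes restart-type connector actions to a dedicated RESTART permission while leaving everything else on EDIT. A condensed sketch of that mapping, with simplified stand-in enums (the real action and permission types are generated DTOs):

```java
import java.util.Set;

public class ConnectorActionPolicy {

  enum ConnectorAction { RESTART, RESTART_ALL_TASKS, RESTART_FAILED_TASKS, PAUSE, RESUME }

  enum Permission { VIEW, EDIT, RESTART }

  // Restart-type actions require only VIEW + RESTART, so an operator can be
  // allowed to bounce tasks without being able to edit or delete connectors.
  private static final Set<ConnectorAction> RESTART_ACTIONS =
      Set.of(ConnectorAction.RESTART,
             ConnectorAction.RESTART_ALL_TASKS,
             ConnectorAction.RESTART_FAILED_TASKS);

  static Permission[] requiredPermissions(ConnectorAction action) {
    return RESTART_ACTIONS.contains(action)
        ? new Permission[] {Permission.VIEW, Permission.RESTART}
        : new Permission[] {Permission.VIEW, Permission.EDIT};
  }
}
```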
provectus/kafka-ui/3650_3912
provectus/kafka-ui
provectus/kafka-ui/3650
provectus/kafka-ui/3912
[ "connected" ]
cd9bc43d2e91ef43201494c4424c54347136d9c0
4b724fd852f9814ec3ab9316a739cc2d8a1f282c
[]
[]
"2023-06-06T06:58:30Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Checking Brokers configs editing
Autotest implementation for: https://app.qase.io/project/KAFKAUI?case=332&previewMode=side&suite=1
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java index 11489304795..cf0dce73218 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java @@ -104,13 +104,58 @@ public String getValue() { } @Step - public void edit() { - element.$x("./td[2]//button").shouldBe(Condition.enabled).click(); + public BrokersConfigItem setValue(String value) { + sendKeysAfterClear(getValueFld(), value); + return this; + } + + @Step + public SelenideElement getValueFld() { + return element.$x("./td[2]//input"); + } + + @Step + public SelenideElement getSaveBtn() { + return element.$x("./td[2]//button[@aria-label='confirmAction']"); + } + + @Step + public SelenideElement getCancelBtn() { + return element.$x("./td[2]//button[@aria-label='cancelAction']"); + } + + @Step + public SelenideElement getEditBtn() { + return element.$x("./td[2]//button[@aria-label='editAction']"); + } + + @Step + public BrokersConfigItem clickSaveBtn() { + getSaveBtn().shouldBe(Condition.enabled).click(); + return this; + } + + @Step + public BrokersConfigItem clickCancelBtn() { + getCancelBtn().shouldBe(Condition.enabled).click(); + return this; + } + + @Step + public BrokersConfigItem clickEditBtn() { + getEditBtn().shouldBe(Condition.enabled).click(); + return this; } @Step public String getSource() { return element.$x("./td[3]").getText().trim(); } + + @Step + public BrokersConfigItem clickConfirm() { + clickConfirmButton(); + return this; + } } }
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index f94d4356e98..c52db3c1b3c 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -15,80 +15,73 @@ public class SmokeBacklog extends BaseManualTest { - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = BROKERS_SUITE_ID) - @QaseId(332) - @Test - public void testCaseA() { - } - @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(335) @Test - public void testCaseB() { + public void testCaseA() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(336) @Test - public void testCaseC() { + public void testCaseB() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(343) @Test - public void testCaseD() { + public void testCaseC() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(345) @Test - public void testCaseE() { + public void testCaseD() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(346) @Test - public void testCaseF() { + public void testCaseE() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = TOPICS_PROFILE_SUITE_ID) @QaseId(347) @Test - public void testCaseG() { + public void testCaseF() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(348) @Test - public void testCaseH() { + public void testCaseG() { } @Automation(state = TO_BE_AUTOMATED) @Suite(id = BROKERS_SUITE_ID) @QaseId(350) @Test - public void testCaseI() { + public void testCaseH() { } @Automation(state = NOT_AUTOMATED) @Suite(id = TOPICS_SUITE_ID) @QaseId(50) @Test - public void testCaseJ() { + public void testCaseI() { } @Automation(state = NOT_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(351) @Test - public void testCaseK() { + public void testCaseJ() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java index 010ab5769a1..e4a4f9f90c9 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java @@ -11,6 +11,7 @@ import org.testng.Assert; import org.testng.annotations.Ignore; import org.testng.annotations.Test; +import org.testng.asserts.SoftAssert; public class BrokersTest extends BaseTest { @@ -82,4 +83,42 @@ public void brokersSourceInfoCheck() { .getSourceInfoTooltipText(); Assert.assertEquals(sourceInfoTooltip, BROKER_SOURCE_INFO_TOOLTIP, "brokerSourceInfoTooltip"); } + + @QaseId(332) + @Test + public void brokersConfigEditCheck() { + navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); + brokersDetails + .openDetailsTab(CONFIGS); + String configKey = "log.cleaner.min.compaction.lag.ms"; + BrokersConfigTab.BrokersConfigItem configItem = brokersConfigTab + .searchConfig(configKey) + .getConfig(configKey); + int defaultValue = Integer.parseInt(configItem.getValue()); + configItem + .clickEditBtn(); + SoftAssert softly = new SoftAssert(); + softly.assertTrue(configItem.getSaveBtn().isDisplayed(), "getSaveBtn().isDisplayed()"); + 
softly.assertTrue(configItem.getCancelBtn().isDisplayed(), "getCancelBtn().isDisplayed()"); + softly.assertTrue(configItem.getValueFld().isEnabled(), "getValueFld().isEnabled()"); + softly.assertAll(); + int newValue = defaultValue + 1; + configItem + .setValue(String.valueOf(newValue)) + .clickCancelBtn(); + Assert.assertEquals(Integer.parseInt(configItem.getValue()), defaultValue, "getValue()"); + configItem + .clickEditBtn() + .setValue(String.valueOf(newValue)) + .clickSaveBtn() + .clickConfirm(); + configItem = brokersConfigTab + .searchConfig(configKey) + .getConfig(configKey); + softly.assertFalse(configItem.getSaveBtn().isDisplayed(), "getSaveBtn().isDisplayed()"); + softly.assertFalse(configItem.getCancelBtn().isDisplayed(), "getCancelBtn().isDisplayed()"); + softly.assertTrue(configItem.getEditBtn().isDisplayed(), "getEditBtn().isDisplayed()"); + softly.assertEquals(Integer.parseInt(configItem.getValue()), newValue, "getValue()"); + softly.assertAll(); + } }
train
test
2023-06-05T12:13:13
"2023-04-11T07:32:30Z"
VladSenyuta
train
provectus/kafka-ui/3865_3915
provectus/kafka-ui
provectus/kafka-ui/3865
provectus/kafka-ui/3915
[ "keyword_pr_to_issue" ]
4b724fd852f9814ec3ab9316a739cc2d8a1f282c
2ac8646769d10e69d972928b943c5bcc708ddc61
[ "Hello there uaeink! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "@Haarolean Can you help to review it?\r\nI inspect on the browser. And I find that it will try to fetch resources(https://api.github.com/repos/provectus/kafka-ui/releases/latest & https://fonts.googleapis.com/css2?family=Inter:wght@400;500&family=Roboto+Mono:wght@400;500&display=swap)", "@uaeink please share the output from `http://localhost:8080/api/info` endpoint", "@Haarolean Sure, the image i am using: \"image: provectuslabs/kafka-ui:master\"\r\n\r\n<img width=\"710\" alt=\"Screenshot 2023-05-30 at 5 46 07 PM\" src=\"https://github.com/provectus/kafka-ui/assets/134577692/4865cd26-0996-44c0-abde-2973e2cb9894\">\r\n\r\n", "This issue has been automatically marked as stale because no requested feedback has been provided. It will be closed if no further activity occurs. Thank you for your contributions.", "@uaeink the actual problem's scope is bigger here: you won't see any clusters available without internet access (even if it's localhost, yes). We'll take a look into this." ]
[]
"2023-06-07T08:26:00Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
UI is unusable without internet access even for localhost
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [x] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) 1. deploy kafka-ui via docker image 2. disconnect Internet 3. enter into web page 4. web page Empty <img width="1728" alt="Screenshot 2023-05-25 at 9 02 32 AM" src="https://github.com/provectus/kafka-ui/assets/134577692/4dbf4019-91df-46c1-86c3-e69a489d51b7"> ### Expected behavior web page working fine with Internet connection <img width="1728" alt="Screenshot 2023-05-25 at 9 04 50 AM" src="https://github.com/provectus/kafka-ui/assets/134577692/a5de8d27-006b-4985-a714-1ee91377fb63"> ### Your installation details [docker-compose.yml.txt](https://github.com/provectus/kafka-ui/files/11561295/docker-compose.yml.txt) garen node server.properties [server.properties.txt](https://github.com/provectus/kafka-ui/files/11561312/server.properties.txt) leesin node server.properties [server.properties.txt](https://github.com/provectus/kafka-ui/files/11561316/server.properties.txt) zookeeper.properties [zookeeper.properties.txt](https://github.com/provectus/kafka-ui/files/11562024/zookeeper.properties.txt) ### Steps to reproduce 1. deploy kafka-ui with docker-compose 2. disconnect from Internet 3. try to access the web page ### Screenshots _No response_ ### Logs _No response_ ### Additional context _No response_
[ "kafka-ui-react-app/src/components/App.tsx" ]
[ "kafka-ui-react-app/src/components/App.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index 44409c403ab..f1718e31005 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -30,6 +30,7 @@ const queryClient = new QueryClient({ defaultOptions: { queries: { suspense: true, + networkMode: 'offlineFirst', onError(error) { showServerError(error as Response); },
null
train
test
2023-06-06T12:57:58
"2023-05-25T05:14:25Z"
uaeink
train
provectus/kafka-ui/3110_3923
provectus/kafka-ui
provectus/kafka-ui/3110
provectus/kafka-ui/3923
[ "connected" ]
6fe61654271fe74d595180fc29615b155fb66182
d0088490a48b7c45855a42cf1deb4bba465595c0
[ "Hi @Haarolean Can I pick this up?", "@vikasrajputin sure, please", "Hello @Haarolean I've fixed the issue and raised the PR -> https://github.com/provectus/kafka-ui/pull/3923" ]
[]
"2023-06-10T10:40:22Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed" ]
Messages: reset timestamp value w/ Clear all filters
**Describe the bug** Messages tab refreshing with Clear all filters instead of removing Timestamp filter **Set up** https://www.kafka-ui.provectus.io/ **Steps to Reproduce** 1. Login 2. Open the Topic 3. Turn to Messages 4. Apply Timestamp filter 5. Press Clear all **Actual behavior** The seek type changes to Offset automatically and the Timestamp value keeps **Expected behavior** With click on Clear All the current filter values should be removed and messages list should be updated **Screenshots** https://user-images.githubusercontent.com/104780608/208837242-9cc6d514-f11b-41aa-9a0d-d530fa40a378.mov
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx index 0c2510ff4cc..347623d2226 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/Filters/Filters.tsx @@ -188,6 +188,7 @@ const Filters: React.FC<FiltersProps> = ({ const handleClearAllFilters = () => { setCurrentSeekType(SeekType.OFFSET); setOffset(''); + setTimestamp(null); setQuery(''); changeSeekDirection(SeekDirection.FORWARD); getSelectedPartitionsFromSeekToParam(searchParams, partitions);
null
test
test
2023-06-20T05:51:50
"2022-12-21T06:36:26Z"
armenuikafka
train
provectus/kafka-ui/3879_3928
provectus/kafka-ui
provectus/kafka-ui/3879
provectus/kafka-ui/3928
[ "connected" ]
c3559556411189bdfa72b67188ee9d49a47fce37
100bb1dac6b79c92fdb40d4ddab8e16970a99e77
[]
[]
"2023-06-12T08:32:40Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Checking Broker's config search - non-first page results
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? _No response_ ### Describe the feature you're interested in autotest implementation for https://app.qase.io/project/KAFKAUI?case=350&suite=1&tab= ### Describe alternatives you've considered _No response_ ### Version you're running master ### Additional context _No response_
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java" ]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java", "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java index d2e201191ab..8d0841b4407 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/BasePage.java @@ -28,6 +28,7 @@ public abstract class BasePage extends WebUtils { protected SelenideElement confirmBtn = $x("//button[contains(text(),'Confirm')]"); protected SelenideElement cancelBtn = $x("//button[contains(text(),'Cancel')]"); protected SelenideElement backBtn = $x("//button[contains(text(),'Back')]"); + protected SelenideElement previousBtn = $x("//button[contains(text(),'Previous')]"); protected SelenideElement nextBtn = $x("//button[contains(text(),'Next')]"); protected ElementsCollection ddlOptions = $$x("//li[@value]"); protected ElementsCollection gridItems = $$x("//tr[@class]"); @@ -75,6 +76,10 @@ protected void clickBackBtn() { clickByJavaScript(backBtn); } + protected void clickPreviousBtn() { + clickByJavaScript(previousBtn); + } + protected void setJsonInputValue(SelenideElement jsonInput, String jsonConfig) { sendKeysByActions(jsonInput, jsonConfig.replace(" ", "")); new Actions(WebDriverRunner.getWebDriver()) diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java index cf0dce73218..a9e146002ec 100644 --- a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/pages/brokers/BrokersConfigTab.java @@ -66,6 +66,13 @@ public BrokersConfigTab clickNextButton() { return this; } + @Step + public BrokersConfigTab clickPreviousButton() { + clickPreviousBtn(); + waitUntilSpinnerDisappear(1); + return this; + } + private List<BrokersConfigTab.BrokersConfigItem> initGridItems() { List<BrokersConfigTab.BrokersConfigItem> gridItemList = new ArrayList<>(); gridItems.shouldHave(CollectionCondition.sizeGreaterThan(0))
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index c52db3c1b3c..f4cacc25207 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -64,24 +64,17 @@ public void testCaseF() { public void testCaseG() { } - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = BROKERS_SUITE_ID) - @QaseId(350) - @Test - public void testCaseH() { - } - @Automation(state = NOT_AUTOMATED) @Suite(id = TOPICS_SUITE_ID) @QaseId(50) @Test - public void testCaseI() { + public void testCaseH() { } @Automation(state = NOT_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(351) @Test - public void testCaseJ() { + public void testCaseI() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java index e4a4f9f90c9..663018cbc3b 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java @@ -50,11 +50,11 @@ public void checkExistingBrokersInCluster() { @Issue("https://github.com/provectus/kafka-ui/issues/3347") @QaseId(330) @Test - public void brokersConfigSearchCheck() { + public void brokersConfigFirstPageSearchCheck() { navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); brokersDetails .openDetailsTab(CONFIGS); - String anyConfigKey = brokersConfigTab + String anyConfigKeyFirstPage = brokersConfigTab .getAllConfigs().stream() .findAny().orElseThrow() .getKey(); @@ -62,14 +62,42 @@ public void brokersConfigSearchCheck() { .clickNextButton(); Assert.assertFalse(brokersConfigTab.getAllConfigs().stream() .map(BrokersConfigTab.BrokersConfigItem::getKey) - .toList().contains(anyConfigKey), - String.format("getAllConfigs().contains(%s)", anyConfigKey)); + .toList().contains(anyConfigKeyFirstPage), + String.format("getAllConfigs().contains(%s)", anyConfigKeyFirstPage)); brokersConfigTab - .searchConfig(anyConfigKey); + .searchConfig(anyConfigKeyFirstPage); Assert.assertTrue(brokersConfigTab.getAllConfigs().stream() .map(BrokersConfigTab.BrokersConfigItem::getKey) - .toList().contains(anyConfigKey), - String.format("getAllConfigs().contains(%s)", anyConfigKey)); + .toList().contains(anyConfigKeyFirstPage), + String.format("getAllConfigs().contains(%s)", anyConfigKeyFirstPage)); + } + + @Ignore + @Issue("https://github.com/provectus/kafka-ui/issues/3347") + @QaseId(350) + @Test + public void brokersConfigSecondPageSearchCheck() { + navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); + brokersDetails + .openDetailsTab(CONFIGS); + brokersConfigTab + .clickNextButton(); + String anyConfigKeySecondPage = brokersConfigTab + .getAllConfigs().stream() + .findAny().orElseThrow() + .getKey(); + brokersConfigTab + .clickPreviousButton(); + Assert.assertFalse(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + .toList().contains(anyConfigKeySecondPage), + String.format("getAllConfigs().contains(%s)", anyConfigKeySecondPage)); + brokersConfigTab + .searchConfig(anyConfigKeySecondPage); + Assert.assertTrue(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + 
.toList().contains(anyConfigKeySecondPage), + String.format("getAllConfigs().contains(%s)", anyConfigKeySecondPage)); } @QaseId(331)
train
test
2023-06-08T15:14:39
"2023-05-30T09:11:34Z"
VladSenyuta
train
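The Selenide test added in the record above encodes one invariant: a config key taken from the second page must be absent from the first page until it is searched for, which proves search is not page-scoped. A framework-free TypeScript sketch of the same assertion order, written against a hypothetical page-object interface (none of these names exist in the repo):

```typescript
// Hypothetical page-object interface for the broker Configs tab.
interface ConfigsTab {
  visibleKeys(): string[]; // config keys on the currently displayed page
  nextPage(): void;
  previousPage(): void;
  search(key: string): void; // expected to filter across all pages
}

function checkSecondPageSearch(tab: ConfigsTab): void {
  tab.nextPage();
  const keyFromSecondPage = tab.visibleKeys()[0];
  tab.previousPage();
  // Precondition: the key is not on the first page before searching...
  if (tab.visibleKeys().includes(keyFromSecondPage)) {
    throw new Error('key unexpectedly present before search');
  }
  // ...but search must still find it, i.e. search is not limited to the page.
  tab.search(keyFromSecondPage);
  if (!tab.visibleKeys().includes(keyFromSecondPage)) {
    throw new Error('search missed a non-first-page config key');
  }
}
```

This mirrors `brokersConfigSecondPageSearchCheck` from the test patch, minus the Selenide plumbing.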
provectus/kafka-ui/3237_3931
provectus/kafka-ui
provectus/kafka-ui/3237
provectus/kafka-ui/3931
[ "connected" ]
cdb4f84e236de638fe9e45f267ece011c72463e7
b1ac3482db3d4157984effe0bb7be321a2a37090
[ "Hey, thanks, we'll take a look.", "This seems to also apply to the default SchemaRegistry Serde. \r\n\r\nIs there a way i can fix this for the included SchemaRegistry serde?", "@Haarolean Any update on this? We're also being affected by this odd display behavior and would like to see it fixed.", "@frankgrimes97 planned for 0.8", "Reopening, since it is only fixed for Kafka schema registry, not glue", "@Haarolean Any update on when we might see a fix and 0.8 release?", "> @Haarolean Any update on when we might see a fix and 0.8 release?\r\n\r\n@frankgrimes97\r\nhttps://github.com/provectus/kafka-ui/discussions/4255\r\nhttps://github.com/kafbat/kafka-ui/discussions/23" ]
[]
"2023-06-12T10:47:53Z"
[ "type/bug", "scope/backend", "status/accepted", "area/serde" ]
Glue serde avro to json deserialization includes namespaces and union types
Originally reported in #3224 , split into a separate issue following the discussion in #3235 When the glue serde deserializes to json from avro, it includes the record namespaces and types in the case of union. This is the first time I'm encountering the behaviour since the python deserializer or the one used in kafka-connect don't follow this behavior ### Example: Original msg: ```json {"name": {"first": "ron", "last": "serruya", "full": "ron serruya"}, "ids1": [5,6], "ids2": ["abc", 123]} ``` schema used: ``` { "type": "record", "name": "generation", "namespace": "top_level", "fields": [ { "name": "name", "type": [ { "type": "record", "name": "name", "namespace": "top_level.generation", "fields": [ { "name": "raw", "type": [ "string", "null" ] }, { "name": "first", "type": "string" }, { "name": "full", "type": "string" }, { "name": "last", "type": ["string"] } ] }, "null" ] }, { "name": "ids1", "type": {"type": "array", "items": "int"} }, { "name": "ids2", "type": {"type": "array", "items": ["string", "int"]} } ] } ``` base64 encoded avro msg (just the msg, without the glue-related bytes at the start) `AAIGcm9uFnJvbiBzZXJydXlhAA5zZXJydXlhBAoMAAQABmFiYwL2AQA=` The current glue deserializer shows this msg as: ``` { "name": { "top_level.generation.name": { "raw": null, "first": "ron", "full": "ron serruya", "last": { "string": "serruya" } } }, "ids1": [ 5, 6 ], "ids2": [ { "string": "abc" }, { "int": 123 } ] } ``` As you can see it adds `string`, `int`, or the record namespace `top_level.generation.name` I fixed this issue locally by adding this line: `encoder.setIncludeNamespace(false);` in the [avroRecordToJson method](https://github.com/provectus/kafkaui-glue-sr-serde/blob/main/src/main/java/com/provectus/kafka/ui/serdes/glue/JsonUtil.java#L28) But according to the comment in #3235 , that's not a completely valid fix since it can break other stuff? Before and after the fix: <img width="469" alt="Screen Shot 2023-01-15 at 15 48 52" src="https://user-images.githubusercontent.com/21235163/212545189-9b1e6b3f-d8b1-483a-a204-269150df8a93.png"> <img width="431" alt="Screen Shot 2023-01-15 at 15 45 44" src="https://user-images.githubusercontent.com/21235163/212545187-7eea90ee-f4c8-4a73-9dc9-17bfa4f0d86a.png">
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonAvroConversionException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonAvroConversionException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonAvroConversionException.java new file mode 100644 index 00000000000..ef658031e56 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonAvroConversionException.java @@ -0,0 +1,7 @@ +package com.provectus.kafka.ui.exception; + +public class JsonAvroConversionException extends ValidationException { + public JsonAvroConversionException(String message) { + super(message); + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java deleted file mode 100644 index 5b3910f4cf3..00000000000 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/JsonToAvroConversionException.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.provectus.kafka.ui.exception; - -public class JsonToAvroConversionException extends ValidationException { - public JsonToAvroConversionException(String message) { - super(message); - } -} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java index 56737d7a612..84f56b81dc0 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -151,30 +151,16 @@ private ArrayFieldSchema createArraySchema(Schema schema, } private JsonType convertType(Schema schema) { - switch (schema.getType()) { - case INT: - case LONG: - return new SimpleJsonType(JsonType.Type.INTEGER); - case MAP: - case RECORD: - return new SimpleJsonType(JsonType.Type.OBJECT); - case ENUM: - return new EnumJsonType(schema.getEnumSymbols()); - case BYTES: - case STRING: - return new SimpleJsonType(JsonType.Type.STRING); - case NULL: - return new SimpleJsonType(JsonType.Type.NULL); - case ARRAY: - return new SimpleJsonType(JsonType.Type.ARRAY); - case FIXED: - case FLOAT: - case DOUBLE: - return new SimpleJsonType(JsonType.Type.NUMBER); - case BOOLEAN: - return new SimpleJsonType(JsonType.Type.BOOLEAN); - default: - return new SimpleJsonType(JsonType.Type.STRING); - } + return switch (schema.getType()) { + case INT, LONG -> new SimpleJsonType(JsonType.Type.INTEGER); + case MAP, RECORD -> new SimpleJsonType(JsonType.Type.OBJECT); + case ENUM -> new EnumJsonType(schema.getEnumSymbols()); + case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING); + case NULL -> new SimpleJsonType(JsonType.Type.NULL); + case ARRAY -> new SimpleJsonType(JsonType.Type.ARRAY); + case FIXED, FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER); + case BOOLEAN -> new SimpleJsonType(JsonType.Type.BOOLEAN); + default -> new SimpleJsonType(JsonType.Type.STRING); + }; } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java index d2114dd971c..0708044837b 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversion.java @@ -1,6 +1,7 @@ package com.provectus.kafka.ui.util.jsonschema; import 
com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.json.JsonMapper; import com.fasterxml.jackson.databind.node.ArrayNode; @@ -15,7 +16,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.databind.node.TextNode; import com.google.common.collect.Lists; -import com.provectus.kafka.ui.exception.JsonToAvroConversionException; +import com.provectus.kafka.ui.exception.JsonAvroConversionException; import io.confluent.kafka.serializers.AvroData; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -34,7 +35,6 @@ import java.util.concurrent.TimeUnit; import java.util.function.BiFunction; import java.util.stream.Stream; -import lombok.SneakyThrows; import org.apache.avro.Schema; import org.apache.avro.generic.GenericData; @@ -42,12 +42,17 @@ public class JsonAvroConversion { private static final JsonMapper MAPPER = new JsonMapper(); + private static final Schema NULL_SCHEMA = Schema.create(Schema.Type.NULL); // converts json into Object that is expected input for KafkaAvroSerializer // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) - @SneakyThrows public static Object convertJsonToAvro(String jsonString, Schema avroSchema) { - JsonNode rootNode = MAPPER.readTree(jsonString); + JsonNode rootNode = null; + try { + rootNode = MAPPER.readTree(jsonString); + } catch (JsonProcessingException e) { + throw new JsonAvroConversionException("String is not a valid json"); + } return convert(rootNode, avroSchema); } @@ -80,7 +85,7 @@ private static Object convert(JsonNode node, Schema avroSchema) { assertJsonType(node, JsonNodeType.STRING); String symbol = node.textValue(); if (!avroSchema.getEnumSymbols().contains(symbol)) { - throw new JsonToAvroConversionException("%s is not a part of enum symbols [%s]" + throw new JsonAvroConversionException("%s is not a part of enum symbols [%s]" .formatted(symbol, avroSchema.getEnumSymbols())); } yield new GenericData.EnumSymbol(avroSchema, symbol); @@ -88,23 +93,35 @@ private static Object convert(JsonNode node, Schema avroSchema) { case UNION -> { // for types from enum (other than null) payload should be an object with single key == name of type // ex: schema = [ "null", "int", "string" ], possible payloads = null, { "string": "str" }, { "int": 123 } - if (node.isNull() && avroSchema.getTypes().contains(Schema.create(Schema.Type.NULL))) { + if (node.isNull() && avroSchema.getTypes().contains(NULL_SCHEMA)) { yield null; } assertJsonType(node, JsonNodeType.OBJECT); var elements = Lists.newArrayList(node.fields()); if (elements.size() != 1) { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "UNION field value should be an object with single field == type name"); } - var typeNameToValue = elements.get(0); + Map.Entry<String, JsonNode> typeNameToValue = elements.get(0); + List<Schema> candidates = new ArrayList<>(); for (Schema unionType : avroSchema.getTypes()) { if (typeNameToValue.getKey().equals(unionType.getFullName())) { yield convert(typeNameToValue.getValue(), unionType); } + if (typeNameToValue.getKey().equals(unionType.getName())) { + candidates.add(unionType); + } + } + if (candidates.size() == 1) { + yield convert(typeNameToValue.getValue(), candidates.get(0)); } - throw new JsonToAvroConversionException( + if (candidates.size() > 1) { + throw new JsonAvroConversionException( + "Can't select type within union for value '%s'. 
Provide full type name.".formatted(node) + ); + } + throw new JsonAvroConversionException( "json value '%s' is cannot be converted to any of union types [%s]" .formatted(node, avroSchema.getTypes())); } @@ -164,7 +181,7 @@ private static Object convert(JsonNode node, Schema avroSchema) { assertJsonType(node, JsonNodeType.STRING); byte[] bytes = node.textValue().getBytes(StandardCharsets.ISO_8859_1); if (bytes.length != avroSchema.getFixedSize()) { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "Fixed field has unexpected size %d (should be %d)" .formatted(bytes.length, avroSchema.getFixedSize())); } @@ -208,8 +225,11 @@ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { case UNION -> { ObjectNode node = MAPPER.createObjectNode(); int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj); - Schema unionType = avroSchema.getTypes().get(unionIdx); - node.set(unionType.getFullName(), convertAvroToJson(obj, unionType)); + Schema selectedType = avroSchema.getTypes().get(unionIdx); + node.set( + selectUnionTypeFieldName(avroSchema, selectedType, unionIdx), + convertAvroToJson(obj, selectedType) + ); yield node; } case STRING -> { @@ -252,11 +272,30 @@ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { }; } + // select name for a key field that represents type name of union. + // For records selects short name, if it is possible. + private static String selectUnionTypeFieldName(Schema unionSchema, + Schema chosenType, + int chosenTypeIdx) { + var types = unionSchema.getTypes(); + if (types.size() == 2 && types.contains(NULL_SCHEMA)) { + return chosenType.getName(); + } + for (int i = 0; i < types.size(); i++) { + if (i != chosenTypeIdx && chosenType.getName().equals(types.get(i).getName())) { + // there is another type inside union with the same name + // so, we have to use fullname + return chosenType.getFullName(); + } + } + return chosenType.getName(); + } + private static Object processLogicalType(JsonNode node, Schema schema) { return findConversion(schema) .map(c -> c.jsonToAvroConversion.apply(node, schema)) .orElseThrow(() -> - new JsonToAvroConversionException("'%s' logical type is not supported" + new JsonAvroConversionException("'%s' logical type is not supported" .formatted(schema.getLogicalType().getName()))); } @@ -264,7 +303,7 @@ private static JsonNode processLogicalType(Object obj, Schema schema) { return findConversion(schema) .map(c -> c.avroToJsonConversion.apply(obj, schema)) .orElseThrow(() -> - new JsonToAvroConversionException("'%s' logical type is not supported" + new JsonAvroConversionException("'%s' logical type is not supported" .formatted(schema.getLogicalType().getName()))); } @@ -281,7 +320,7 @@ private static boolean isLogicalType(Schema schema) { private static void assertJsonType(JsonNode node, JsonNodeType... allowedTypes) { if (Stream.of(allowedTypes).noneMatch(t -> node.getNodeType() == t)) { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "%s node has unexpected type, allowed types %s, actual type %s" .formatted(node, Arrays.toString(allowedTypes), node.getNodeType())); } @@ -289,7 +328,7 @@ private static void assertJsonType(JsonNode node, JsonNodeType... allowedTypes) private static void assertJsonNumberType(JsonNode node, JsonParser.NumberType... 
allowedTypes) { if (Stream.of(allowedTypes).noneMatch(t -> node.numberType() == t)) { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "%s node has unexpected numeric type, allowed types %s, actual type %s" .formatted(node, Arrays.toString(allowedTypes), node.numberType())); } @@ -318,7 +357,7 @@ enum LogicalTypeConversion { } else if (node.isNumber()) { return new BigDecimal(node.numberValue().toString()); } - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to decimal logical type" .formatted(node)); }, @@ -335,7 +374,7 @@ enum LogicalTypeConversion { } else if (node.isTextual()) { return LocalDate.parse(node.asText()); } else { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to date logical type" .formatted(node)); } @@ -356,7 +395,7 @@ enum LogicalTypeConversion { } else if (node.isTextual()) { return LocalTime.parse(node.asText()); } else { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to time-millis logical type" .formatted(node)); } @@ -377,7 +416,7 @@ enum LogicalTypeConversion { } else if (node.isTextual()) { return LocalTime.parse(node.asText()); } else { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to time-micros logical type" .formatted(node)); } @@ -398,7 +437,7 @@ enum LogicalTypeConversion { } else if (node.isTextual()) { return Instant.parse(node.asText()); } else { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to timestamp-millis logical type" .formatted(node)); } @@ -423,7 +462,7 @@ enum LogicalTypeConversion { } else if (node.isTextual()) { return Instant.parse(node.asText()); } else { - throw new JsonToAvroConversionException( + throw new JsonAvroConversionException( "node '%s' can't be converted to timestamp-millis logical type" .formatted(node)); }
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java index 0e9c291707e..7c4d79f30d7 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/jsonschema/JsonAvroConversionTest.java @@ -3,6 +3,7 @@ import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertAvroToJson; import static com.provectus.kafka.ui.util.jsonschema.JsonAvroConversion.convertJsonToAvro; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.json.JsonMapper; @@ -13,6 +14,7 @@ import com.fasterxml.jackson.databind.node.LongNode; import com.fasterxml.jackson.databind.node.TextNode; import com.google.common.primitives.Longs; +import com.provectus.kafka.ui.exception.JsonAvroConversionException; import io.confluent.kafka.schemaregistry.avro.AvroSchema; import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -181,12 +183,62 @@ var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); assertThat(record.get("f_union")).isEqualTo(123); - //inner-record's name should be fully-qualified! - jsonPayload = "{ \"f_union\": { \"com.test.TestAvroRecord\": { \"f_union\": { \"int\": 123 } } } }"; + //short name can be used since there is no clash with other type names + jsonPayload = "{ \"f_union\": { \"TestAvroRecord\": { \"f_union\": { \"int\": 123 } } } }"; record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class); var innerRec = (GenericData.Record) record.get("f_union"); assertThat(innerRec.get("f_union")).isEqualTo(123); + + assertThatThrownBy(() -> + convertJsonToAvro("{ \"f_union\": { \"NotExistingType\": 123 } }", schema) + ).isInstanceOf(JsonAvroConversionException.class); + } + + @Test + void unionFieldWithTypeNamesClash() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "nestedClass", + "type": { + "type": "record", + "namespace": "com.nested", + "name": "TestAvroRecord", + "fields": [ + {"name" : "inner_obj_field", "type": "int" } + ] + } + }, + { + "name": "f_union", + "type": [ "null", "int", "com.test.TestAvroRecord", "com.nested.TestAvroRecord"] + } + ] + }""" + ); + //short name can't can be used since there is a clash with other type names + var jsonPayload = "{ \"f_union\": { \"com.test.TestAvroRecord\": { \"f_union\": { \"int\": 123 } } } }"; + var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class); + var innerRec = (GenericData.Record) record.get("f_union"); + assertThat(innerRec.get("f_union")).isEqualTo(123); + + //short name can't can be used since there is a clash with other type names + jsonPayload = "{ \"f_union\": { \"com.nested.TestAvroRecord\": { \"inner_obj_field\": 234 } } }"; + record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); + assertThat(record.get("f_union")).isInstanceOf(GenericData.Record.class); + innerRec = (GenericData.Record) record.get("f_union"); + 
assertThat(innerRec.get("inner_obj_field")).isEqualTo(234); + + assertThatThrownBy(() -> + convertJsonToAvro("{ \"f_union\": { \"TestAvroRecord\": { \"inner_obj_field\": 234 } } }", schema) + ).isInstanceOf(JsonAvroConversionException.class); } @Test @@ -599,6 +651,46 @@ void unionField() { var innerRec = new GenericData.Record(schema); innerRec.put("f_union", 123); r.put("f_union", innerRec); + // short type name can be set since there is NO clash with other types name + assertJsonsEqual( + " { \"f_union\" : { \"TestAvroRecord\" : { \"f_union\" : { \"int\" : 123 } } } }", + convertAvroToJson(r, schema) + ); + } + + @Test + void unionFieldWithInnerTypesNamesClash() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "nestedClass", + "type": { + "type": "record", + "namespace": "com.nested", + "name": "TestAvroRecord", + "fields": [ + {"name" : "inner_obj_field", "type": "int" } + ] + } + }, + { + "name": "f_union", + "type": [ "null", "int", "com.test.TestAvroRecord", "com.nested.TestAvroRecord"] + } + ] + }""" + ); + + var r = new GenericData.Record(schema); + var innerRec = new GenericData.Record(schema); + innerRec.put("f_union", 123); + r.put("f_union", innerRec); + // full type name should be set since there is a clash with other type name assertJsonsEqual( " { \"f_union\" : { \"com.test.TestAvroRecord\" : { \"f_union\" : { \"int\" : 123 } } } }", convertAvroToJson(r, schema)
train
test
2023-06-21T13:12:38
"2023-01-15T14:04:16Z"
Ronserruya
train
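The heart of the gold patch above is the short-versus-full name rule in `selectUnionTypeFieldName`: use the short type name unless another branch of the union shares it. A TypeScript transcription of that rule, assuming each union branch is modeled by a short `name` and a `fullName`, and that the null branch is named `"null"` (the two-element nullable-union shortcut mirrors the Java code):

```typescript
interface AvroType {
  name: string;     // e.g. "TestAvroRecord"
  fullName: string; // e.g. "com.test.TestAvroRecord"
}

// Prefer the short name; fall back to the full name only when another
// branch of the union has the same short name.
function selectUnionTypeFieldName(union: AvroType[], chosenIdx: number): string {
  const chosen = union[chosenIdx];
  // Shortcut from the Java fix: a simple ["null", T] union always uses the short name.
  if (union.length === 2 && union.some((t) => t.fullName === 'null')) {
    return chosen.name;
  }
  const clash = union.some((t, i) => i !== chosenIdx && t.name === chosen.name);
  return clash ? chosen.fullName : chosen.name;
}

const union = [
  { name: 'null', fullName: 'null' },
  { name: 'int', fullName: 'int' },
  { name: 'TestAvroRecord', fullName: 'com.test.TestAvroRecord' },
  { name: 'TestAvroRecord', fullName: 'com.nested.TestAvroRecord' },
];
console.log(selectUnionTypeFieldName(union, 2)); // "com.test.TestAvroRecord" (name clash)
console.log(selectUnionTypeFieldName(union, 1)); // "int" (no clash, short name is enough)
```

This is why the UI now shows `{"string": "serruya"}` rather than `{"top_level.generation.name": ...}` style keys unless disambiguation truly requires the namespace.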
provectus/kafka-ui/3930_3934
provectus/kafka-ui
provectus/kafka-ui/3930
provectus/kafka-ui/3934
[ "keyword_pr_to_issue" ]
100bb1dac6b79c92fdb40d4ddab8e16970a99e77
9a2f6bfc8ee3fce69fb0d03cf1ce185ef8f7e375
[ "Hello there pombuppa! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "@pombuppa thanks for reporting this, could you try the following docker image?\r\n`public.ecr.aws/provectus/kafka-ui-custom-build:3934`\r\n" ]
[]
"2023-06-13T11:13:29Z"
[ "type/bug", "scope/frontend", "status/accepted", "area/wizard" ]
Wizard: SASL_SSL generates wrong case of 'useKeytab'
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) Creating a new cluster with SASL_SSL and Kerberos generates a wrong sasl.jaas.config ``` - bootstrapServers: xxx:9092,xxx:9092,xxx:9092 name: test-cluster properties: security.protocol: SASL_SSL sasl.mechanism: GSSAPI sasl.jaas.config: com.sun.security.auth.module.Krb5LoginModule required useKeytab="true" keyTab="/etc/kafkaui/uploads/kfuat_BDH_CES.client.keytab-1686539412" storeKey="true" principal="[email protected]"; sasl.kerberos.service.name: bigfoot readOnly: true ssl: truststoreLocation: /etc/kafkaui/uploads/my.client.truststore.jks-1686539188 truststorePassword: xxxxx ``` This will cause the following error ``` Caused by: javax.security.auth.login.LoginException: Configuration Error - useKeyTab should be set to true to use the keytab ``` ### Expected behavior sasl.jaas.config must include useKeyTab (with an uppercase T), and the boolean value must not be wrapped in double quotes. ### Your installation details Commit: fdd9ad9 ### Steps to reproduce Create a new cluster with SASL_SSL, Kerberos and a keytab file. ### Screenshots _No response_ ### Logs _No response_ ### Additional context _No response_
[ "kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts", "kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts" ]
[ "kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts", "kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts" ]
[]
diff --git a/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts b/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts index 065612451fc..23578159d44 100644 --- a/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts +++ b/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/getJaasConfig.ts @@ -20,7 +20,13 @@ export const getJaasConfig = ( options: Record<string, string> ) => { const optionsString = Object.entries(options) - .map(([key, value]) => (isUndefined(value) ? null : ` ${key}="${value}"`)) + .map(([key, value]) => { + if (isUndefined(value)) return null; + if (value === 'true' || value === 'false') { + return ` ${key}=${value}`; + } + return ` ${key}="${value}"`; + }) .join(''); return `${JAAS_CONFIGS[method]} required${optionsString};`; diff --git a/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts b/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts index 0a5d6c35235..91f9ad1e289 100644 --- a/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts +++ b/kafka-ui-react-app/src/widgets/ClusterConfigForm/utils/transformFormDataToPayload.ts @@ -122,7 +122,7 @@ export const transformFormDataToPayload = (data: ClusterConfigFormValues) => { 'sasl.mechanism': 'GSSAPI', 'sasl.kerberos.service.name': props.saslKerberosServiceName, 'sasl.jaas.config': getJaasConfig('SASL/GSSAPI', { - useKeytab: props.keyTabFile ? 'true' : 'false', + useKeyTab: props.keyTabFile ? 'true' : 'false', keyTab: props.keyTabFile, storeKey: String(!!props.storeKey), principal: props.principal,
null
test
test
2023-06-12T10:35:38
"2023-06-12T10:37:32Z"
pombuppa
train
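The patch above fixes two things at once: the JAAS option key is `useKeyTab` (capital T), and boolean values are serialized without quotes. A condensed TypeScript sketch of the corrected serializer (simplified from `getJaasConfig.ts`; the keytab path and principal below are placeholders, not values from the repo):

```typescript
// Serialize JAAS options: booleans unquoted, everything else quoted,
// undefined entries skipped -- the behavior the gold patch introduces.
function jaasOptions(options: Record<string, string | undefined>): string {
  return Object.entries(options)
    .map(([key, value]) => {
      if (value === undefined) return null;
      if (value === 'true' || value === 'false') return ` ${key}=${value}`;
      return ` ${key}="${value}"`;
    })
    .filter((s): s is string => s !== null)
    .join('');
}

const jaas =
  'com.sun.security.auth.module.Krb5LoginModule required' +
  jaasOptions({
    useKeyTab: 'true', // key casing matters to the Krb5 login module
    keyTab: '/etc/kafkaui/uploads/client.keytab',
    storeKey: 'true',
    principal: 'user@REALM',
  }) +
  ';';
console.log(jaas);
// com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true
//   keyTab="/etc/kafkaui/uploads/client.keytab" storeKey=true principal="user@REALM";
```

With the old `useKeytab="true"` spelling, the Krb5 login module never sees a valid `useKeyTab` flag, which is exactly the `useKeyTab should be set to true to use the keytab` error reported in the issue.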
provectus/kafka-ui/3932_3935
provectus/kafka-ui
provectus/kafka-ui/3932
provectus/kafka-ui/3935
[ "keyword_pr_to_issue" ]
100bb1dac6b79c92fdb40d4ddab8e16970a99e77
af2cff20b6e127db02236fdf34b182b0720c62dc
[ "@iliax , @Haarolean , @freeformz , @stuarth and @mstolbov Greetings!!!\r\n\r\nI see that this issue is closed but still I see that, when no Active consumer group, i don't see the inactive consumer group in the consumers page. \r\n\r\nI tried with both latest tag image as well as \"docker pull provectuslabs/kafka-ui:master\"\r\n\r\nBoth having the same behavior.\r\n\r\nCould you please let me know when this feature will be available in the latest tag?\r\nEagerly waiting for this feature in the UI.\r\n\r\n\r\nThankyou..." ]
[ "we don't need the `useMemo` here this is not expensive calculation.\r\n` const hasAssignedTopics = consumerGroup?.data?.topics !== 0`", "Okay, thank you.\r\nDone" ]
"2023-06-13T12:12:35Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
Blank page for "Reset offsets" when consumer has no committed offsets
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) Blank page for "Reset offsets" when consumer has no committed offsets ### Expected behavior Page is rendered ### Your installation details [80b748b](https://github.com/provectus/kafka-ui/commit/80b748b) ### Steps to reproduce 1. run consumer group that reads from topic, but does not commit offsets 2. stop CG 3. Consumers -> select created CG -> click "..." -> "Reset offsets" ### Screenshots https://github.com/provectus/kafka-ui/assets/702205/027eeb5d-d548-4905-bc0c-fad3626efa1b ### Logs _No response_ ### Additional context _No response_
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx" ]
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx index 33dd9b48ad5..b8fac73dc1e 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx @@ -54,6 +54,8 @@ const Details: React.FC = () => { ? filteredPartitionsByTopic : Object.keys(partitionsByTopic); + const hasAssignedTopics = consumerGroup?.data?.topics !== 0; + return ( <div> <div> @@ -71,6 +73,7 @@ const Details: React.FC = () => { action: Action.RESET_OFFSETS, value: consumerGroupID, }} + disabled={!hasAssignedTopics} > Reset offset </ActionDropdownItem> diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx index 8d220501940..548ed47f543 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/ResetOffsets/ResetOffsets.tsx @@ -21,7 +21,7 @@ const ResetOffsets: React.FC = () => { return <PageLoader />; const partitions = consumerGroup.data.partitions || []; - const { topic } = partitions[0]; + const { topic } = partitions[0] || ''; const uniqTopics = Array.from( new Set(partitions.map((partition) => partition.topic))
null
train
test
2023-06-12T10:35:38
"2023-06-12T13:28:11Z"
iliax
train
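The blank page above came from destructuring `partitions[0]` when the consumer group has no committed offsets and the array is empty. A small sketch of the failure and a guard, with illustrative types (the merged patch itself uses `partitions[0] || ''`; the nullish fallback here is a cleaner equivalent, not the exact code):

```typescript
interface PartitionOffset {
  topic: string;
  partition: number;
}

function firstTopic(partitions: PartitionOffset[]): string {
  // Before the fix: `const { topic } = partitions[0];` throws
  // "Cannot read properties of undefined" when the array is empty.
  const { topic } = partitions[0] ?? { topic: '' };
  return topic;
}

console.log(firstTopic([]));                                  // "" instead of a crash
console.log(firstTopic([{ topic: 'orders', partition: 0 }])); // "orders"
```

The companion change in `Details.tsx` attacks the same problem one step earlier, by disabling the "Reset offset" action entirely when the group has no assigned topics.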
provectus/kafka-ui/3916_3937
provectus/kafka-ui
provectus/kafka-ui/3916
provectus/kafka-ui/3937
[ "connected" ]
100bb1dac6b79c92fdb40d4ddab8e16970a99e77
03b7d1bd60d78fac83a8725ef1d15e47df4f089f
[ "Hello there sappusaketh! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€" ]
[]
"2023-06-13T14:41:33Z"
[ "type/bug", "scope/backend", "status/accepted" ]
KC: NPE on sanitizing
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) Unable to load connectors page in kafka-ui we have four clusters and this is happening only on one cluster ``` java 500 Server Error for HTTP GET "/api/clusters/dev/connectors" java.lang.NullPointerException: null at java.base/java.util.Objects.requireNonNull(Objects.java:208) Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: Error has been observed at the following site(s): *__checkpoint β‡’ Handler com.provectus.kafka.ui.controller.KafkaConnectController#getAllConnectors(String, String, ConnectorColumnsToSortDTO, SortOrderDTO, ServerWebExchange) [DispatcherHandler] *__checkpoint β‡’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain] *__checkpoint β‡’ AuthorizationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ExceptionTranslationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ LogoutWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerRequestCacheWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ HttpHeaderWriterWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.web.filter.reactive.ServerHttpObservationFilter [DefaultWebFilterChain] *__checkpoint β‡’ HTTP GET "/api/clusters/dev/connectors" [ExceptionHandlingWebHandler] Original Stack Trace: at java.base/java.util.Objects.requireNonNull(Objects.java:208) at java.base/java.util.stream.Collectors.lambda$uniqKeysMapAccumulator$1(Collectors.java:180) at java.base/java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169) at java.base/java.util.HashMap$EntrySpliterator.forEachRemaining(HashMap.java:1850) at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) at com.provectus.kafka.ui.service.KafkaConnectService.lambda$getConnector$25(KafkaConnectService.java:181) at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:106) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.SerializedSubscriber.onNext(SerializedSubscriber.java:99) at reactor.core.publisher.FluxRetryWhen$RetryWhenMainSubscriber.onNext(FluxRetryWhen.java:174) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at 
reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onNext(FluxContextWrite.java:107) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onNext(FluxFilterFuseable.java:337) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071) at reactor.core.publisher.MonoCollect$CollectSubscriber.onComplete(MonoCollect.java:145) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.core.publisher.FluxPeek$PeekSubscriber.onComplete(FluxPeek.java:260) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.netty.channel.FluxReceive.onInboundComplete(FluxReceive.java:415) at reactor.netty.channel.ChannelOperations.onInboundComplete(ChannelOperations.java:431) at reactor.netty.channel.ChannelOperations.terminate(ChannelOperations.java:485) at reactor.netty.http.client.HttpClientOperations.onInboundNext(HttpClientOperations.java:712) at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318) at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1382) at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1245) at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1294) at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:529) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:468) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:290) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800) at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499) at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) ``` I tried setting below config from [here](https://github.com/provectus/kafka-ui/issues/3243#issuecomment-1386574743) still seeing same error ``` - name: WEBCLIENT_MAX_IN_MEMORY_BUFFER_SIZE value: "12500MB" ``` ### Expected behavior Connectors page should load list of connectors if not it should say clearly what the issue is ### Your installation details 1. App version (https://github.com/provectus/kafka-ui/commit/fdd9ad9) 2. No helm chart 3. App config ```yaml apiVersion: apps/v1 kind: Deployment metadata: name: kafka-ui namespace: kafka labels: app: kafka-ui spec: replicas: 1 selector: matchLabels: app: kafka-ui template: metadata: labels: app: kafka-ui spec: containers: - name: kafka-ui image: provectuslabs/kafka-ui:v0.7.0 ports: - containerPort: 8080 env: # dev - name: KAFKA_CLUSTERS_0_NAME value: dev - name: KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS value: <redacted> - name: KAFKA_CLUSTERS_0_SCHEMAREGISTRY value: <redacted> - name: KAFKA_CLUSTERS_0_KSQLDBSERVER value: <redacted> - name: KAFKA_CLUSTERS_0_READONLY value: "False" - name: KAFKA_CLUSTERS_0_KAFKACONNECT_0_NAME value: dev-kafka-connect - name: KAFKA_CLUSTERS_0_KAFKACONNECT_0_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_0_KAFKACONNECT_1_NAME value: dev-sink-s3-connect - name: KAFKA_CLUSTERS_0_KAFKACONNECT_1_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_0_KAFKACONNECT_2_NAME value: dev-source-documentdb-connect - name: KAFKA_CLUSTERS_0_KAFKACONNECT_2_ADDRESS value: <redacted> # yyyy - name: KAFKA_CLUSTERS_1_NAME value: stage - name: KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS value: <redacted> - name: KAFKA_CLUSTERS_1_SCHEMAREGISTRY value: <redacted> - name: KAFKA_CLUSTERS_1_KSQLDBSERVER value: <redacted> - name: KAFKA_CLUSTERS_1_READONLY value: "False" - name: KAFKA_CLUSTERS_1_KAFKACONNECT_0_NAME value: yyyyy-kafka-connect - name: KAFKA_CLUSTERS_1_KAFKACONNECT_0_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_1_KAFKACONNECT_1_NAME value: yyyy-sink-s3-connect - name: KAFKA_CLUSTERS_1_KAFKACONNECT_1_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_1_KAFKACONNECT_2_NAME value: yyyy-source-documentdb-connect - name: KAFKA_CLUSTERS_1_KAFKACONNECT_2_ADDRESS value: <redacted> # zzzzz - name: KAFKA_CLUSTERS_2_NAME value: preprod - name: KAFKA_CLUSTERS_2_BOOTSTRAPSERVERS value: <redacted> - name: KAFKA_CLUSTERS_2_SCHEMAREGISTRY value: <redacted> - name: KAFKA_CLUSTERS_2_KSQLDBSERVER value: <redacted> - name: KAFKA_CLUSTERS_2_READONLY value: "False" - name: 
KAFKA_CLUSTERS_2_KAFKACONNECT_0_NAME value: zzzz-kafka-connect - name: KAFKA_CLUSTERS_2_KAFKACONNECT_0_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_2_KAFKACONNECT_1_NAME value: zzzz-sink-s3-connect - name: KAFKA_CLUSTERS_2_KAFKACONNECT_1_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_2_KAFKACONNECT_2_NAME value: zzzz-source-documentdb-connect - name: KAFKA_CLUSTERS_2_KAFKACONNECT_2_ADDRESS value: <redacted> # xxxxx - name: KAFKA_CLUSTERS_3_NAME value: xxxxx - name: KAFKA_CLUSTERS_3_BOOTSTRAPSERVERS value: <redacted> - name: KAFKA_CLUSTERS_3_SCHEMAREGISTRY value: <redacted> - name: KAFKA_CLUSTERS_3_KSQLDBSERVER value: <redacted> - name: KAFKA_CLUSTERS_3_READONLY value: "False" - name: KAFKA_CLUSTERS_3_KAFKACONNECT_0_NAME value: xxxx-kafka-connect - name: KAFKA_CLUSTERS_3_KAFKACONNECT_0_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_3_KAFKACONNECT_1_NAME value: xxxx-sink-s3-connect - name: KAFKA_CLUSTERS_3_KAFKACONNECT_1_ADDRESS value: <redacted> - name: KAFKA_CLUSTERS_3_KAFKACONNECT_2_NAME value: xxxx-source-documentdb-connect - name: KAFKA_CLUSTERS_3_KAFKACONNECT_2_ADDRESS value: <redacted> - name: WEBCLIENT_MAX_IN_MEMORY_BUFFER_SIZE value: "250MB" ``` ### Steps to reproduce Not sure how to reproduce. This is happening only on specific cluster which has 110 connectors ### Screenshots _No response_ ### Logs ``` java 500 Server Error for HTTP GET "/api/clusters/dev/connectors" java.lang.NullPointerException: null at java.base/java.util.Objects.requireNonNull(Objects.java:208) Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: Error has been observed at the following site(s): *__checkpoint β‡’ Handler com.provectus.kafka.ui.controller.KafkaConnectController#getAllConnectors(String, String, ConnectorColumnsToSortDTO, SortOrderDTO, ServerWebExchange) [DispatcherHandler] *__checkpoint β‡’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain] *__checkpoint β‡’ AuthorizationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ExceptionTranslationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ LogoutWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerRequestCacheWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ HttpHeaderWriterWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.web.filter.reactive.ServerHttpObservationFilter [DefaultWebFilterChain] *__checkpoint β‡’ HTTP GET "/api/clusters/dev/connectors" [ExceptionHandlingWebHandler] Original Stack Trace: at java.base/java.util.Objects.requireNonNull(Objects.java:208) at java.base/java.util.stream.Collectors.lambda$uniqKeysMapAccumulator$1(Collectors.java:180) at java.base/java.util.stream.ReduceOps$3ReducingSink.accept(ReduceOps.java:169) at java.base/java.util.HashMap$EntrySpliterator.forEachRemaining(HashMap.java:1850) at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) at java.base/java.util.stream.ReduceOps$ReduceOp.evaluateSequential(ReduceOps.java:921) at 
java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) at java.base/java.util.stream.ReferencePipeline.collect(ReferencePipeline.java:682) at com.provectus.kafka.ui.service.KafkaConnectService.lambda$getConnector$25(KafkaConnectService.java:181) at reactor.core.publisher.FluxMap$MapSubscriber.onNext(FluxMap.java:106) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.SerializedSubscriber.onNext(SerializedSubscriber.java:99) at reactor.core.publisher.FluxRetryWhen$RetryWhenMainSubscriber.onNext(FluxRetryWhen.java:174) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onNext(FluxContextWrite.java:107) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onNext(FluxFilterFuseable.java:337) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071) at reactor.core.publisher.MonoCollect$CollectSubscriber.onComplete(MonoCollect.java:145) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.core.publisher.FluxPeek$PeekSubscriber.onComplete(FluxPeek.java:260) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.netty.channel.FluxReceive.onInboundComplete(FluxReceive.java:415) at reactor.netty.channel.ChannelOperations.onInboundComplete(ChannelOperations.java:431) at reactor.netty.channel.ChannelOperations.terminate(ChannelOperations.java:485) at reactor.netty.http.client.HttpClientOperations.onInboundNext(HttpClientOperations.java:712) at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318) at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at 
io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1382) at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1245) at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1294) at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:529) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:468) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:290) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800) at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499) at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) ``` ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java index 375afb0fefd..b4cdf144c9f 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConfigSanitizer.java @@ -5,11 +5,13 @@ import com.google.common.collect.ImmutableList; import java.util.Arrays; import java.util.Collection; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.SaslConfigs; import org.apache.kafka.common.config.SslConfigs; @@ -17,7 +19,7 @@ import org.springframework.stereotype.Component; @Component -class KafkaConfigSanitizer { +class KafkaConfigSanitizer { private static final String SANITIZED_VALUE = "******"; @@ -65,10 +67,8 @@ private static Set<String> kafkaConfigKeysToSanitize() { .collect(Collectors.toSet()); } - public Object sanitize(String key, Object value) { - if (value == null) { - return null; - } + @Nullable + public Object sanitize(String key, @Nullable Object value) { for (Pattern pattern : sanitizeKeysPatterns) { if (pattern.matcher(key).matches()) { return SANITIZED_VALUE; @@ -77,5 +77,12 @@ public Object sanitize(String key, Object value) { return value; } + public Map<String, Object> sanitizeConnectorConfig(@Nullable Map<String, Object> original) { + var result = new HashMap<String, Object>(); //null-values supporting map! + if (original != null) { + original.forEach((k, v) -> result.put(k, sanitize(k, v))); + } + return result; + } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java index 98b61541c5f..390348707d3 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java @@ -24,7 +24,6 @@ import com.provectus.kafka.ui.model.TaskDTO; import com.provectus.kafka.ui.model.connect.InternalConnectInfo; import com.provectus.kafka.ui.util.ReactiveFailover; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; @@ -176,19 +175,14 @@ public Mono<ConnectorDTO> getConnector(KafkaCluster cluster, String connectName, e -> emptyStatus(connectorName)) .map(connectorStatus -> { var status = connectorStatus.getConnector(); - final Map<String, Object> obfuscatedConfig = connector.getConfig().entrySet() - .stream() - .collect(Collectors.toMap( - Map.Entry::getKey, - e -> kafkaConfigSanitizer.sanitize(e.getKey(), e.getValue()) - )); - ConnectorDTO result = (ConnectorDTO) new ConnectorDTO() + var sanitizedConfig = kafkaConfigSanitizer.sanitizeConnectorConfig(connector.getConfig()); + ConnectorDTO result = new ConnectorDTO() .connect(connectName) .status(kafkaConnectMapper.fromClient(status)) .type(connector.getType()) .tasks(connector.getTasks()) .name(connector.getName()) - .config(obfuscatedConfig); + .config(sanitizedConfig); if (connectorStatus.getTasks() != null) { boolean isAnyTaskFailed = connectorStatus.getTasks().stream() @@ -217,12 +211,7 @@ public Mono<Map<String, Object>> getConnectorConfig(KafkaCluster cluster, String String connectorName) { return 
api(cluster, connectName) .mono(c -> c.getConnectorConfig(connectorName)) - .map(connectorConfig -> { - final Map<String, Object> obfuscatedMap = new HashMap<>(); - connectorConfig.forEach((key, value) -> - obfuscatedMap.put(key, kafkaConfigSanitizer.sanitize(key, value))); - return obfuscatedMap; - }); + .map(kafkaConfigSanitizer::sanitizeConnectorConfig); } public Mono<ConnectorDTO> setConnectorConfig(KafkaCluster cluster, String connectName,
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java index 6454cd9f2a4..9cab6b2f13f 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/KafkaConfigSanitizerTest.java @@ -3,14 +3,16 @@ import static org.assertj.core.api.Assertions.assertThat; import java.util.Arrays; -import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.junit.jupiter.api.Test; class KafkaConfigSanitizerTest { @Test void doNothingIfEnabledPropertySetToFalse() { - final var sanitizer = new KafkaConfigSanitizer(false, Collections.emptyList()); + final var sanitizer = new KafkaConfigSanitizer(false, List.of()); assertThat(sanitizer.sanitize("password", "secret")).isEqualTo("secret"); assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("secret"); assertThat(sanitizer.sanitize("database.password", "secret")).isEqualTo("secret"); @@ -18,7 +20,7 @@ void doNothingIfEnabledPropertySetToFalse() { @Test void obfuscateCredentials() { - final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList()); + final var sanitizer = new KafkaConfigSanitizer(true, List.of()); assertThat(sanitizer.sanitize("sasl.jaas.config", "secret")).isEqualTo("******"); assertThat(sanitizer.sanitize("consumer.sasl.jaas.config", "secret")).isEqualTo("******"); assertThat(sanitizer.sanitize("producer.sasl.jaas.config", "secret")).isEqualTo("******"); @@ -36,7 +38,7 @@ void obfuscateCredentials() { @Test void notObfuscateNormalConfigs() { - final var sanitizer = new KafkaConfigSanitizer(true, Collections.emptyList()); + final var sanitizer = new KafkaConfigSanitizer(true, List.of()); assertThat(sanitizer.sanitize("security.protocol", "SASL_SSL")).isEqualTo("SASL_SSL"); final String[] bootstrapServer = new String[] {"test1:9092", "test2:9092"}; assertThat(sanitizer.sanitize("bootstrap.servers", bootstrapServer)).isEqualTo(bootstrapServer); @@ -52,4 +54,22 @@ void obfuscateCredentialsWithDefinedPatterns() { assertThat(sanitizer.sanitize("database.password", "no longer credential")) .isEqualTo("no longer credential"); } + + @Test + void sanitizeConnectorConfigDoNotFailOnNullableValues() { + Map<String, Object> originalConfig = new HashMap<>(); + originalConfig.put("password", "secret"); + originalConfig.put("asIs", "normal"); + originalConfig.put("nullVal", null); + + var sanitizedConfig = new KafkaConfigSanitizer(true, List.of()) + .sanitizeConnectorConfig(originalConfig); + + assertThat(sanitizedConfig) + .hasSize(3) + .containsEntry("password", "******") + .containsEntry("asIs", "normal") + .containsEntry("nullVal", null); + } + }
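A note on the design choice visible in both patches above: `Collectors.toMap` cannot build a map that contains null values (its internal `HashMap.merge` call throws a `NullPointerException`), while Kafka Connect configs may legitimately carry them, hence the switch to a hand-filled `HashMap` (flagged in the patch as "null-values supporting map!") and the `nullVal` assertion in the test. A minimal standalone Java sketch of the difference; class and variable names here are illustrative, not from the repo:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class NullValueMapDemo {
  public static void main(String[] args) {
    Map<String, Object> config = new HashMap<>();
    config.put("password", "secret");
    config.put("nullVal", null); // e.g. an unset connector property

    // Collectors.toMap rejects null values: it accumulates entries via
    // HashMap.merge, which throws NullPointerException on a null value.
    try {
      config.entrySet().stream()
          .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    } catch (NullPointerException e) {
      System.out.println("Collectors.toMap rejected the null value");
    }

    // A plain HashMap filled via forEach/put keeps null values intact,
    // which is the approach the sanitizer patch adopts.
    Map<String, Object> sanitized = new HashMap<>();
    config.forEach(sanitized::put);
    System.out.println(sanitized); // contains nullVal=null and password=secret
  }
}
```

Run as-is, the collector attempt fails while the `HashMap` copy round-trips the null entry untouched.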
train
test
2023-06-12T10:35:38
"2023-06-07T14:48:27Z"
sappusaketh
train
provectus/kafka-ui/3894_3941
provectus/kafka-ui
provectus/kafka-ui/3894
provectus/kafka-ui/3941
[ "connected" ]
03b7d1bd60d78fac83a8725ef1d15e47df4f089f
5d23f2a4ed9b4ff0488bfb99a29b19b8acfb8608
[ "Hello there taylorsmithgg! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "I would recommend VS Code theme as a fairly standardized theme:\r\n```\r\n{\r\n\t\"$schema\": \"vscode://schemas/color-theme\",\r\n\t\"type\": \"dark\",\r\n\t\"colors\": {\r\n\t\t\"activityBar.activeBorder\": \"#0078d4\",\r\n\t\t\"activityBar.background\": \"#181818\",\r\n\t\t\"activityBar.border\": \"#ffffff15\",\r\n\t\t\"activityBar.foreground\": \"#d7d7d7\",\r\n\t\t\"activityBar.inactiveForeground\": \"#ffffff80\",\r\n\t\t\"activityBarBadge.background\": \"#0078d4\",\r\n\t\t\"activityBarBadge.foreground\": \"#ffffff\",\r\n\t\t\"badge.background\": \"#0078d4\",\r\n\t\t\"badge.foreground\": \"#ffffff\",\r\n\t\t\"button.background\": \"#0078d4\",\r\n\t\t\"button.border\": \"#ffffff12\",\r\n\t\t\"button.foreground\": \"#ffffff\",\r\n\t\t\"button.hoverBackground\": \"#0078d4e6\",\r\n\t\t\"button.secondaryBackground\": \"#ffffff0f\",\r\n\t\t\"button.secondaryForeground\": \"#cccccc\",\r\n\t\t\"button.secondaryHoverBackground\": \"#ffffff15\",\r\n\t\t\"checkbox.background\": \"#313131\",\r\n\t\t\"checkbox.border\": \"#ffffff1f\",\r\n\t\t\"debugToolBar.background\": \"#181818\",\r\n\t\t\"descriptionForeground\": \"#8b949e\",\r\n\t\t\"diffEditor.insertedLineBackground\": \"#23863633\",\r\n\t\t\"diffEditor.insertedTextBackground\": \"#2386364d\",\r\n\t\t\"diffEditor.removedLineBackground\": \"#da363333\",\r\n\t\t\"diffEditor.removedTextBackground\": \"#da36334d\",\r\n\t\t\"dropdown.background\": \"#313131\",\r\n\t\t\"dropdown.border\": \"#ffffff1f\",\r\n\t\t\"dropdown.foreground\": \"#cccccc\",\r\n\t\t\"dropdown.listBackground\": \"#313131\",\r\n\t\t\"editor.background\": \"#1f1f1f\",\r\n\t\t\"editor.findMatchBackground\": \"#9e6a03\",\r\n\t\t\"editor.foreground\": \"#cccccc\",\r\n\t\t\"editor.inactiveSelectionBackground\": \"#3a3d41\",\r\n\t\t\"editor.selectionHighlightBackground\": \"#add6ff26\",\r\n\t\t\"editorGroup.border\": \"#ffffff17\",\r\n\t\t\"editorGroupHeader.tabsBackground\": \"#181818\",\r\n\t\t\"editorGroupHeader.tabsBorder\": \"#ffffff15\",\r\n\t\t\"editorGutter.addedBackground\": \"#2ea043\",\r\n\t\t\"editorGutter.deletedBackground\": \"#f85149\",\r\n\t\t\"editorGutter.modifiedBackground\": \"#0078d4\",\r\n\t\t\"editorIndentGuide.activeBackground\": \"#707070\",\r\n\t\t\"editorIndentGuide.background\": \"#404040\",\r\n\t\t\"editorInlayHint.background\": \"#8b949e33\",\r\n\t\t\"editorInlayHint.foreground\": \"#8b949e\",\r\n\t\t\"editorInlayHint.typeBackground\": \"#8b949e33\",\r\n\t\t\"editorInlayHint.typeForeground\": \"#8b949e\",\r\n\t\t\"editorLineNumber.activeForeground\": \"#cccccc\",\r\n\t\t\"editorLineNumber.foreground\": \"#6e7681\",\r\n\t\t\"editorOverviewRuler.border\": \"#010409\",\r\n\t\t\"editorWidget.background\": \"#1f1f1f\",\r\n\t\t\"errorForeground\": \"#f85149\",\r\n\t\t\"focusBorder\": \"#0078d4\",\r\n\t\t\"foreground\": \"#cccccc\",\r\n\t\t\"icon.foreground\": \"#cccccc\",\r\n\t\t\"input.background\": \"#2a2a2a\",\r\n\t\t\"input.border\": \"#ffffff1f\",\r\n\t\t\"input.foreground\": \"#cccccc\",\r\n\t\t\"input.placeholderForeground\": \"#ffffff79\",\r\n\t\t\"inputOption.activeBackground\": \"#2489db82\",\r\n\t\t\"inputOption.activeBorder\": \"#2488db\",\r\n\t\t\"keybindingLabel.foreground\": \"#cccccc\",\r\n\t\t\"list.activeSelectionBackground\": \"#323232\",\r\n\t\t\"list.activeSelectionForeground\": 
\"#ffffff\",\r\n\t\t\"list.activeSelectionIconForeground\": \"#ffffff\",\r\n\t\t\"list.dropBackground\": \"#383b3d\",\r\n\t\t\"menu.background\": \"#1f1f1f\",\r\n\t\t\"menu.border\": \"#454545\",\r\n\t\t\"menu.foreground\": \"#cccccc\",\r\n\t\t\"menu.separatorBackground\": \"#454545\",\r\n\t\t\"notificationCenterHeader.background\": \"#1f1f1f\",\r\n\t\t\"notificationCenterHeader.foreground\": \"#cccccc\",\r\n\t\t\"notifications.background\": \"#1f1f1f\",\r\n\t\t\"notifications.border\": \"#ffffff15\",\r\n\t\t\"notifications.foreground\": \"#cccccc\",\r\n\t\t\"panel.background\": \"#181818\",\r\n\t\t\"panel.border\": \"#ffffff15\",\r\n\t\t\"panelInput.border\": \"#ffffff15\",\r\n\t\t\"panelTitle.activeBorder\": \"#0078d4\",\r\n\t\t\"panelTitle.activeForeground\": \"#cccccc\",\r\n\t\t\"panelTitle.inactiveForeground\": \"#8b949e\",\r\n\t\t\"peekViewEditor.background\": \"#1f1f1f\",\r\n\t\t\"peekViewEditor.matchHighlightBackground\": \"#bb800966\",\r\n\t\t\"peekViewResult.background\": \"#1f1f1f\",\r\n\t\t\"peekViewResult.matchHighlightBackground\": \"#bb800966\",\r\n\t\t\"pickerGroup.border\": \"#ffffff15\",\r\n\t\t\"pickerGroup.foreground\": \"#8b949e\",\r\n\t\t\"ports.iconRunningProcessForeground\": \"#369432\",\r\n\t\t\"progressBar.background\": \"#0078d4\",\r\n\t\t\"quickInput.background\": \"#1f1f1f\",\r\n\t\t\"quickInput.foreground\": \"#cccccc\",\r\n\t\t\"scrollbar.shadow\": \"#484f5833\",\r\n\t\t\"scrollbarSlider.activeBackground\": \"#6e768187\",\r\n\t\t\"scrollbarSlider.background\": \"#6e768133\",\r\n\t\t\"scrollbarSlider.hoverBackground\": \"#6e768145\",\r\n\t\t\"settings.dropdownBackground\": \"#313131\",\r\n\t\t\"settings.dropdownBorder\": \"#ffffff1f\",\r\n\t\t\"settings.headerForeground\": \"#ffffff\",\r\n\t\t\"settings.modifiedItemIndicator\": \"#bb800966\",\r\n\t\t\"sideBar.background\": \"#181818\",\r\n\t\t\"sideBar.border\": \"#ffffff15\",\r\n\t\t\"sideBar.foreground\": \"#cccccc\",\r\n\t\t\"sideBarSectionHeader.background\": \"#181818\",\r\n\t\t\"sideBarSectionHeader.border\": \"#ffffff15\",\r\n\t\t\"sideBarSectionHeader.foreground\": \"#cccccc\",\r\n\t\t\"sideBarTitle.foreground\": \"#cccccc\",\r\n\t\t\"statusBar.background\": \"#181818\",\r\n\t\t\"statusBar.border\": \"#ffffff15\",\r\n\t\t\"statusBar.debuggingBackground\": \"#0078d4\",\r\n\t\t\"statusBar.debuggingForeground\": \"#ffffff\",\r\n\t\t\"statusBar.foreground\": \"#cccccc\",\r\n\t\t\"statusBar.noFolderBackground\": \"#1f1f1f\",\r\n\t\t\"statusBarItem.focusBorder\": \"#0078d4\",\r\n\t\t\"statusBarItem.prominentBackground\": \"#6e768166\",\r\n\t\t\"statusBarItem.remoteBackground\": \"#0078d4\",\r\n\t\t\"statusBarItem.remoteForeground\": \"#ffffff\",\r\n\t\t\"tab.activeBackground\": \"#1f1f1f\",\r\n\t\t\"tab.activeBorder\": \"#1f1f1f\",\r\n\t\t\"tab.activeBorderTop\": \"#0078d4\",\r\n\t\t\"tab.activeForeground\": \"#ffffff\",\r\n\t\t\"tab.border\": \"#ffffff15\",\r\n\t\t\"tab.hoverBackground\": \"#1f1f1f\",\r\n\t\t\"tab.inactiveBackground\": \"#181818\",\r\n\t\t\"tab.inactiveForeground\": \"#ffffff80\",\r\n\t\t\"tab.lastPinnedBorder\": \"#cccccc33\",\r\n\t\t\"tab.unfocusedActiveBorder\": \"#1f1f1f\",\r\n\t\t\"tab.unfocusedActiveBorderTop\": \"#ffffff15\",\r\n\t\t\"tab.unfocusedHoverBackground\": \"#6e76811a\",\r\n\t\t\"terminal.foreground\": \"#cccccc\",\r\n\t\t\"terminal.inactiveSelectionBackground\": \"#3a3d41\",\r\n\t\t\"terminal.tab.activeBorder\": \"#0078d4\",\r\n\t\t\"textBlockQuote.background\": \"#010409\",\r\n\t\t\"textBlockQuote.border\": \"#ffffff14\",\r\n\t\t\"textCodeBlock.background\": 
\"#6e768166\",\r\n\t\t\"textLink.activeForeground\": \"#40a6ff\",\r\n\t\t\"textLink.foreground\": \"#40a6ff\",\r\n\t\t\"textSeparator.foreground\": \"#21262d\",\r\n\t\t\"titleBar.activeBackground\": \"#181818\",\r\n\t\t\"titleBar.activeForeground\": \"#cccccc\",\r\n\t\t\"titleBar.border\": \"#ffffff15\",\r\n\t\t\"titleBar.inactiveBackground\": \"#1f1f1f\",\r\n\t\t\"titleBar.inactiveForeground\": \"#8b949e\",\r\n\t\t\"welcomePage.progress.foreground\": \"#0078d4\",\r\n\t\t\"welcomePage.tileBackground\": \"#ffffff0f\",\r\n\t\t\"widget.border\": \"#ffffff15\",\r\n\t\t//\"activityBar.dropBorder\": \"#d7d7d7\",\r\n\t\t//\"banner.background\": \"#323232\",\r\n\t\t//\"banner.foreground\": \"#ffffff\",\r\n\t\t//\"banner.iconForeground\": \"#3794ff\",\r\n\t\t//\"breadcrumb.activeSelectionForeground\": \"#e0e0e0\",\r\n\t\t//\"breadcrumb.background\": \"#1f1f1f\",\r\n\t\t//\"breadcrumb.focusForeground\": \"#e0e0e0\",\r\n\t\t//\"breadcrumb.foreground\": \"#cccccccc\",\r\n\t\t//\"breadcrumbPicker.background\": \"#1f1f1f\",\r\n\t\t//\"button.separator\": \"#ffffff66\",\r\n\t\t//\"charts.blue\": \"#3794ff\",\r\n\t\t//\"charts.foreground\": \"#cccccc\",\r\n\t\t//\"charts.green\": \"#89d185\",\r\n\t\t//\"charts.lines\": \"#cccccc80\",\r\n\t\t//\"charts.orange\": \"#d18616\",\r\n\t\t//\"charts.purple\": \"#b180d7\",\r\n\t\t//\"charts.red\": \"#f14c4c\",\r\n\t\t//\"charts.yellow\": \"#cca700\",\r\n\t\t//\"checkbox.foreground\": \"#cccccc\",\r\n\t\t//\"checkbox.selectBackground\": \"#1f1f1f\",\r\n\t\t//\"checkbox.selectBorder\": \"#cccccc\",\r\n\t\t//\"commandCenter.activeBackground\": \"#ffffff14\",\r\n\t\t//\"commandCenter.activeBorder\": \"#cccccc4d\",\r\n\t\t//\"commandCenter.activeForeground\": \"#cccccc\",\r\n\t\t//\"commandCenter.background\": \"#ffffff0d\",\r\n\t\t//\"commandCenter.border\": \"#cccccc33\",\r\n\t\t//\"commandCenter.foreground\": \"#cccccc\",\r\n\t\t//\"commandCenter.inactiveBorder\": \"#8b949e40\",\r\n\t\t//\"commandCenter.inactiveForeground\": \"#8b949e\",\r\n\t\t//\"debugConsole.errorForeground\": \"#f85149\",\r\n\t\t//\"debugConsole.infoForeground\": \"#3794ff\",\r\n\t\t//\"debugConsole.sourceForeground\": \"#cccccc\",\r\n\t\t//\"debugConsole.warningForeground\": \"#cca700\",\r\n\t\t//\"debugConsoleInputIcon.foreground\": \"#cccccc\",\r\n\t\t//\"debugExceptionWidget.background\": \"#420b0d\",\r\n\t\t//\"debugExceptionWidget.border\": \"#a31515\",\r\n\t\t//\"debugIcon.breakpointCurrentStackframeForeground\": \"#ffcc00\",\r\n\t\t//\"debugIcon.breakpointDisabledForeground\": \"#848484\",\r\n\t\t//\"debugIcon.breakpointForeground\": \"#e51400\",\r\n\t\t//\"debugIcon.breakpointStackframeForeground\": \"#89d185\",\r\n\t\t//\"debugIcon.breakpointUnverifiedForeground\": \"#848484\",\r\n\t\t//\"debugIcon.continueForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.disconnectForeground\": \"#f48771\",\r\n\t\t//\"debugIcon.pauseForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.restartForeground\": \"#89d185\",\r\n\t\t//\"debugIcon.startForeground\": \"#89d185\",\r\n\t\t//\"debugIcon.stepBackForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.stepIntoForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.stepOutForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.stepOverForeground\": \"#75beff\",\r\n\t\t//\"debugIcon.stopForeground\": \"#f48771\",\r\n\t\t//\"debugTokenExpression.boolean\": \"#4e94ce\",\r\n\t\t//\"debugTokenExpression.error\": \"#f48771\",\r\n\t\t//\"debugTokenExpression.name\": \"#c586c0\",\r\n\t\t//\"debugTokenExpression.number\": \"#b5cea8\",\r\n\t\t//\"debugTokenExpression.string\": 
\"#ce9178\",\r\n\t\t//\"debugTokenExpression.value\": \"#cccccc99\",\r\n\t\t//\"debugView.exceptionLabelBackground\": \"#6c2022\",\r\n\t\t//\"debugView.exceptionLabelForeground\": \"#cccccc\",\r\n\t\t//\"debugView.stateLabelBackground\": \"#88888844\",\r\n\t\t//\"debugView.stateLabelForeground\": \"#cccccc\",\r\n\t\t//\"debugView.valueChangedHighlight\": \"#569cd6\",\r\n\t\t//\"diffEditor.diagonalFill\": \"#cccccc33\",\r\n\t\t//\"disabledForeground\": \"#cccccc80\",\r\n\t\t//\"editor.findMatchHighlightBackground\": \"#ea5c0055\",\r\n\t\t//\"editor.findRangeHighlightBackground\": \"#3a3d4166\",\r\n\t\t//\"editor.focusedStackFrameHighlightBackground\": \"#7abd7a4d\",\r\n\t\t//\"editor.foldBackground\": \"#264f784d\",\r\n\t\t//\"editor.hoverHighlightBackground\": \"#264f7840\",\r\n\t\t//\"editor.inlineValuesBackground\": \"#ffc80033\",\r\n\t\t//\"editor.inlineValuesForeground\": \"#ffffff80\",\r\n\t\t//\"editor.lineHighlightBorder\": \"#282828\",\r\n\t\t//\"editor.linkedEditingBackground\": \"#ff00004d\",\r\n\t\t//\"editor.rangeHighlightBackground\": \"#ffffff0b\",\r\n\t\t//\"editor.selectionBackground\": \"#264f78\",\r\n\t\t//\"editor.snippetFinalTabstopHighlightBorder\": \"#525252\",\r\n\t\t//\"editor.snippetTabstopHighlightBackground\": \"#7c7c7c4d\",\r\n\t\t//\"editor.stackFrameHighlightBackground\": \"#ffff0033\",\r\n\t\t//\"editor.symbolHighlightBackground\": \"#ea5c0055\",\r\n\t\t//\"editor.wordHighlightBackground\": \"#575757b8\",\r\n\t\t//\"editor.wordHighlightStrongBackground\": \"#004972b8\",\r\n\t\t//\"editor.wordHighlightTextBackground\": \"#575757b8\",\r\n\t\t//\"editorActiveLineNumber.foreground\": \"#c6c6c6\",\r\n\t\t//\"editorBracketHighlight.foreground1\": \"#ffd700\",\r\n\t\t//\"editorBracketHighlight.foreground2\": \"#da70d6\",\r\n\t\t//\"editorBracketHighlight.foreground3\": \"#179fff\",\r\n\t\t//\"editorBracketHighlight.foreground4\": \"#00000000\",\r\n\t\t//\"editorBracketHighlight.foreground5\": \"#00000000\",\r\n\t\t//\"editorBracketHighlight.foreground6\": \"#00000000\",\r\n\t\t//\"editorBracketHighlight.unexpectedBracket.foreground\": \"#ff1212cc\",\r\n\t\t//\"editorBracketMatch.background\": \"#0064001a\",\r\n\t\t//\"editorBracketMatch.border\": \"#888888\",\r\n\t\t//\"editorBracketPairGuide.activeBackground1\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.activeBackground2\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.activeBackground3\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.activeBackground4\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.activeBackground5\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.activeBackground6\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background1\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background2\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background3\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background4\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background5\": \"#00000000\",\r\n\t\t//\"editorBracketPairGuide.background6\": \"#00000000\",\r\n\t\t//\"editorCodeLens.foreground\": \"#999999\",\r\n\t\t//\"editorCommentsWidget.rangeActiveBackground\": \"#3794ff1a\",\r\n\t\t//\"editorCommentsWidget.rangeActiveBorder\": \"#3794ff66\",\r\n\t\t//\"editorCommentsWidget.rangeBackground\": \"#3794ff1a\",\r\n\t\t//\"editorCommentsWidget.rangeBorder\": \"#3794ff66\",\r\n\t\t//\"editorCommentsWidget.resolvedBorder\": \"#cccccc80\",\r\n\t\t//\"editorCommentsWidget.unresolvedBorder\": \"#3794ff\",\r\n\t\t//\"editorCursor.foreground\": 
\"#aeafad\",\r\n\t\t//\"editorError.foreground\": \"#f14c4c\",\r\n\t\t//\"editorGhostText.foreground\": \"#ffffff56\",\r\n\t\t//\"editorGroup.dropBackground\": \"#53595d80\",\r\n\t\t//\"editorGroup.dropIntoPromptBackground\": \"#1f1f1f\",\r\n\t\t//\"editorGroup.dropIntoPromptForeground\": \"#cccccc\",\r\n\t\t//\"editorGroupHeader.noTabsBackground\": \"#1f1f1f\",\r\n\t\t//\"editorGutter.background\": \"#1f1f1f\",\r\n\t\t//\"editorGutter.commentGlyphForeground\": \"#cccccc\",\r\n\t\t//\"editorGutter.commentRangeForeground\": \"#37373d\",\r\n\t\t//\"editorGutter.commentUnresolvedGlyphForeground\": \"#cccccc\",\r\n\t\t//\"editorGutter.foldingControlForeground\": \"#cccccc\",\r\n\t\t//\"editorHint.foreground\": \"#eeeeeeb3\",\r\n\t\t//\"editorHoverWidget.background\": \"#1f1f1f\",\r\n\t\t//\"editorHoverWidget.border\": \"#454545\",\r\n\t\t//\"editorHoverWidget.foreground\": \"#cccccc\",\r\n\t\t//\"editorHoverWidget.highlightForeground\": \"#2aaaff\",\r\n\t\t//\"editorHoverWidget.statusBarBackground\": \"#252525\",\r\n\t\t//\"editorInfo.foreground\": \"#3794ff\",\r\n\t\t//\"editorInlayHint.parameterBackground\": \"#8b949e33\",\r\n\t\t//\"editorInlayHint.parameterForeground\": \"#8b949e\",\r\n\t\t//\"editorLightBulb.foreground\": \"#ffcc00\",\r\n\t\t//\"editorLightBulbAutoFix.foreground\": \"#75beff\",\r\n\t\t//\"editorLink.activeForeground\": \"#4e94ce\",\r\n\t\t//\"editorMarkerNavigation.background\": \"#1f1f1f\",\r\n\t\t//\"editorMarkerNavigationError.background\": \"#f14c4c\",\r\n\t\t//\"editorMarkerNavigationError.headerBackground\": \"#f14c4c1a\",\r\n\t\t//\"editorMarkerNavigationInfo.background\": \"#3794ff\",\r\n\t\t//\"editorMarkerNavigationInfo.headerBackground\": \"#3794ff1a\",\r\n\t\t//\"editorMarkerNavigationWarning.background\": \"#cca700\",\r\n\t\t//\"editorMarkerNavigationWarning.headerBackground\": \"#cca7001a\",\r\n\t\t//\"editorOverviewRuler.addedForeground\": \"#2ea04399\",\r\n\t\t//\"editorOverviewRuler.bracketMatchForeground\": \"#a0a0a0\",\r\n\t\t//\"editorOverviewRuler.commonContentForeground\": \"#60606066\",\r\n\t\t//\"editorOverviewRuler.currentContentForeground\": \"#40c8ae80\",\r\n\t\t//\"editorOverviewRuler.deletedForeground\": \"#f8514999\",\r\n\t\t//\"editorOverviewRuler.errorForeground\": \"#ff1212b3\",\r\n\t\t//\"editorOverviewRuler.findMatchForeground\": \"#d186167e\",\r\n\t\t//\"editorOverviewRuler.incomingContentForeground\": \"#40a6ff80\",\r\n\t\t//\"editorOverviewRuler.infoForeground\": \"#3794ff\",\r\n\t\t//\"editorOverviewRuler.modifiedForeground\": \"#0078d499\",\r\n\t\t//\"editorOverviewRuler.rangeHighlightForeground\": \"#007acc99\",\r\n\t\t//\"editorOverviewRuler.selectionHighlightForeground\": \"#a0a0a0cc\",\r\n\t\t//\"editorOverviewRuler.warningForeground\": \"#cca700\",\r\n\t\t//\"editorOverviewRuler.wordHighlightForeground\": \"#a0a0a0cc\",\r\n\t\t//\"editorOverviewRuler.wordHighlightStrongForeground\": \"#c0a0c0cc\",\r\n\t\t//\"editorOverviewRuler.wordHighlightTextForeground\": \"#a0a0a0cc\",\r\n\t\t//\"editorPane.background\": \"#1f1f1f\",\r\n\t\t//\"editorRuler.foreground\": \"#5a5a5a\",\r\n\t\t//\"editorStickyScroll.background\": \"#1f1f1f\",\r\n\t\t//\"editorStickyScrollHover.background\": \"#2a2d2e\",\r\n\t\t//\"editorSuggestWidget.background\": \"#1f1f1f\",\r\n\t\t//\"editorSuggestWidget.border\": \"#454545\",\r\n\t\t//\"editorSuggestWidget.focusHighlightForeground\": \"#2aaaff\",\r\n\t\t//\"editorSuggestWidget.foreground\": \"#cccccc\",\r\n\t\t//\"editorSuggestWidget.highlightForeground\": 
\"#2aaaff\",\r\n\t\t//\"editorSuggestWidget.selectedBackground\": \"#323232\",\r\n\t\t//\"editorSuggestWidget.selectedForeground\": \"#ffffff\",\r\n\t\t//\"editorSuggestWidget.selectedIconForeground\": \"#ffffff\",\r\n\t\t//\"editorSuggestWidgetStatus.foreground\": \"#cccccc80\",\r\n\t\t//\"editorUnicodeHighlight.background\": \"#bd9b0326\",\r\n\t\t//\"editorUnicodeHighlight.border\": \"#bd9b03\",\r\n\t\t//\"editorUnnecessaryCode.opacity\": \"#000000aa\",\r\n\t\t//\"editorWarning.foreground\": \"#cca700\",\r\n\t\t//\"editorWhitespace.foreground\": \"#e3e4e229\",\r\n\t\t//\"editorWidget.border\": \"#454545\",\r\n\t\t//\"editorWidget.foreground\": \"#cccccc\",\r\n\t\t//\"extensionBadge.remoteBackground\": \"#0078d4\",\r\n\t\t//\"extensionBadge.remoteForeground\": \"#ffffff\",\r\n\t\t//\"extensionButton.background\": \"#0078d4\",\r\n\t\t//\"extensionButton.foreground\": \"#ffffff\",\r\n\t\t//\"extensionButton.hoverBackground\": \"#0078d4e6\",\r\n\t\t//\"extensionButton.prominentBackground\": \"#0078d4\",\r\n\t\t//\"extensionButton.prominentForeground\": \"#ffffff\",\r\n\t\t//\"extensionButton.prominentHoverBackground\": \"#0078d4e6\",\r\n\t\t//\"extensionButton.separator\": \"#ffffff66\",\r\n\t\t//\"extensionIcon.preReleaseForeground\": \"#1d9271\",\r\n\t\t//\"extensionIcon.sponsorForeground\": \"#d758b3\",\r\n\t\t//\"extensionIcon.starForeground\": \"#ff8e00\",\r\n\t\t//\"extensionIcon.verifiedForeground\": \"#40a6ff\",\r\n\t\t//\"gitDecoration.addedResourceForeground\": \"#81b88b\",\r\n\t\t//\"gitDecoration.conflictingResourceForeground\": \"#e4676b\",\r\n\t\t//\"gitDecoration.deletedResourceForeground\": \"#c74e39\",\r\n\t\t//\"gitDecoration.ignoredResourceForeground\": \"#8c8c8c\",\r\n\t\t//\"gitDecoration.modifiedResourceForeground\": \"#e2c08d\",\r\n\t\t//\"gitDecoration.renamedResourceForeground\": \"#73c991\",\r\n\t\t//\"gitDecoration.stageDeletedResourceForeground\": \"#c74e39\",\r\n\t\t//\"gitDecoration.stageModifiedResourceForeground\": \"#e2c08d\",\r\n\t\t//\"gitDecoration.submoduleResourceForeground\": \"#8db9e2\",\r\n\t\t//\"gitDecoration.untrackedResourceForeground\": \"#73c991\",\r\n\t\t//\"inputOption.activeForeground\": \"#ffffff\",\r\n\t\t//\"inputOption.hoverBackground\": \"#5a5d5e80\",\r\n\t\t//\"inputValidation.errorBackground\": \"#5a1d1d\",\r\n\t\t//\"inputValidation.errorBorder\": \"#be1100\",\r\n\t\t//\"inputValidation.infoBackground\": \"#063b49\",\r\n\t\t//\"inputValidation.infoBorder\": \"#007acc\",\r\n\t\t//\"inputValidation.warningBackground\": \"#352a05\",\r\n\t\t//\"inputValidation.warningBorder\": \"#b89500\",\r\n\t\t//\"interactive.activeCodeBorder\": \"#3794ff\",\r\n\t\t//\"interactive.inactiveCodeBorder\": \"#37373d\",\r\n\t\t//\"interactive.requestBackground\": \"#ffffff08\",\r\n\t\t//\"interactive.requestBorder\": \"#ffffff1a\",\r\n\t\t//\"interactiveEditor.border\": \"#454545\",\r\n\t\t//\"interactiveEditor.regionHighlight\": \"#264f7840\",\r\n\t\t//\"interactiveEditor.shadow\": \"#0000005c\",\r\n\t\t//\"interactiveEditorDiff.inserted\": \"#23863627\",\r\n\t\t//\"interactiveEditorDiff.removed\": \"#da363327\",\r\n\t\t//\"interactiveEditorInput.background\": \"#2a2a2a\",\r\n\t\t//\"interactiveEditorInput.border\": \"#454545\",\r\n\t\t//\"interactiveEditorInput.focusBorder\": \"#0078d4\",\r\n\t\t//\"interactiveEditorInput.placeholderForeground\": \"#ffffff79\",\r\n\t\t//\"keybindingLabel.background\": \"#8080802b\",\r\n\t\t//\"keybindingLabel.border\": \"#33333399\",\r\n\t\t//\"keybindingLabel.bottomBorder\": 
\"#44444499\",\r\n\t\t//\"keybindingTable.headerBackground\": \"#cccccc0a\",\r\n\t\t//\"keybindingTable.rowsBackground\": \"#cccccc0a\",\r\n\t\t//\"list.deemphasizedForeground\": \"#8c8c8c\",\r\n\t\t//\"list.errorForeground\": \"#f88070\",\r\n\t\t//\"list.filterMatchBackground\": \"#ea5c0055\",\r\n\t\t//\"list.focusHighlightForeground\": \"#2aaaff\",\r\n\t\t//\"list.focusOutline\": \"#0078d4\",\r\n\t\t//\"list.highlightForeground\": \"#2aaaff\",\r\n\t\t//\"list.hoverBackground\": \"#2a2d2e\",\r\n\t\t//\"list.inactiveSelectionBackground\": \"#37373d\",\r\n\t\t//\"list.invalidItemForeground\": \"#b89500\",\r\n\t\t//\"list.warningForeground\": \"#cca700\",\r\n\t\t//\"listFilterWidget.background\": \"#1f1f1f\",\r\n\t\t//\"listFilterWidget.noMatchesOutline\": \"#be1100\",\r\n\t\t//\"listFilterWidget.outline\": \"#00000000\",\r\n\t\t//\"listFilterWidget.shadow\": \"#0000005c\",\r\n\t\t//\"menu.selectionBackground\": \"#323232\",\r\n\t\t//\"menu.selectionForeground\": \"#ffffff\",\r\n\t\t//\"menubar.selectionBackground\": \"#5a5d5e50\",\r\n\t\t//\"menubar.selectionForeground\": \"#cccccc\",\r\n\t\t//\"merge.commonContentBackground\": \"#60606029\",\r\n\t\t//\"merge.commonHeaderBackground\": \"#60606066\",\r\n\t\t//\"merge.currentContentBackground\": \"#40c8ae33\",\r\n\t\t//\"merge.currentHeaderBackground\": \"#40c8ae80\",\r\n\t\t//\"merge.incomingContentBackground\": \"#40a6ff33\",\r\n\t\t//\"merge.incomingHeaderBackground\": \"#40a6ff80\",\r\n\t\t//\"mergeEditor.change.background\": \"#9bb95533\",\r\n\t\t//\"mergeEditor.change.word.background\": \"#9ccc2c33\",\r\n\t\t//\"mergeEditor.changeBase.background\": \"#4b1818\",\r\n\t\t//\"mergeEditor.changeBase.word.background\": \"#6f1313\",\r\n\t\t//\"mergeEditor.conflict.handled.minimapOverViewRuler\": \"#adaca8ee\",\r\n\t\t//\"mergeEditor.conflict.handledFocused.border\": \"#c1c1c1cc\",\r\n\t\t//\"mergeEditor.conflict.handledUnfocused.border\": \"#86868649\",\r\n\t\t//\"mergeEditor.conflict.input1.background\": \"#40c8ae33\",\r\n\t\t//\"mergeEditor.conflict.input2.background\": \"#40a6ff33\",\r\n\t\t//\"mergeEditor.conflict.unhandled.minimapOverViewRuler\": \"#fcba03\",\r\n\t\t//\"mergeEditor.conflict.unhandledFocused.border\": \"#ffa600\",\r\n\t\t//\"mergeEditor.conflict.unhandledUnfocused.border\": \"#ffa6007a\",\r\n\t\t//\"mergeEditor.conflictingLines.background\": \"#ffea0047\",\r\n\t\t//\"minimap.errorHighlight\": \"#ff1212b3\",\r\n\t\t//\"minimap.findMatchHighlight\": \"#d18616\",\r\n\t\t//\"minimap.foregroundOpacity\": \"#000000\",\r\n\t\t//\"minimap.selectionHighlight\": \"#264f78\",\r\n\t\t//\"minimap.selectionOccurrenceHighlight\": \"#676767\",\r\n\t\t//\"minimap.warningHighlight\": \"#cca700\",\r\n\t\t//\"minimapGutter.addedBackground\": \"#2ea043\",\r\n\t\t//\"minimapGutter.deletedBackground\": \"#f85149\",\r\n\t\t//\"minimapGutter.modifiedBackground\": \"#0078d4\",\r\n\t\t//\"minimapSlider.activeBackground\": \"#6e768144\",\r\n\t\t//\"minimapSlider.background\": \"#6e76811a\",\r\n\t\t//\"minimapSlider.hoverBackground\": \"#6e768123\",\r\n\t\t//\"notebook.cellBorderColor\": \"#37373d\",\r\n\t\t//\"notebook.cellEditorBackground\": \"#181818\",\r\n\t\t//\"notebook.cellInsertionIndicator\": \"#0078d4\",\r\n\t\t//\"notebook.cellStatusBarItemHoverBackground\": \"#ffffff26\",\r\n\t\t//\"notebook.cellToolbarSeparator\": \"#80808059\",\r\n\t\t//\"notebook.editorBackground\": \"#1f1f1f\",\r\n\t\t//\"notebook.focusedCellBorder\": \"#0078d4\",\r\n\t\t//\"notebook.focusedEditorBorder\": \"#0078d4\",\r\n\t\t//\"notebook.inactiveFocusedCellBorder\": 
\"#37373d\",\r\n\t\t//\"notebook.selectedCellBackground\": \"#37373d\",\r\n\t\t//\"notebook.selectedCellBorder\": \"#37373d\",\r\n\t\t//\"notebook.symbolHighlightBackground\": \"#ffffff0b\",\r\n\t\t//\"notebookEditorOverviewRuler.runningCellForeground\": \"#89d185\",\r\n\t\t//\"notebookScrollbarSlider.activeBackground\": \"#6e768187\",\r\n\t\t//\"notebookScrollbarSlider.background\": \"#6e768133\",\r\n\t\t//\"notebookScrollbarSlider.hoverBackground\": \"#6e768145\",\r\n\t\t//\"notebookStatusErrorIcon.foreground\": \"#f85149\",\r\n\t\t//\"notebookStatusRunningIcon.foreground\": \"#cccccc\",\r\n\t\t//\"notebookStatusSuccessIcon.foreground\": \"#89d185\",\r\n\t\t//\"notificationCenter.border\": \"#ffffff15\",\r\n\t\t//\"notificationLink.foreground\": \"#40a6ff\",\r\n\t\t//\"notificationToast.border\": \"#ffffff15\",\r\n\t\t//\"notificationsErrorIcon.foreground\": \"#f14c4c\",\r\n\t\t//\"notificationsInfoIcon.foreground\": \"#3794ff\",\r\n\t\t//\"notificationsWarningIcon.foreground\": \"#cca700\",\r\n\t\t//\"panel.dropBorder\": \"#cccccc\",\r\n\t\t//\"panelSection.border\": \"#ffffff15\",\r\n\t\t//\"panelSection.dropBackground\": \"#53595d80\",\r\n\t\t//\"panelSectionHeader.background\": \"#80808033\",\r\n\t\t//\"peekView.border\": \"#3794ff\",\r\n\t\t//\"peekViewEditorGutter.background\": \"#1f1f1f\",\r\n\t\t//\"peekViewEditorStickyScroll.background\": \"#1f1f1f\",\r\n\t\t//\"peekViewResult.fileForeground\": \"#ffffff\",\r\n\t\t//\"peekViewResult.lineForeground\": \"#bbbbbb\",\r\n\t\t//\"peekViewResult.selectionBackground\": \"#3399ff33\",\r\n\t\t//\"peekViewResult.selectionForeground\": \"#ffffff\",\r\n\t\t//\"peekViewTitle.background\": \"#252526\",\r\n\t\t//\"peekViewTitleDescription.foreground\": \"#ccccccb3\",\r\n\t\t//\"peekViewTitleLabel.foreground\": \"#ffffff\",\r\n\t\t//\"problemsErrorIcon.foreground\": \"#f14c4c\",\r\n\t\t//\"problemsInfoIcon.foreground\": \"#3794ff\",\r\n\t\t//\"problemsWarningIcon.foreground\": \"#cca700\",\r\n\t\t//\"profileBadge.background\": \"#4d4d4d\",\r\n\t\t//\"profileBadge.foreground\": \"#ffffff\",\r\n\t\t//\"quickInputList.focusBackground\": \"#323232\",\r\n\t\t//\"quickInputList.focusForeground\": \"#ffffff\",\r\n\t\t//\"quickInputList.focusIconForeground\": \"#ffffff\",\r\n\t\t//\"quickInputTitle.background\": \"#ffffff1b\",\r\n\t\t//\"sash.hoverBorder\": \"#0078d4\",\r\n\t\t//\"scm.providerBorder\": \"#454545\",\r\n\t\t//\"search.resultsInfoForeground\": \"#cccccca6\",\r\n\t\t//\"searchEditor.findMatchBackground\": \"#ea5c0038\",\r\n\t\t//\"searchEditor.textInputBorder\": \"#ffffff1f\",\r\n\t\t//\"settings.checkboxBackground\": \"#313131\",\r\n\t\t//\"settings.checkboxBorder\": \"#ffffff1f\",\r\n\t\t//\"settings.checkboxForeground\": \"#cccccc\",\r\n\t\t//\"settings.dropdownForeground\": \"#cccccc\",\r\n\t\t//\"settings.dropdownListBorder\": \"#454545\",\r\n\t\t//\"settings.focusedRowBackground\": \"#2a2d2e99\",\r\n\t\t//\"settings.focusedRowBorder\": \"#0078d4\",\r\n\t\t//\"settings.headerBorder\": \"#ffffff15\",\r\n\t\t//\"settings.numberInputBackground\": \"#2a2a2a\",\r\n\t\t//\"settings.numberInputBorder\": \"#ffffff1f\",\r\n\t\t//\"settings.numberInputForeground\": \"#cccccc\",\r\n\t\t//\"settings.rowHoverBackground\": \"#2a2d2e4d\",\r\n\t\t//\"settings.sashBorder\": \"#ffffff15\",\r\n\t\t//\"settings.settingsHeaderHoverForeground\": \"#ffffffb3\",\r\n\t\t//\"settings.textInputBackground\": \"#2a2a2a\",\r\n\t\t//\"settings.textInputBorder\": \"#ffffff1f\",\r\n\t\t//\"settings.textInputForeground\": 
\"#cccccc\",\r\n\t\t//\"sideBar.dropBackground\": \"#53595d80\",\r\n\t\t//\"sideBySideEditor.horizontalBorder\": \"#ffffff17\",\r\n\t\t//\"sideBySideEditor.verticalBorder\": \"#ffffff17\",\r\n\t\t//\"statusBar.debuggingBorder\": \"#ffffff15\",\r\n\t\t//\"statusBar.focusBorder\": \"#cccccc\",\r\n\t\t//\"statusBar.noFolderBorder\": \"#ffffff15\",\r\n\t\t//\"statusBar.noFolderForeground\": \"#cccccc\",\r\n\t\t//\"statusBarItem.activeBackground\": \"#ffffff2e\",\r\n\t\t//\"statusBarItem.compactHoverBackground\": \"#ffffff33\",\r\n\t\t//\"statusBarItem.errorBackground\": \"#b91007\",\r\n\t\t//\"statusBarItem.errorForeground\": \"#ffffff\",\r\n\t\t//\"statusBarItem.hoverBackground\": \"#ffffff1f\",\r\n\t\t//\"statusBarItem.prominentForeground\": \"#cccccc\",\r\n\t\t//\"statusBarItem.prominentHoverBackground\": \"#0000004d\",\r\n\t\t//\"statusBarItem.warningBackground\": \"#7a6400\",\r\n\t\t//\"statusBarItem.warningForeground\": \"#ffffff\",\r\n\t\t//\"symbolIcon.arrayForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.booleanForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.classForeground\": \"#ee9d28\",\r\n\t\t//\"symbolIcon.colorForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.constantForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.constructorForeground\": \"#b180d7\",\r\n\t\t//\"symbolIcon.enumeratorForeground\": \"#ee9d28\",\r\n\t\t//\"symbolIcon.enumeratorMemberForeground\": \"#75beff\",\r\n\t\t//\"symbolIcon.eventForeground\": \"#ee9d28\",\r\n\t\t//\"symbolIcon.fieldForeground\": \"#75beff\",\r\n\t\t//\"symbolIcon.fileForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.folderForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.functionForeground\": \"#b180d7\",\r\n\t\t//\"symbolIcon.interfaceForeground\": \"#75beff\",\r\n\t\t//\"symbolIcon.keyForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.keywordForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.methodForeground\": \"#b180d7\",\r\n\t\t//\"symbolIcon.moduleForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.namespaceForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.nullForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.numberForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.objectForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.operatorForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.packageForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.propertyForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.referenceForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.snippetForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.stringForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.structForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.textForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.typeParameterForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.unitForeground\": \"#cccccc\",\r\n\t\t//\"symbolIcon.variableForeground\": \"#75beff\",\r\n\t\t//\"tab.activeModifiedBorder\": \"#3399cc\",\r\n\t\t//\"tab.inactiveModifiedBorder\": \"#3399cc80\",\r\n\t\t//\"tab.unfocusedActiveBackground\": \"#1f1f1f\",\r\n\t\t//\"tab.unfocusedActiveForeground\": \"#ffffff80\",\r\n\t\t//\"tab.unfocusedActiveModifiedBorder\": \"#3399cc80\",\r\n\t\t//\"tab.unfocusedInactiveBackground\": \"#181818\",\r\n\t\t//\"tab.unfocusedInactiveForeground\": \"#ffffff40\",\r\n\t\t//\"tab.unfocusedInactiveModifiedBorder\": \"#3399cc40\",\r\n\t\t//\"terminal.ansiBlack\": \"#000000\",\r\n\t\t//\"terminal.ansiBlue\": \"#2472c8\",\r\n\t\t//\"terminal.ansiBrightBlack\": \"#666666\",\r\n\t\t//\"terminal.ansiBrightBlue\": \"#3b8eea\",\r\n\t\t//\"terminal.ansiBrightCyan\": \"#29b8db\",\r\n\t\t//\"terminal.ansiBrightGreen\": 
\"#23d18b\",\r\n\t\t//\"terminal.ansiBrightMagenta\": \"#d670d6\",\r\n\t\t//\"terminal.ansiBrightRed\": \"#f14c4c\",\r\n\t\t//\"terminal.ansiBrightWhite\": \"#e5e5e5\",\r\n\t\t//\"terminal.ansiBrightYellow\": \"#f5f543\",\r\n\t\t//\"terminal.ansiCyan\": \"#11a8cd\",\r\n\t\t//\"terminal.ansiGreen\": \"#0dbc79\",\r\n\t\t//\"terminal.ansiMagenta\": \"#bc3fbc\",\r\n\t\t//\"terminal.ansiRed\": \"#cd3131\",\r\n\t\t//\"terminal.ansiWhite\": \"#e5e5e5\",\r\n\t\t//\"terminal.ansiYellow\": \"#e5e510\",\r\n\t\t//\"terminal.border\": \"#ffffff15\",\r\n\t\t//\"terminal.dropBackground\": \"#53595d80\",\r\n\t\t//\"terminal.findMatchBackground\": \"#9e6a03\",\r\n\t\t//\"terminal.findMatchHighlightBackground\": \"#ea5c0055\",\r\n\t\t//\"terminal.hoverHighlightBackground\": \"#264f7820\",\r\n\t\t//\"terminal.selectionBackground\": \"#264f78\",\r\n\t\t//\"terminalCommandDecoration.defaultBackground\": \"#ffffff40\",\r\n\t\t//\"terminalCommandDecoration.errorBackground\": \"#f14c4c\",\r\n\t\t//\"terminalCommandDecoration.successBackground\": \"#1b81a8\",\r\n\t\t//\"terminalOverviewRuler.cursorForeground\": \"#a0a0a0cc\",\r\n\t\t//\"terminalOverviewRuler.findMatchForeground\": \"#d186167e\",\r\n\t\t//\"testing.iconErrored\": \"#f14c4c\",\r\n\t\t//\"testing.iconFailed\": \"#f14c4c\",\r\n\t\t//\"testing.iconPassed\": \"#73c991\",\r\n\t\t//\"testing.iconQueued\": \"#cca700\",\r\n\t\t//\"testing.iconSkipped\": \"#848484\",\r\n\t\t//\"testing.iconUnset\": \"#848484\",\r\n\t\t//\"testing.message.error.decorationForeground\": \"#f14c4c\",\r\n\t\t//\"testing.message.error.lineBackground\": \"#ff000033\",\r\n\t\t//\"testing.message.info.decorationForeground\": \"#cccccc80\",\r\n\t\t//\"testing.peekBorder\": \"#f14c4c\",\r\n\t\t//\"testing.peekHeaderBackground\": \"#f14c4c1a\",\r\n\t\t//\"testing.runAction\": \"#73c991\",\r\n\t\t//\"textPreformat.foreground\": \"#d7ba7d\",\r\n\t\t//\"toolbar.activeBackground\": \"#63666750\",\r\n\t\t//\"toolbar.hoverBackground\": \"#5a5d5e50\",\r\n\t\t//\"tree.inactiveIndentGuidesStroke\": \"#58585866\",\r\n\t\t//\"tree.indentGuidesStroke\": \"#585858\",\r\n\t\t//\"tree.tableColumnsBorder\": \"#cccccc20\",\r\n\t\t//\"tree.tableOddRowsBackground\": \"#cccccc0a\",\r\n\t\t//\"walkThrough.embeddedEditorBackground\": \"#00000066\",\r\n\t\t//\"walkthrough.stepTitle.foreground\": \"#ffffff\",\r\n\t\t//\"welcomePage.progress.background\": \"#2a2a2a\",\r\n\t\t//\"welcomePage.tileBorder\": \"#ffffff1a\",\r\n\t\t//\"welcomePage.tileHoverBackground\": \"#252525\",\r\n\t\t//\"widget.shadow\": \"#0000005c\",\r\n\t\t//\"activityBar.activeBackground\": null,\r\n\t\t//\"activityBar.activeFocusBorder\": null,\r\n\t\t//\"contrastActiveBorder\": null,\r\n\t\t//\"contrastBorder\": null,\r\n\t\t//\"debugToolBar.border\": null,\r\n\t\t//\"diffEditor.border\": null,\r\n\t\t//\"diffEditor.insertedTextBorder\": null,\r\n\t\t//\"diffEditor.removedTextBorder\": null,\r\n\t\t//\"diffEditorGutter.insertedLineBackground\": null,\r\n\t\t//\"diffEditorGutter.removedLineBackground\": null,\r\n\t\t//\"diffEditorOverview.insertedForeground\": null,\r\n\t\t//\"diffEditorOverview.removedForeground\": null,\r\n\t\t//\"editor.findMatchBorder\": null,\r\n\t\t//\"editor.findMatchHighlightBorder\": null,\r\n\t\t//\"editor.findRangeHighlightBorder\": null,\r\n\t\t//\"editor.lineHighlightBackground\": null,\r\n\t\t//\"editor.rangeHighlightBorder\": null,\r\n\t\t//\"editor.selectionForeground\": null,\r\n\t\t//\"editor.selectionHighlightBorder\": null,\r\n\t\t//\"editor.snippetFinalTabstopHighlightBackground\": 
null,\r\n\t\t//\"editor.snippetTabstopHighlightBorder\": null,\r\n\t\t//\"editor.symbolHighlightBorder\": null,\r\n\t\t//\"editor.wordHighlightBorder\": null,\r\n\t\t//\"editor.wordHighlightStrongBorder\": null,\r\n\t\t//\"editor.wordHighlightTextBorder\": null,\r\n\t\t//\"editorCursor.background\": null,\r\n\t\t//\"editorError.background\": null,\r\n\t\t//\"editorError.border\": null,\r\n\t\t//\"editorGhostText.background\": null,\r\n\t\t//\"editorGhostText.border\": null,\r\n\t\t//\"editorGroup.dropIntoPromptBorder\": null,\r\n\t\t//\"editorGroup.emptyBackground\": null,\r\n\t\t//\"editorGroup.focusedEmptyBorder\": null,\r\n\t\t//\"editorGroupHeader.border\": null,\r\n\t\t//\"editorHint.border\": null,\r\n\t\t//\"editorInfo.background\": null,\r\n\t\t//\"editorInfo.border\": null,\r\n\t\t//\"editorLineNumber.dimmedForeground\": null,\r\n\t\t//\"editorOverviewRuler.background\": null,\r\n\t\t//\"editorUnnecessaryCode.border\": null,\r\n\t\t//\"editorWarning.background\": null,\r\n\t\t//\"editorWarning.border\": null,\r\n\t\t//\"editorWidget.resizeBorder\": null,\r\n\t\t//\"inputValidation.errorForeground\": null,\r\n\t\t//\"inputValidation.infoForeground\": null,\r\n\t\t//\"inputValidation.warningForeground\": null,\r\n\t\t//\"list.filterMatchBorder\": null,\r\n\t\t//\"list.focusAndSelectionOutline\": null,\r\n\t\t//\"list.focusBackground\": null,\r\n\t\t//\"list.focusForeground\": null,\r\n\t\t//\"list.hoverForeground\": null,\r\n\t\t//\"list.inactiveFocusBackground\": null,\r\n\t\t//\"list.inactiveFocusOutline\": null,\r\n\t\t//\"list.inactiveSelectionForeground\": null,\r\n\t\t//\"list.inactiveSelectionIconForeground\": null,\r\n\t\t//\"menu.selectionBorder\": null,\r\n\t\t//\"menubar.selectionBorder\": null,\r\n\t\t//\"merge.border\": null,\r\n\t\t//\"minimap.background\": null,\r\n\t\t//\"notebook.cellHoverBackground\": null,\r\n\t\t//\"notebook.focusedCellBackground\": null,\r\n\t\t//\"notebook.inactiveSelectedCellBorder\": null,\r\n\t\t//\"notebook.outputContainerBackgroundColor\": null,\r\n\t\t//\"notebook.outputContainerBorderColor\": null,\r\n\t\t//\"panelSectionHeader.border\": null,\r\n\t\t//\"panelSectionHeader.foreground\": null,\r\n\t\t//\"peekViewEditor.matchHighlightBorder\": null,\r\n\t\t//\"quickInput.list.focusBackground\": null,\r\n\t\t//\"searchEditor.findMatchBorder\": null,\r\n\t\t//\"selection.background\": null,\r\n\t\t//\"tab.hoverBorder\": null,\r\n\t\t//\"tab.hoverForeground\": null,\r\n\t\t//\"tab.unfocusedHoverBorder\": null,\r\n\t\t//\"tab.unfocusedHoverForeground\": null,\r\n\t\t//\"terminal.background\": null,\r\n\t\t//\"terminal.findMatchBorder\": null,\r\n\t\t//\"terminal.findMatchHighlightBorder\": null,\r\n\t\t//\"terminal.selectionForeground\": null,\r\n\t\t//\"terminalCursor.background\": null,\r\n\t\t//\"terminalCursor.foreground\": null,\r\n\t\t//\"testing.message.info.lineBackground\": null,\r\n\t\t//\"toolbar.hoverOutline\": null,\r\n\t\t//\"welcomePage.background\": null,\r\n\t\t//\"window.activeBorder\": null,\r\n\t\t//\"window.inactiveBorder\": null\r\n\t},\r\n\t\"tokenColors\": [\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"meta.embedded\",\r\n\t\t\t\t\"source.groovy.embedded\",\r\n\t\t\t\t\"string meta.image.inline.markdown\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D4D4D4\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"emphasis\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"fontStyle\": \"italic\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"strong\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"fontStyle\": 
\"bold\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"header\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#000080\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"comment\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#6A9955\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"constant.language\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"constant.numeric\",\r\n\t\t\t\t\"variable.other.enummember\",\r\n\t\t\t\t\"keyword.operator.plus.exponent\",\r\n\t\t\t\t\"keyword.operator.minus.exponent\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B5CEA8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"constant.regexp\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#646695\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"entity.name.tag\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"entity.name.tag.css\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D7BA7D\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"entity.other.attribute-name\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"entity.other.attribute-name.class.css\",\r\n\t\t\t\t\"entity.other.attribute-name.class.mixin.css\",\r\n\t\t\t\t\"entity.other.attribute-name.id.css\",\r\n\t\t\t\t\"entity.other.attribute-name.parent-selector.css\",\r\n\t\t\t\t\"entity.other.attribute-name.pseudo-class.css\",\r\n\t\t\t\t\"entity.other.attribute-name.pseudo-element.css\",\r\n\t\t\t\t\"source.css.less entity.other.attribute-name.id\",\r\n\t\t\t\t\"entity.other.attribute-name.scss\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D7BA7D\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"invalid\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#F44747\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.underline\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"fontStyle\": \"underline\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.bold\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\",\r\n\t\t\t\t\"fontStyle\": \"bold\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.heading\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\",\r\n\t\t\t\t\"fontStyle\": \"bold\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.italic\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"fontStyle\": \"italic\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.strikethrough\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"fontStyle\": \"strikethrough\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.inserted\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B5CEA8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.deleted\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.changed\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"punctuation.definition.quote.begin.markdown\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#6A9955\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"punctuation.definition.list.begin.markdown\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#6796E6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"markup.inline.raw\",\r\n\t\t\t\"settings\": 
{\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"punctuation.definition.tag\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#808080\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"meta.preprocessor\",\r\n\t\t\t\t\"entity.name.function.preprocessor\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"meta.preprocessor.string\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"meta.preprocessor.numeric\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B5CEA8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"meta.structure.dictionary.key.python\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"meta.diff.header\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"storage\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"storage.type\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"storage.modifier\",\r\n\t\t\t\t\"keyword.operator.noexcept\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"string\",\r\n\t\t\t\t\"meta.embedded.assembly\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"string.tag\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"string.value\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"string.regexp\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D16969\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"punctuation.definition.template-expression.begin\",\r\n\t\t\t\t\"punctuation.definition.template-expression.end\",\r\n\t\t\t\t\"punctuation.section.embedded\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"meta.template.expression\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D4D4D4\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"support.type.vendored.property-name\",\r\n\t\t\t\t\"support.type.property-name\",\r\n\t\t\t\t\"variable.css\",\r\n\t\t\t\t\"variable.scss\",\r\n\t\t\t\t\"variable.other.less\",\r\n\t\t\t\t\"source.coffee.embedded\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"keyword\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"keyword.control\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"keyword.operator\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D4D4D4\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": 
[\r\n\t\t\t\t\"keyword.operator.new\",\r\n\t\t\t\t\"keyword.operator.expression\",\r\n\t\t\t\t\"keyword.operator.cast\",\r\n\t\t\t\t\"keyword.operator.sizeof\",\r\n\t\t\t\t\"keyword.operator.alignof\",\r\n\t\t\t\t\"keyword.operator.typeid\",\r\n\t\t\t\t\"keyword.operator.alignas\",\r\n\t\t\t\t\"keyword.operator.instanceof\",\r\n\t\t\t\t\"keyword.operator.logical.python\",\r\n\t\t\t\t\"keyword.operator.wordlike\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"keyword.other.unit\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B5CEA8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"punctuation.section.embedded.begin.php\",\r\n\t\t\t\t\"punctuation.section.embedded.end.php\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"support.function.git-rebase\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"constant.sha.git-rebase\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B5CEA8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"storage.modifier.import.java\",\r\n\t\t\t\t\"variable.language.wildcard.java\",\r\n\t\t\t\t\"storage.modifier.package.java\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D4D4D4\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"variable.language\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"entity.name.function\",\r\n\t\t\t\t\"support.function\",\r\n\t\t\t\t\"support.constant.handlebars\",\r\n\t\t\t\t\"source.powershell variable.other.member\",\r\n\t\t\t\t\"entity.name.operator.custom-literal\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#DCDCAA\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"support.class\",\r\n\t\t\t\t\"support.type\",\r\n\t\t\t\t\"entity.name.type\",\r\n\t\t\t\t\"entity.name.namespace\",\r\n\t\t\t\t\"entity.other.attribute\",\r\n\t\t\t\t\"entity.name.scope-resolution\",\r\n\t\t\t\t\"entity.name.class\",\r\n\t\t\t\t\"storage.type.numeric.go\",\r\n\t\t\t\t\"storage.type.byte.go\",\r\n\t\t\t\t\"storage.type.boolean.go\",\r\n\t\t\t\t\"storage.type.string.go\",\r\n\t\t\t\t\"storage.type.uintptr.go\",\r\n\t\t\t\t\"storage.type.error.go\",\r\n\t\t\t\t\"storage.type.rune.go\",\r\n\t\t\t\t\"storage.type.cs\",\r\n\t\t\t\t\"storage.type.generic.cs\",\r\n\t\t\t\t\"storage.type.modifier.cs\",\r\n\t\t\t\t\"storage.type.variable.cs\",\r\n\t\t\t\t\"storage.type.annotation.java\",\r\n\t\t\t\t\"storage.type.generic.java\",\r\n\t\t\t\t\"storage.type.java\",\r\n\t\t\t\t\"storage.type.object.array.java\",\r\n\t\t\t\t\"storage.type.primitive.array.java\",\r\n\t\t\t\t\"storage.type.primitive.java\",\r\n\t\t\t\t\"storage.type.token.java\",\r\n\t\t\t\t\"storage.type.groovy\",\r\n\t\t\t\t\"storage.type.annotation.groovy\",\r\n\t\t\t\t\"storage.type.parameters.groovy\",\r\n\t\t\t\t\"storage.type.generic.groovy\",\r\n\t\t\t\t\"storage.type.object.array.groovy\",\r\n\t\t\t\t\"storage.type.primitive.array.groovy\",\r\n\t\t\t\t\"storage.type.primitive.groovy\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#4EC9B0\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": 
[\r\n\t\t\t\t\"meta.type.cast.expr\",\r\n\t\t\t\t\"meta.type.new.expr\",\r\n\t\t\t\t\"support.constant.math\",\r\n\t\t\t\t\"support.constant.dom\",\r\n\t\t\t\t\"support.constant.json\",\r\n\t\t\t\t\"entity.other.inherited-class\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#4EC9B0\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"keyword.control\",\r\n\t\t\t\t\"source.cpp keyword.operator.new\",\r\n\t\t\t\t\"keyword.operator.delete\",\r\n\t\t\t\t\"keyword.other.using\",\r\n\t\t\t\t\"keyword.other.operator\",\r\n\t\t\t\t\"entity.name.operator\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#C586C0\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"variable\",\r\n\t\t\t\t\"meta.definition.variable.name\",\r\n\t\t\t\t\"support.variable\",\r\n\t\t\t\t\"entity.name.variable\",\r\n\t\t\t\t\"constant.other.placeholder\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"variable.other.constant\",\r\n\t\t\t\t\"variable.other.enummember\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#4FC1FF\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"meta.object-literal.key\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#9CDCFE\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"support.constant.property-value\",\r\n\t\t\t\t\"support.constant.font-name\",\r\n\t\t\t\t\"support.constant.media-type\",\r\n\t\t\t\t\"support.constant.media\",\r\n\t\t\t\t\"constant.other.color.rgb-value\",\r\n\t\t\t\t\"constant.other.rgb-value\",\r\n\t\t\t\t\"support.constant.color\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"punctuation.definition.group.regexp\",\r\n\t\t\t\t\"punctuation.definition.group.assertion.regexp\",\r\n\t\t\t\t\"punctuation.definition.character-class.regexp\",\r\n\t\t\t\t\"punctuation.character.set.begin.regexp\",\r\n\t\t\t\t\"punctuation.character.set.end.regexp\",\r\n\t\t\t\t\"keyword.operator.negation.regexp\",\r\n\t\t\t\t\"support.other.parenthesis.regexp\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CE9178\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"constant.character.character-class.regexp\",\r\n\t\t\t\t\"constant.other.character-class.set.regexp\",\r\n\t\t\t\t\"constant.other.character-class.regexp\",\r\n\t\t\t\t\"constant.character.set.regexp\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D16969\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"keyword.operator.or.regexp\",\r\n\t\t\t\t\"keyword.control.anchor.regexp\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#DCDCAA\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"keyword.operator.quantifier.regexp\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D7BA7D\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": [\r\n\t\t\t\t\"constant.character\",\r\n\t\t\t\t\"constant.other.option\"\r\n\t\t\t],\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#569CD6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"constant.character.escape\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#D7BA7D\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"entity.name.label\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#C8C8C8\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": 
\"token.info-token\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#6796E6\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"token.warn-token\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#CD9731\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"token.error-token\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#F44747\"\r\n\t\t\t}\r\n\t\t},\r\n\t\t{\r\n\t\t\t\"scope\": \"token.debug-token\",\r\n\t\t\t\"settings\": {\r\n\t\t\t\t\"foreground\": \"#B267E6\"\r\n\t\t\t}\r\n\t\t}\r\n\t]\r\n}\r\n```" ]
[]
"2023-06-14T11:13:05Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed", "area/ux" ]
Dark mode scheme interferes with syntax highlighting visibility
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) Syntax highlighting/text editor for ksql conflicts with dark theme. ### Expected behavior Either having a solarized color scheme or some lighter tones would help. ### Your installation details v0.7.0 ### Steps to reproduce Observe dark mode w/syntax highlighting ### Screenshots ![image](https://github.com/provectus/kafka-ui/assets/6597925/95ac79fb-a781-40d3-a411-a4a12fe5d49a) ### Logs _No response_ ### Additional context _No response_
[ "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/NavBar/NavBar.tsx", "kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx", "kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx", "kafka-ui-react-app/src/index.tsx" ]
[ "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/NavBar/NavBar.tsx", "kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx", "kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx", "kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx", "kafka-ui-react-app/src/index.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index 44409c403ab..0c06a6fddd6 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -1,4 +1,4 @@ -import React, { Suspense } from 'react'; +import React, { Suspense, useContext } from 'react'; import { Routes, Route, Navigate } from 'react-router-dom'; import { accessErrorPage, @@ -18,6 +18,7 @@ import { Toaster } from 'react-hot-toast'; import GlobalCSS from 'components/globalCss'; import * as S from 'components/App.styled'; import ClusterConfigForm from 'widgets/ClusterConfigForm'; +import { ThemeModeContext } from 'components/contexts/ThemeModeContext'; import ConfirmationModal from './common/ConfirmationModal/ConfirmationModal'; import { ConfirmContextProvider } from './contexts/ConfirmContext'; @@ -42,7 +43,7 @@ const queryClient = new QueryClient({ }, }); const App: React.FC = () => { - const [isDarkMode, setDarkMode] = React.useState<boolean>(false); + const { isDarkMode } = useContext(ThemeModeContext); return ( <QueryClientProvider client={queryClient}> @@ -53,7 +54,7 @@ const App: React.FC = () => { <ConfirmContextProvider> <GlobalCSS /> <S.Layout> - <PageContainer setDarkMode={setDarkMode}> + <PageContainer> <Routes> {['/', '/ui', '/ui/clusters'].map((path) => ( <Route diff --git a/kafka-ui-react-app/src/components/NavBar/NavBar.tsx b/kafka-ui-react-app/src/components/NavBar/NavBar.tsx index 2e363501288..4744eb0bffe 100644 --- a/kafka-ui-react-app/src/components/NavBar/NavBar.tsx +++ b/kafka-ui-react-app/src/components/NavBar/NavBar.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useContext } from 'react'; import Select from 'components/common/Select/Select'; import Logo from 'components/common/Logo/Logo'; import Version from 'components/Version/Version'; @@ -7,16 +7,16 @@ import DiscordIcon from 'components/common/Icons/DiscordIcon'; import AutoIcon from 'components/common/Icons/AutoIcon'; import SunIcon from 'components/common/Icons/SunIcon'; import MoonIcon from 'components/common/Icons/MoonIcon'; +import { ThemeModeContext } from 'components/contexts/ThemeModeContext'; import UserInfo from './UserInfo/UserInfo'; import * as S from './NavBar.styled'; interface Props { onBurgerClick: () => void; - setDarkMode: (value: boolean) => void; } -type ThemeDropDownValue = 'auto_theme' | 'light_theme' | 'dark_theme'; +export type ThemeDropDownValue = 'auto_theme' | 'light_theme' | 'dark_theme'; const options = [ { @@ -48,44 +48,8 @@ const options = [ }, ]; -const NavBar: React.FC<Props> = ({ onBurgerClick, setDarkMode }) => { - const matchDark = window.matchMedia('(prefers-color-scheme: dark)'); - const [themeMode, setThemeMode] = React.useState<ThemeDropDownValue>(); - - React.useLayoutEffect(() => { - const mode = localStorage.getItem('mode'); - if (mode) { - setThemeMode(mode as ThemeDropDownValue); - if (mode === 'auto_theme') { - setDarkMode(matchDark.matches); - } else if (mode === 'light_theme') { - setDarkMode(false); - } else if (mode === 'dark_theme') { - setDarkMode(true); - } - } else { - setThemeMode('auto_theme'); - } - }, []); - - React.useEffect(() => { - if (themeMode === 'auto_theme') { - setDarkMode(matchDark.matches); - matchDark.addListener((e) => { - setDarkMode(e.matches); - }); - } - }, [matchDark, themeMode]); - - const onChangeThemeMode = (value: string | number) => { - setThemeMode(value as ThemeDropDownValue); - localStorage.setItem('mode', value as string); - if (value === 'light_theme') { - 
setDarkMode(false); - } else if (value === 'dark_theme') { - setDarkMode(true); - } - }; +const NavBar: React.FC<Props> = ({ onBurgerClick }) => { + const { themeMode, setThemeMode } = useContext(ThemeModeContext); return ( <S.Navbar role="navigation" aria-label="Page Header"> @@ -117,7 +81,7 @@ const NavBar: React.FC<Props> = ({ onBurgerClick, setDarkMode }) => { <Select options={options} value={themeMode} - onChange={onChangeThemeMode} + onChange={setThemeMode} isThemeMode /> <S.SocialLink diff --git a/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx b/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx index a5697e89abd..93fe4dabe52 100644 --- a/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx +++ b/kafka-ui-react-app/src/components/PageContainer/PageContainer.tsx @@ -5,9 +5,7 @@ import * as S from 'components/PageContainer/PageContainer.styled'; import Nav from 'components/Nav/Nav'; import useBoolean from 'lib/hooks/useBoolean'; -const PageContainer: React.FC< - PropsWithChildren<{ setDarkMode: (value: boolean) => void }> -> = ({ children, setDarkMode }) => { +const PageContainer: React.FC<PropsWithChildren<unknown>> = ({ children }) => { const { value: isSidebarVisible, toggle, @@ -21,7 +19,7 @@ const PageContainer: React.FC< return ( <> - <NavBar onBurgerClick={toggle} setDarkMode={setDarkMode} /> + <NavBar onBurgerClick={toggle} /> <S.Container> <S.Sidebar aria-label="Sidebar" $visible={isSidebarVisible}> <Nav /> diff --git a/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx b/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx index a12170151e7..14433f06edd 100644 --- a/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx +++ b/kafka-ui-react-app/src/components/common/SQLEditor/SQLEditor.tsx @@ -3,7 +3,9 @@ import AceEditor, { IAceEditorProps } from 'react-ace'; import 'ace-builds/src-noconflict/ace'; import 'ace-builds/src-noconflict/mode-sql'; import 'ace-builds/src-noconflict/theme-textmate'; -import React from 'react'; +import 'ace-builds/src-noconflict/theme-dracula'; +import React, { useContext } from 'react'; +import { ThemeModeContext } from 'components/contexts/ThemeModeContext'; interface SQLEditorProps extends IAceEditorProps { isFixedHeight?: boolean; @@ -12,11 +14,13 @@ interface SQLEditorProps extends IAceEditorProps { const SQLEditor = React.forwardRef<AceEditor | null, SQLEditorProps>( (props, ref) => { const { isFixedHeight, ...rest } = props; + const { isDarkMode } = useContext(ThemeModeContext); + return ( <AceEditor ref={ref} mode="sql" - theme="textmate" + theme={isDarkMode ? 
'dracula' : 'textmate'} tabSize={2} width="100%" height={ diff --git a/kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx b/kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx new file mode 100644 index 00000000000..5011dfe0813 --- /dev/null +++ b/kafka-ui-react-app/src/components/contexts/ThemeModeContext.tsx @@ -0,0 +1,58 @@ +import React, { useMemo } from 'react'; +import type { FC, PropsWithChildren } from 'react'; +import type { ThemeDropDownValue } from 'components/NavBar/NavBar'; + +interface ThemeModeContextProps { + isDarkMode: boolean; + themeMode: ThemeDropDownValue; + setThemeMode: (value: string | number) => void; +} + +export const ThemeModeContext = React.createContext<ThemeModeContextProps>({ + isDarkMode: false, + themeMode: 'auto_theme', + setThemeMode: () => {}, +}); + +export const ThemeModeProvider: FC<PropsWithChildren<unknown>> = ({ + children, +}) => { + const matchDark = window.matchMedia('(prefers-color-scheme: dark)'); + const [themeMode, setThemeModeState] = + React.useState<ThemeDropDownValue>('auto_theme'); + + React.useLayoutEffect(() => { + const mode = localStorage.getItem('mode'); + setThemeModeState((mode as ThemeDropDownValue) ?? 'auto_theme'); + }, [setThemeModeState]); + + const isDarkMode = React.useMemo(() => { + if (themeMode === 'auto_theme') { + return matchDark.matches; + } + return themeMode === 'dark_theme'; + }, [themeMode]); + + const setThemeMode = React.useCallback( + (value: string | number) => { + setThemeModeState(value as ThemeDropDownValue); + localStorage.setItem('mode', value as string); + }, + [setThemeModeState] + ); + + const contextValue = useMemo( + () => ({ + isDarkMode, + themeMode, + setThemeMode, + }), + [isDarkMode, themeMode, setThemeMode] + ); + + return ( + <ThemeModeContext.Provider value={contextValue}> + {children} + </ThemeModeContext.Provider> + ); +}; diff --git a/kafka-ui-react-app/src/index.tsx b/kafka-ui-react-app/src/index.tsx index d15e0528ca5..5ca52f75b8a 100644 --- a/kafka-ui-react-app/src/index.tsx +++ b/kafka-ui-react-app/src/index.tsx @@ -2,6 +2,7 @@ import React from 'react'; import { createRoot } from 'react-dom/client'; import { BrowserRouter } from 'react-router-dom'; import { Provider } from 'react-redux'; +import { ThemeModeProvider } from 'components/contexts/ThemeModeContext'; import App from 'components/App'; import { store } from 'redux/store'; import 'lib/constants'; @@ -14,7 +15,9 @@ const root = createRoot(container); root.render( <Provider store={store}> <BrowserRouter basename={window.basePath || '/'}> - <App /> + <ThemeModeProvider> + <App /> + </ThemeModeProvider> </BrowserRouter> </Provider> );
null
train
test
2023-06-14T03:29:22
"2023-06-01T22:08:00Z"
taylorsmithgg
train
provectus/kafka-ui/3872_3946
provectus/kafka-ui
provectus/kafka-ui/3872
provectus/kafka-ui/3946
[ "keyword_pr_to_issue" ]
2fb05ca9474da8fe6b9f91dc3b0f22ebdbceb9b8
6fe61654271fe74d595180fc29615b155fb66182
[ "Hello there javihernanp! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, it seems to be working for me:\r\n```yaml\r\n - resource: topic\r\n value: \"dev.*\"\r\n actions: all\r\n```\r\n\r\n<img width=\"940\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/84eb9b78-665e-47d4-824c-4800d2624f41\">\r\n\r\nPlease provide your full config and responses for `/api/authorization` and `/api/clusters/local/topics` requests, could be found in network tab within browser's dev console.", "Hello,\r\n\r\nThank you for your quick response,\r\n\r\nThis is the config file that I'm using:\r\n\r\n```\r\nkafka:\r\n clusters:\r\n -\r\n name: LAB01\r\n bootstrapServers: cluster1.confluent.cloud:9092\r\n properties.security.protocol: SASL_SSL\r\n properties.sasl.mechanism: PLAIN\r\n properties.sasl.jaas.config: 'org.apache.kafka.common.security.plain.PlainLoginModule required username=\"****\" password=\"****\";'\r\n -\r\n name: NON_PROD_AWS_EU-WEST-3\r\n bootstrapServers: cluster2.confluent.cloud:9092\r\n properties.security.protocol: SASL_SSL\r\n properties.sasl.mechanism: PLAIN\r\n properties.sasl.jaas.config: 'org.apache.kafka.common.security.plain.PlainLoginModule required username=\"****\" password=\"****\";'\r\n schemaRegistry: https://example\r\n schemaRegistryAuth:\r\n username: username\r\n password: password\r\n\r\nauth:\r\n type: OAUTH2\r\n oauth2:\r\n client:\r\n github:\r\n provider: github\r\n clientId: ****\r\n clientSecret: ****\r\n scope:\r\n - read:org\r\n user-name-attribute: login\r\n usernameattribute: login\r\n custom-params:\r\n type: github\r\n authorization-uri: https://example/login/oauth/authorize\r\n token-uri: https://example/login/oauth/access_token\r\n tokenuri: https://example/login/oauth/access_token\r\n user-info-uri: https://example/api/v3/user\r\n redirect-uri: https://ui.example/login/oauth2/code/github\r\n\r\nrbac:\r\n roles:\r\n - name: \"Admin Team\"\r\n subjects:\r\n - provider: oauth_github\r\n type: organization\r\n value: \"organization\"\r\n clusters:\r\n - LAB01\r\n - NON_PROD_AWS_EU-WEST-3\r\n permissions:\r\n - resource: clusterconfig\r\n actions: all\r\n - resource: topic\r\n value: \".*\"\r\n actions: all\r\n - resource: consumer\r\n value: \".*\"\r\n actions: all\r\n - resource: schema\r\n value: \".*\"\r\n actions: all\r\n - resource: connect\r\n value: \".*\"\r\n actions: all\r\n - resource: ksql\r\n actions: all\r\n value: \".*\"\r\n - resource: acl\r\n value: \".*\"\r\n actions: all\r\n - name: \"test\"\r\n subjects:\r\n - provider: oauth_github\r\n type: user\r\n value: \"fcojavier-hernandez\"\r\n clusters:\r\n - NON_PROD_AWS_EU-WEST-3\r\n permissions:\r\n - resource: clusterconfig\r\n value: \".*\"\r\n actions: all\r\n - resource: topic\r\n value: \"dev.*\"\r\n actions: all\r\n - resource: consumer\r\n value: \".*\"\r\n actions: all\r\n - resource: schema\r\n value: \".*\"\r\n actions: all\r\n - resource: connect\r\n value: \".*\"\r\n actions: all\r\n - resource: ksql\r\n actions: all\r\n value: \".*\"\r\n - resource: acl\r\n value: \".*\"\r\n actions: all\r\n\r\n```\r\nFor the two outputs you requested, here are both screenshots:\r\n\r\n![image](https://github.com/provectus/kafka-ui/assets/60904917/bbd520e2-b621-4e31-b544-ac54f51c729d)\r\n![image](https://github.com/provectus/kafka-ui/assets/60904917/f376768a-6315-40fc-8f6e-80f624bed437)\r\n\r\nI 
would like to ask if you are using the release 0.7\r\n\r\nKind regards.", "@javihernanp \r\n\r\n>I would like to ask if you are using the release 0.7\r\nEven better, master branch.\r\n\r\nWhich cluster stores the topic in question? Your screenshots are cropped, no way to determine that.\r\nIs that the same cluster that returns `topics: []`?\r\n\r\n", "Hello,\r\n\r\nIndeed, the cluster I obtain for topic[] is the same as the one I have configured, namely \"NON_PROD_AWS_EU-WEST-3\". I have continued testing, and it works correctly with a Confluent Cloud Basic cluster. However, if I configure a Confluent Cloud Dedicated cluster, such as \"NON_PROD_AWS_EU-WEST-3\", it doesn't work correctly. I have checked the Service Accounts I am using, and they have the same permissions in both types of clusters. Furthermore, it is worth mentioning that I am also using the code from the master branch.\r\n\r\nKind regards.", "1. Do you have these topics available if RBAC is disabled (no roles specified in config)? \r\n2. Are there any cluster/topic authorization errors in logs?", "Hello,\r\n\r\nDo you have these topics available if RBAC is disabled (no roles specified in config)?\r\n\r\n - Yes\r\n\r\nAre there any cluster/topic authorization errors in logs? \r\n\r\n - I could not see configuring different levels of logs, nothing related to authorization errors\r\n\r\nKind regards", "@javihernanp please try running this image:\r\n`public.ecr.aws/provectus/kafka-ui-custom-build:3914`\r\nwith the following config property:\r\n`logging.level.com.provectus.kafka.ui.service.rbac=TRACE`\r\nOpen the topic list page, also try searching by the topic name as you did, and extract the logs after that.", "Hello @Haarolean,\r\n\r\nI have been testing by modifying the log level as you indicated, and I have used that version, but I still haven't been able to see anything different in the console output. Would it be possible to have a quick meeting to explain in detail the issue and the different tests we have been conducting?\r\n\r\nKind regards", "@javihernanp Sure, let's talk. Got discord? The invite link [here](https://discord.gg/4DWzD7pGE5).", "@Haarolean, we have seen the same/similar behavior, but can add a bit more context. In our case, we RBAC topic filter on \"a.*\" and \"B.*\" as a test. What happens is we still see 39 pages of topics as if there was no RBAC topic filter but most pages are blank. Topics \"B.*\" show up on page #1 and topics \"a.*\" show up on pages 10-11. So sorted by case and the filter just prevents the topics from showing on the page were it would be. If you do try to add a non-allowed topic name to the URL directly, you do get a 403.", "Hello @Haarolean,\r\n\r\nI'm sorry, but at that time I was already disconnected and couldn't see that you sent me a link. I have been testing what @dmalik16 mentioned in the previous comment, and indeed, that is the cause of the error. I have been testing it, and it does show up on the original pages as if all the topics were displayed, just as explained. Since we already know the origin of the issue, I believe it is no longer necessary to have a call to further investigate the issue. Please let me know if you need more information. 
I would like to know an estimated time for the resolution of the issue., and thank you very much for the support and work done so far.\r\n\r\nBest regards.", "@javihernanp @dmalik16 could you please try this docker image and tell me if you experience any problems now?\r\n`image: public.ecr.aws/provectus/kafka-ui-custom-build:3946`", "@Haarolean, we pulled that image down and tested it out. It now displays as expected. In our case the filtered topics appeared over 2 pages total as if they were the only topics.\r\nThanks!", "Hello @Haarolean ,\r\n\r\nI have tested the changes, and now it is working correctly with the applied RBAC configuration. Thank you very much for resolving the issue promptly. I was wondering if these changes will be included in the main branch in the next patch. If so, could you please let me know the estimated release date for that patch? Once again, I would like to express my appreciation for the work done and the speed at which it was resolved.\r\n\r\nBest regards." ]
[ "I think it should be `when(mock.filterViewableTopics(any(), any())).then(i -> Mono.just(i.getArgument(0)));`", "thanks, done" ]
"2023-06-16T09:19:51Z"
[ "type/bug", "scope/backend", "status/accepted", "area/rbac" ]
The 'all-topics' view does not display according to the RBAC filter
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) I was working with RBAC to filter permissions by topics, and the issue I'm facing is that in the "all-topics" URI, unlike with schemas or other tabs, it doesn't display the filtered topic unless I search for them in the search bar. ### Expected behavior It should display, like in the rest of the tabs, only the topics that comply with the RBAC configuration. ### Your installation details 1 - 7.0 version 2 - Im not using it 3 - This is the RBAC that I'm using resource: topic value: "dev.*" actions: all ### Steps to reproduce Simply modify the configuration file in the topic permissions section to allow access only to specific topics. ### Screenshots ![image](https://github.com/provectus/kafka-ui/assets/60904917/eee159da-cbd5-45a5-8cd6-6ecd9a7cb8c3) ![image](https://github.com/provectus/kafka-ui/assets/60904917/2ec68880-8d26-44ec-9886-00f8cb82e275) ### Logs _No response_ ### Additional context 1 - I have tried all the methods listed in the documentation to define permissions on a topic
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/AccessControlServiceMock.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java index a587d53770a..818fab84d68 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/TopicsController.java @@ -167,12 +167,13 @@ public Mono<ResponseEntity<TopicsResponseDTO>> getTopics(String clusterName, ServerWebExchange exchange) { return topicsService.getTopicsForPagination(getCluster(clusterName)) - .flatMap(existingTopics -> { + .flatMap(topics -> accessControlService.filterViewableTopics(topics, clusterName)) + .flatMap(topics -> { int pageSize = perPage != null && perPage > 0 ? perPage : DEFAULT_PAGE_SIZE; var topicsToSkip = ((page != null && page > 0 ? page : 1) - 1) * pageSize; var comparator = sortOrder == null || !sortOrder.equals(SortOrderDTO.DESC) ? getComparatorForTopic(orderBy) : getComparatorForTopic(orderBy).reversed(); - List<InternalTopic> filtered = existingTopics.stream() + List<InternalTopic> filtered = topics.stream() .filter(topic -> !topic.isInternal() || showInternal != null && showInternal) .filter(topic -> search == null || StringUtils.containsIgnoreCase(topic.getName(), search)) @@ -189,7 +190,6 @@ public Mono<ResponseEntity<TopicsResponseDTO>> getTopics(String clusterName, return topicsService.loadTopics(getCluster(clusterName), topicsPage) .flatMapMany(Flux::fromIterable) - .filterWhen(dto -> accessControlService.isTopicAccessible(dto, clusterName)) .collectList() .map(topicsToRender -> new TopicsResponseDTO() diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java index b507d4c0810..debd9acd829 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java @@ -202,19 +202,23 @@ public boolean isTopicAccessible(AccessContext context, AuthenticatedUser user) return isAccessible(Resource.TOPIC, context.getTopic(), user, context, requiredActions); } - public Mono<Boolean> isTopicAccessible(InternalTopic dto, String clusterName) { + public Mono<List<InternalTopic>> filterViewableTopics(List<InternalTopic> topics, String clusterName) { if (!rbacEnabled) { - return Mono.just(true); + return Mono.just(topics); } - AccessContext accessContext = AccessContext - .builder() - .cluster(clusterName) - .topic(dto.getName()) - .topicActions(TopicAction.VIEW) - .build(); - - return getUser().map(u -> isTopicAccessible(accessContext, u)); + return getUser() + .map(user -> topics.stream() + .filter(topic -> { + var accessContext = AccessContext + .builder() + .cluster(clusterName) + .topic(topic.getName()) + .topicActions(TopicAction.VIEW) + .build(); + return isTopicAccessible(accessContext, user); + } + ).toList()); } private boolean isConsumerGroupAccessible(AccessContext context, AuthenticatedUser user) {
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/AccessControlServiceMock.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/AccessControlServiceMock.java index 263f1367d8c..852b75ae085 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/AccessControlServiceMock.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/util/AccessControlServiceMock.java @@ -5,6 +5,7 @@ import static org.mockito.Mockito.when; import com.provectus.kafka.ui.service.rbac.AccessControlService; +import java.util.Collections; import org.mockito.Mockito; import reactor.core.publisher.Mono; @@ -16,7 +17,7 @@ public AccessControlService getMock() { when(mock.validateAccess(any())).thenReturn(Mono.empty()); when(mock.isSchemaAccessible(anyString(), anyString())).thenReturn(Mono.just(true)); - when(mock.isTopicAccessible(any(), anyString())).thenReturn(Mono.just(true)); + when(mock.filterViewableTopics(any(), any())).then(invocation -> Mono.just(invocation.getArgument(0))); return mock; }
val
test
2023-06-16T08:38:48
"2023-05-29T06:51:38Z"
javihernanp
train
provectus/kafka-ui/3401_3957
provectus/kafka-ui
provectus/kafka-ui/3401
provectus/kafka-ui/3957
[ "keyword_pr_to_issue" ]
8337c9c183d632ea27b7c253d776fdfda4b19840
9549f68d7edcb0022687c8155010ba3c5b2cddac
[ "Hey! This is still causing issues.\r\nThe frontend client is not adding CORS header while initializing google oauth client as you can see in the network tab(in screenshot)\r\n\r\n![image](https://github.com/provectus/kafka-ui/assets/10064324/5cacb32b-21df-42f2-820d-5113a6ec04cf)\r\n", "@prdpx7 could you please try this image?\r\n`public.ecr.aws/provectus/kafka-ui-custom-build:3957`\r\nLet me know how it goes.", "> @prdpx7 could you please try this image? `public.ecr.aws/provectus/kafka-ui-custom-build:3957` Let me know how it goes.\r\n\r\nHey! The new build is working fine. Appreciate the help πŸ™ " ]
[]
"2023-06-20T08:11:21Z"
[ "type/bug", "status/pending", "scope/backend", "status/accepted", "area/auth" ]
OAuth2: Add filters allowing CORS
https://discord.com/channels/897805035122077716/897805035122077719/1077852545168445440 ![Uploading image.png…]()
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java index c6c88bfa984..448f92ad477 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java @@ -1,18 +1,41 @@ package com.provectus.kafka.ui.config; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.server.reactive.ServerHttpRequest; +import org.springframework.http.server.reactive.ServerHttpResponse; import org.springframework.web.reactive.config.CorsRegistry; import org.springframework.web.reactive.config.WebFluxConfigurer; +import org.springframework.web.server.ServerWebExchange; +import org.springframework.web.server.WebFilter; +import org.springframework.web.server.WebFilterChain; +import reactor.core.publisher.Mono; @Configuration -public class CorsGlobalConfiguration implements WebFluxConfigurer { +public class CorsGlobalConfiguration { - @Override - public void addCorsMappings(CorsRegistry registry) { - registry.addMapping("/**") - .allowedOrigins("*") - .allowedMethods("*") - .allowedHeaders("*") - .allowCredentials(false); + @Bean + public WebFilter corsFilter() { + return (final ServerWebExchange ctx, final WebFilterChain chain) -> { + final ServerHttpRequest request = ctx.getRequest(); + + final ServerHttpResponse response = ctx.getResponse(); + final HttpHeaders headers = response.getHeaders(); + headers.add("Access-Control-Allow-Origin", "*"); + headers.add("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, OPTIONS"); + headers.add("Access-Control-Max-Age", "3600"); + headers.add("Access-Control-Allow-Headers", "Content-Type"); + + if (request.getMethod() == HttpMethod.OPTIONS) { + response.setStatusCode(HttpStatus.OK); + return Mono.empty(); + } + + return chain.filter(ctx); + }; } + }
null
train
test
2023-06-22T10:46:11
"2023-02-24T06:53:36Z"
Haarolean
train
provectus/kafka-ui/3959_3962
provectus/kafka-ui
provectus/kafka-ui/3959
provectus/kafka-ui/3962
[ "keyword_pr_to_issue" ]
4134d68316051c6ece70bb3cf0499ea5eb6d9380
cdb4f84e236de638fe9e45f267ece011c72463e7
[ "Hello there jonaslb! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, thanks for the suggestion! " ]
[]
"2023-06-21T09:24:07Z"
[ "type/enhancement", "scope/frontend", "status/accepted", "area/ux" ]
Always show at least 2 or 3 digits for broker disk usage
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? We have some brokers with storage in the "a few TB" range. My challenge is that disk usage says "1 TB" or "2 TB" on the broker overview (note: even for smaller brokers in the "a few GB" range, I guess the problem would be the same). This is not very informative, so in practice I have to look elsewhere (on the server, or other monitoring tools) to determine the actual (more precise) disk usage. ### Describe the feature you're interested in I'd like for the disk usage overview in the UI to always show 2 or 3 digits. E.g. if usage is between 1 and 10 GB (or TB), a decimal number with 1 or 2 decimals should be shown instead of an integer. (and if choosing 3 digits, also between 10 and 100 units). Ie: - Instead of "1 GB" show "1.4 GB" (or "1.43 GB") - Optionally instead of "11 GB" show "11.3 GB" (if choosing 3 digits) ### Describe alternatives you've considered Looking in other monitoring tools / ssh on server and `df` ### Version you're running fdd9ad9 ### Additional context _No response_
[ "kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx", "kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx", "kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx" ]
[ "kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx", "kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx", "kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx b/kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx index fcad7423f39..44a75c51ad8 100644 --- a/kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx +++ b/kafka-ui-react-app/src/components/Brokers/Broker/Broker.tsx @@ -44,7 +44,10 @@ const Broker: React.FC = () => { <Metrics.Wrapper> <Metrics.Section> <Metrics.Indicator label="Segment Size"> - <BytesFormatted value={brokerDiskUsage?.segmentSize} /> + <BytesFormatted + value={brokerDiskUsage?.segmentSize} + precision={2} + /> </Metrics.Indicator> <Metrics.Indicator label="Segment Count"> {brokerDiskUsage?.segmentCount} diff --git a/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx b/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx index ae6a7f4b3b6..89a553be7d0 100644 --- a/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx +++ b/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx @@ -66,7 +66,7 @@ describe('Broker Component', () => { expect( screen.getByText(brokerDiskUsage?.segmentCount || '') ).toBeInTheDocument(); - expect(screen.getByText('12 MB')).toBeInTheDocument(); + expect(screen.getByText('11.77 MB')).toBeInTheDocument(); expect(screen.getByText('Segment Count')).toBeInTheDocument(); expect( diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx index d8cd0a2f763..e59c006b0c6 100644 --- a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx +++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx @@ -105,6 +105,7 @@ const BrokersList: React.FC = () => { getValue={getValue} renderValue={renderValue} renderSegments + precision={2} /> ), }, diff --git a/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx b/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx index 24485342aa4..7a230be8121 100644 --- a/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx +++ b/kafka-ui-react-app/src/components/common/NewTable/SizeCell.tsx @@ -6,10 +6,10 @@ import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; type AsAny = any; const SizeCell: React.FC< - CellContext<AsAny, unknown> & { renderSegments?: boolean } -> = ({ getValue, row, renderSegments = false }) => ( + CellContext<AsAny, unknown> & { renderSegments?: boolean; precision?: number } +> = ({ getValue, row, renderSegments = false, precision = 0 }) => ( <> - <BytesFormatted value={getValue<string | number>()} /> + <BytesFormatted value={getValue<string | number>()} precision={precision} /> {renderSegments ? `, ${row?.original.count} segment(s)` : null} </> );
null
val
test
2023-06-21T12:13:53
"2023-06-21T08:13:16Z"
jonaslb
train
provectus/kafka-ui/3584_3964
provectus/kafka-ui
provectus/kafka-ui/3584
provectus/kafka-ui/3964
[ "keyword_pr_to_issue" ]
81805703c855b3ea5ffde6ab022b207780b485bb
b9bbb1a8235037c9a4810bd4d586fb42408cb67b
[ "Hello there khudyakovan! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hello @Haarolean, Can you assign me to this issue?", "Thank you for assigning this issue to me.\r\n\r\nI want to run the app with the configuration \"kafka-ui-auth-context.yaml\" file so I can access to the login button and test the problem.\r\n\r\nI have been trying to run my app with this configuration. Can you please help me with this?\r\n\r\nThanks\r\n", "@inanc-can you don't exactly need that compose, as you don't need authentication at all. You can run any of the compose examples.", "<img width=\"1792\" alt=\"Screenshot 2023-04-20 at 19 04 24\" src=\"https://user-images.githubusercontent.com/55583718/233437672-83f8c20c-707d-4539-8967-0a0f1ad68ab8.png\">\r\n\r\nIn this example which I reached with the \"kafka-ui % docker run -it -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui\", There is no option to Log In.\r\n\r\nI assumed that I should first need to \"Log In\" and Topic > Statistics to test the case.\r\n\r\nIf it is wrong please explain me how should I proceed later.", "@inanc-can you don't need to log in unless there is an authentication mechanism in place.\r\nThe app you run has no clusters configured, either add one manually via \"configure new cluster\" or run a compose which does have one, i.e. `docker-compose -f ./documentation/compose/kafka-ui.yaml up -d`.", "Thank you for responding quickly.\r\n\r\nI have recreated the situation that causes this problem.\r\n\r\nHow can I rerun the project so that my changes would appear on my localhost ?\r\n(In a React Project, I could reach this by saving the project then it would reload, so that you understand what I mean)\r\n\r\nThanks", "@inanc-can\r\n2. set the vite proxy as described [here](https://github.com/provectus/kafka-ui/tree/master/kafka-ui-react-app#proxying-api-requests-in-development), pointing at your container's 8080 port to use it as backend\r\n1. launch the frontend via npm start", "Hallo @Haarolean \r\n\r\nThis request causes the issue :\r\n<img width=\"566\" alt=\"Screenshot 2023-04-25 at 18 01 28\" src=\"https://user-images.githubusercontent.com/55583718/234335713-acc972ef-cca6-44e9-8c56-4b78096c98e9.png\">\r\n\r\nIs there a way to get a reload every time I change the API (as it happens in the front-end), so I can take a look if I can fix it through the backend?\r\n\r\nWould you happen to have any other suggestions?", "@inanc-can it's a pure frontend issue. 404 is _expected_ if there's no statistics result already available, meaning that there's nothing to display just yet until the next run is started & completed. We just need to remove the red warning upon 404 there.", "@inanc-can any updates?", "I have spent a considerable amount of time attempting to resolve the issue. 404 Error is caught at App.tsx in \"const query Client\". Although I have spent some time on it, I believe that the functionality is working properly since it displays an error message when an error occurs.\r\n\r\n", "@inanc-can I've already mentioned that 404 is expectable there, so we don't have to display it.\r\nSee #3964 if you're interested how the solution looks." ]
[]
"2023-06-21T11:41:02Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed", "severity/low", "area/ux" ]
404 at first call "Statistic" page
Dear developers, Found an issue with topic statistics page How to reproduce: 1. First login in system. 2. Select topic and statistics tab then. 3. "404 Not found" label will appear at the rigth bottom of the page. Error appears only at first loading of the "Statistics" page. Next calls of the page error label does not appear. "Statistics" function is working as usual by the way Version of UI is v0.6.1 ef0dacb ![404-statistics](https://user-images.githubusercontent.com/19203188/228463688-05a5bbe0-4300-4797-83e3-0b8beac18d91.PNG)
[ "kafka-ui-react-app/src/lib/hooks/api/topics.ts" ]
[ "kafka-ui-react-app/src/lib/hooks/api/topics.ts" ]
[]
diff --git a/kafka-ui-react-app/src/lib/hooks/api/topics.ts b/kafka-ui-react-app/src/lib/hooks/api/topics.ts index a87673368dd..f06b6a87670 100644 --- a/kafka-ui-react-app/src/lib/hooks/api/topics.ts +++ b/kafka-ui-react-app/src/lib/hooks/api/topics.ts @@ -304,6 +304,11 @@ export function useTopicAnalysis( useErrorBoundary: true, retry: false, suspense: false, + onError: (error: Response) => { + if (error.status !== 404) { + showServerError(error as Response); + } + }, } ); }
null
train
test
2023-06-26T10:36:26
"2023-03-29T07:49:02Z"
khudyakovan
train
provectus/kafka-ui/3318_3969
provectus/kafka-ui
provectus/kafka-ui/3318
provectus/kafka-ui/3969
[ "keyword_pr_to_issue" ]
333eae24759aaa7b3fc14e5e7dea232200c13dcd
d2a5acc82d0be8a8f7e916f2cfeb8aed5d274256
[ "@Haarolean I would like to contribute to this issue." ]
[]
"2023-06-24T00:25:27Z"
[ "good first issue", "scope/frontend", "status/accepted", "type/chore" ]
Topics: UI minor fixes for Create new Topic form
**Describe the bug** (Actual behavior) Error message about Topic Name requiredness is "name is a required field" "e" letter is not fully displayed within "Maximum message size in bytes" field's placeholder Error message of "Number of Partitions" starts with lower case **Expected behavior** Display all placeholders fully Error messages should include the field name fully and start with upper cases\ **Set up** https://www.kafka-ui.provectus.io/ **Steps to Reproduce** 1. Login to kafka-ui 2. Navigate to Topics 3. Press "Add a Topic" **Screenshots** <img width="1724" alt="typos topic profile" src="https://user-images.githubusercontent.com/104780608/217455251-4c05bd83-a246-4e17-98e0-b3b15a1caa4a.png">
[ "kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx", "kafka-ui-react-app/src/lib/yupExtended.ts" ]
[ "kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx", "kafka-ui-react-app/src/lib/yupExtended.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx b/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx index f3d75d1bc8b..867291e0bb5 100644 --- a/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx @@ -60,16 +60,16 @@ describe('New', () => { await userEvent.clear(screen.getByPlaceholderText('Topic Name')); await userEvent.tab(); await expect( - screen.getByText('name is a required field') + screen.getByText('Topic Name is required') ).toBeInTheDocument(); await userEvent.type( - screen.getByLabelText('Number of partitions *'), + screen.getByLabelText('Number of Partitions *'), minValue ); - await userEvent.clear(screen.getByLabelText('Number of partitions *')); + await userEvent.clear(screen.getByLabelText('Number of Partitions *')); await userEvent.tab(); await expect( - screen.getByText('Number of partitions is required and must be a number') + screen.getByText('Number of Partitions is required and must be a number') ).toBeInTheDocument(); expect(createTopicMock).not.toHaveBeenCalled(); @@ -89,7 +89,7 @@ describe('New', () => { renderComponent(clusterTopicNewPath(clusterName)); await userEvent.type(screen.getByPlaceholderText('Topic Name'), topicName); await userEvent.type( - screen.getByLabelText('Number of partitions *'), + screen.getByLabelText('Number of Partitions *'), minValue ); await userEvent.click(screen.getByText('Create topic')); diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts index fc9e7aae9d5..9dbe1d0aeef 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.styled.ts @@ -1,4 +1,5 @@ import styled from 'styled-components'; +import Input from 'components/common/Input/Input'; export const Column = styled.div` display: flex; @@ -16,6 +17,10 @@ export const CustomParamsHeading = styled.h4` color: ${({ theme }) => theme.heading.h4}; `; +export const MessageSizeInput = styled(Input)` + min-width: 195px; +`; + export const Label = styled.div` display: flex; gap: 16px; diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx index e29bfca7a88..677890cb721 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx @@ -109,12 +109,12 @@ const TopicForm: React.FC<Props> = ({ {!isEditing && ( <div> <InputLabel htmlFor="topicFormNumberOfPartitions"> - Number of partitions * + Number of Partitions * </InputLabel> <Input id="topicFormNumberOfPartitions" type="number" - placeholder="Number of partitions" + placeholder="Number of Partitions" min="1" name="partitions" positiveOnly @@ -228,7 +228,7 @@ const TopicForm: React.FC<Props> = ({ <InputLabel htmlFor="topicFormMaxMessageBytes"> Maximum message size in bytes </InputLabel> - <Input + <S.MessageSizeInput id="topicFormMaxMessageBytes" type="number" placeholder="Maximum message size" diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx index e3fd7a7e5e8..347ce935e3d 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx +++ 
b/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx @@ -37,7 +37,7 @@ describe('TopicForm', () => { expectByRoleAndNameToBeInDocument('textbox', 'Topic Name *'); - expectByRoleAndNameToBeInDocument('spinbutton', 'Number of partitions *'); + expectByRoleAndNameToBeInDocument('spinbutton', 'Number of Partitions *'); expectByRoleAndNameToBeInDocument('spinbutton', 'Replication Factor'); expectByRoleAndNameToBeInDocument('spinbutton', 'Min In Sync Replicas'); diff --git a/kafka-ui-react-app/src/lib/yupExtended.ts b/kafka-ui-react-app/src/lib/yupExtended.ts index 241dac9770a..ad0c9fb1923 100644 --- a/kafka-ui-react-app/src/lib/yupExtended.ts +++ b/kafka-ui-react-app/src/lib/yupExtended.ts @@ -66,17 +66,17 @@ export const topicFormValidationSchema = yup.object().shape({ name: yup .string() .max(249) - .required() + .required('Topic Name is required') .matches( TOPIC_NAME_VALIDATION_PATTERN, 'Only alphanumeric, _, -, and . allowed' ), partitions: yup .number() - .min(1) + .min(1, 'Number of Partitions must be greater than or equal to 1') .max(2147483647) .required() - .typeError('Number of partitions is required and must be a number'), + .typeError('Number of Partitions is required and must be a number'), replicationFactor: yup.string(), minInSyncReplicas: yup.string(), cleanupPolicy: yup.string().required(),
null
test
test
2023-08-04T13:43:40
"2023-02-08T06:52:01Z"
armenuikafka
train
provectus/kafka-ui/3878_3972
provectus/kafka-ui
provectus/kafka-ui/3878
provectus/kafka-ui/3972
[ "connected" ]
9549f68d7edcb0022687c8155010ba3c5b2cddac
6b67313d1a0626fc4db384e8fb2f3581987bfa85
[]
[]
"2023-06-26T07:57:31Z"
[ "scope/QA", "scope/AQA" ]
[e2e] Checking Broker's config search is case insensetive
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? _No response_ ### Describe the feature you're interested in autotest implementation for https://app.qase.io/project/KAFKAUI?case=348&suite=1&tab= ### Describe alternatives you've considered _No response_ ### Version you're running master ### Additional context _No response_
[]
[ "kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/StringUtils.java" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java" ]
diff --git a/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/StringUtils.java b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/StringUtils.java new file mode 100644 index 00000000000..77a46b805e9 --- /dev/null +++ b/kafka-ui-e2e-checks/src/main/java/com/provectus/kafka/ui/utilities/StringUtils.java @@ -0,0 +1,15 @@ +package com.provectus.kafka.ui.utilities; + +import java.util.stream.IntStream; +import lombok.extern.slf4j.Slf4j; + +@Slf4j +public class StringUtils { + + public static String getMixedCase(String original) { + return IntStream.range(0, original.length()) + .mapToObj(i -> i % 2 == 0 ? Character.toUpperCase(original.charAt(i)) : original.charAt(i)) + .collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append) + .toString(); + } +}
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java index f4cacc25207..eb82a50db1b 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/manualsuite/backlog/SmokeBacklog.java @@ -1,6 +1,5 @@ package com.provectus.kafka.ui.manualsuite.backlog; -import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.BROKERS_SUITE_ID; import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.SCHEMAS_SUITE_ID; import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.TOPICS_PROFILE_SUITE_ID; import static com.provectus.kafka.ui.qasesuite.BaseQaseTest.TOPICS_SUITE_ID; @@ -57,24 +56,17 @@ public void testCaseE() { public void testCaseF() { } - @Automation(state = TO_BE_AUTOMATED) - @Suite(id = BROKERS_SUITE_ID) - @QaseId(348) - @Test - public void testCaseG() { - } - @Automation(state = NOT_AUTOMATED) @Suite(id = TOPICS_SUITE_ID) @QaseId(50) @Test - public void testCaseH() { + public void testCaseG() { } @Automation(state = NOT_AUTOMATED) @Suite(id = SCHEMAS_SUITE_ID) @QaseId(351) @Test - public void testCaseI() { + public void testCaseH() { } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java index 663018cbc3b..ec1bbc2313e 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/smokesuite/brokers/BrokersTest.java @@ -1,6 +1,7 @@ package com.provectus.kafka.ui.smokesuite.brokers; import static com.provectus.kafka.ui.pages.brokers.BrokersDetails.DetailsTab.CONFIGS; +import static com.provectus.kafka.ui.utilities.StringUtils.getMixedCase; import static com.provectus.kafka.ui.variables.Expected.BROKER_SOURCE_INFO_TOOLTIP; import com.codeborne.selenide.Condition; @@ -8,6 +9,7 @@ import com.provectus.kafka.ui.pages.brokers.BrokersConfigTab; import io.qameta.allure.Issue; import io.qase.api.annotation.QaseId; +import java.util.List; import org.testng.Assert; import org.testng.annotations.Ignore; import org.testng.annotations.Test; @@ -100,6 +102,38 @@ public void brokersConfigSecondPageSearchCheck() { String.format("getAllConfigs().contains(%s)", anyConfigKeySecondPage)); } + @Ignore + @Issue("https://github.com/provectus/kafka-ui/issues/3347") + @QaseId(348) + @Test + public void brokersConfigCaseInsensitiveSearchCheck() { + navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); + brokersDetails + .openDetailsTab(CONFIGS); + String anyConfigKeyFirstPage = brokersConfigTab + .getAllConfigs().stream() + .findAny().orElseThrow() + .getKey(); + brokersConfigTab + .clickNextButton(); + Assert.assertFalse(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + .toList().contains(anyConfigKeyFirstPage), + String.format("getAllConfigs().contains(%s)", anyConfigKeyFirstPage)); + SoftAssert softly = new SoftAssert(); + List.of(anyConfigKeyFirstPage.toLowerCase(), anyConfigKeyFirstPage.toUpperCase(), + getMixedCase(anyConfigKeyFirstPage)) + .forEach(configCase -> { + brokersConfigTab + .searchConfig(configCase); + softly.assertTrue(brokersConfigTab.getAllConfigs().stream() + .map(BrokersConfigTab.BrokersConfigItem::getKey) + 
.toList().contains(anyConfigKeyFirstPage), + String.format("getAllConfigs().contains(%s)", configCase)); + }); + softly.assertAll(); + } + @QaseId(331) @Test public void brokersSourceInfoCheck() {
train
test
2023-06-23T10:45:12
"2023-05-30T09:10:00Z"
VladSenyuta
train
provectus/kafka-ui/3883_3979
provectus/kafka-ui
provectus/kafka-ui/3883
provectus/kafka-ui/3979
[ "keyword_pr_to_issue" ]
b9bbb1a8235037c9a4810bd4d586fb42408cb67b
b700ac3991dc5106b3d996b4dcfef92004e81521
[]
[ "I think it will be more readable if you embed groupNameExtractor code ", "let keep LdapAuthoritiesPopulator type here", "can be protected", "how can AccessControlService be null here? We rely on it to be present in all controllers.", "Not sure actually, but you're absolutely right. That used to by copy-pasted around, I guess might be related to some tests.", "done", "done", "done" ]
"2023-06-27T09:37:30Z"
[ "type/enhancement", "scope/backend", "status/accepted", "area/rbac" ]
RBAC: LDAP: Support group search filter filter and subtree search
Might be useful if there's a different filter than `(member={0})` is used.
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java index 9d07aca2dd5..9eac9e5db01 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapProperties.java @@ -15,6 +15,8 @@ public class LdapProperties { private String userFilterSearchBase; private String userFilterSearchFilter; private String groupFilterSearchBase; + private String groupFilterSearchFilter; + private String groupRoleAttribute; @Value("${oauth2.ldap.activeDirectory:false}") private boolean isActiveDirectory; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java index ce04a2e1659..b7750d528b8 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/auth/LdapSecurityConfig.java @@ -3,14 +3,16 @@ import static com.provectus.kafka.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST; import com.provectus.kafka.ui.service.rbac.AccessControlService; +import com.provectus.kafka.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor; import java.util.Collection; import java.util.List; -import javax.annotation.Nullable; +import java.util.Optional; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.ldap.LdapAutoConfiguration; import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -50,9 +52,9 @@ public class LdapSecurityConfig { @Bean public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSource contextSource, - LdapAuthoritiesPopulator ldapAuthoritiesPopulator, - @Nullable AccessControlService acs) { - var rbacEnabled = acs != null && acs.isRbacEnabled(); + LdapAuthoritiesPopulator authoritiesExtractor, + AccessControlService acs) { + var rbacEnabled = acs.isRbacEnabled(); BindAuthenticator ba = new BindAuthenticator(contextSource); if (props.getBase() != null) { ba.setUserDnPatterns(new String[] {props.getBase()}); @@ -67,7 +69,7 @@ public ReactiveAuthenticationManager authenticationManager(BaseLdapPathContextSo AbstractLdapAuthenticationProvider authenticationProvider; if (!props.isActiveDirectory()) { authenticationProvider = rbacEnabled - ? new LdapAuthenticationProvider(ba, ldapAuthoritiesPopulator) + ? 
new LdapAuthenticationProvider(ba, authoritiesExtractor) : new LdapAuthenticationProvider(ba); } else { authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(), @@ -97,11 +99,24 @@ public BaseLdapPathContextSource contextSource() { @Bean @Primary - public LdapAuthoritiesPopulator ldapAuthoritiesPopulator(BaseLdapPathContextSource contextSource) { - var authoritiesPopulator = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase()); - authoritiesPopulator.setRolePrefix(""); - authoritiesPopulator.setConvertToUpperCase(false); - return authoritiesPopulator; + public DefaultLdapAuthoritiesPopulator ldapAuthoritiesExtractor(ApplicationContext context, + BaseLdapPathContextSource contextSource, + AccessControlService acs) { + var rbacEnabled = acs != null && acs.isRbacEnabled(); + + DefaultLdapAuthoritiesPopulator extractor; + + if (rbacEnabled) { + extractor = new RbacLdapAuthoritiesExtractor(context, contextSource, props.getGroupFilterSearchBase()); + } else { + extractor = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase()); + } + + Optional.ofNullable(props.getGroupFilterSearchFilter()).ifPresent(extractor::setGroupSearchFilter); + extractor.setRolePrefix(""); + extractor.setConvertToUpperCase(false); + extractor.setSearchSubtree(true); + return extractor; } @Bean diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java new file mode 100644 index 00000000000..ba30eb5cc38 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java @@ -0,0 +1,78 @@ +package com.provectus.kafka.ui.service.rbac.extractor; + +import com.provectus.kafka.ui.config.auth.LdapProperties; +import com.provectus.kafka.ui.model.rbac.Role; +import com.provectus.kafka.ui.model.rbac.provider.Provider; +import com.provectus.kafka.ui.service.rbac.AccessControlService; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.ApplicationContext; +import org.springframework.ldap.core.DirContextOperations; +import org.springframework.ldap.core.support.BaseLdapPathContextSource; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator; +import org.springframework.util.Assert; + +@Slf4j +public class RbacLdapAuthoritiesExtractor extends DefaultLdapAuthoritiesPopulator { + + private final AccessControlService acs; + private final LdapProperties props; + + public RbacLdapAuthoritiesExtractor(ApplicationContext context, + BaseLdapPathContextSource contextSource, String groupFilterSearchBase) { + super(contextSource, groupFilterSearchBase); + this.acs = context.getBean(AccessControlService.class); + this.props = context.getBean(LdapProperties.class); + } + + @Override + protected Set<GrantedAuthority> getAdditionalRoles(DirContextOperations user, String username) { + var ldapGroups = getRoles(user.getNameInNamespace(), username); + + return acs.getRoles() + .stream() + .filter(r -> r.getSubjects() + .stream() + .filter(subject -> subject.getProvider().equals(Provider.LDAP)) + .filter(subject -> 
subject.getType().equals("group")) + .anyMatch(subject -> ldapGroups.contains(subject.getValue())) + ) + .map(Role::getName) + .peek(role -> log.trace("Mapped role [{}] for user [{}]", role, username)) + .map(SimpleGrantedAuthority::new) + .collect(Collectors.toSet()); + } + + private Set<String> getRoles(String userDn, String username) { + var groupSearchBase = props.getGroupFilterSearchBase(); + Assert.notNull(groupSearchBase, "groupSearchBase is empty"); + + var groupRoleAttribute = props.getGroupRoleAttribute(); + if (groupRoleAttribute == null) { + + groupRoleAttribute = "cn"; + } + + log.trace( + "Searching for roles for user [{}] with DN [{}], groupRoleAttribute [{}] and filter [{}] in search base [{}]", + username, userDn, groupRoleAttribute, getGroupSearchFilter(), groupSearchBase); + + var ldapTemplate = getLdapTemplate(); + ldapTemplate.setIgnoreNameNotFoundException(true); + + Set<Map<String, List<String>>> userRoles = ldapTemplate.searchForMultipleAttributeValues( + groupSearchBase, getGroupSearchFilter(), new String[] {userDn, username}, + new String[] {groupRoleAttribute}); + + return userRoles.stream() + .map(record -> record.get(getGroupRoleAttribute()).get(0)) + .peek(group -> log.trace("Found LDAP group [{}] for user [{}]", group, username)) + .collect(Collectors.toSet()); + } + +}
null
train
test
2023-06-26T11:32:21
"2023-05-30T14:01:52Z"
Haarolean
train
provectus/kafka-ui/3625_3986
provectus/kafka-ui
provectus/kafka-ui/3625
provectus/kafka-ui/3986
[ "keyword_issue_to_pr", "connected" ]
be2f9f0605b8dda210360caa693e3b5f58c752f7
d572e43b4f978473292757862aff08452fac57fb
[ "Hello there Dugong42! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, thanks for bringing this to our attention. \r\n\r\nWe'll def look into this shortly. ", "Hello @Dugong42 \r\nCan you please provide your docker version?", "Hi. It's Docker `1.13`", "Hi, would it possible to have an ETA for this fix ?\r\nThx :)", "https://github.com/provectus/kafka-ui/pull/3861#issuecomment-1601009788", "The one left workflow fixed within #3986\r\nThe rest is our private ECR" ]
[]
"2023-06-29T07:32:10Z"
[ "type/bug", "status/accepted", "scope/infrastructure" ]
Cannot pull image of v0.6.1 from non OCI-compliant runtimes
**Describe the bug** (Actual behavior)

Hi, the update of [docker/build-push-action](https://github.com/docker/build-push-action) to `v4.0.0` (introduced by #3488) seems to cause issues with some container runtimes that are not OCI-compliant. Docker fails when pulling the `v0.6.1` image from non OCI-compliant runtimes (older Openshift platform).

`Failed to pull image "<dockerproxy>/provectuslabs/kafka-ui:v0.6.1": rpc error: code = Unknown desc = missing signature key `

The changelog of docker/build-push-action states that it could introduce issues with some runtimes.

> ## v4.0.0
> **Note**
> Buildx v0.10 enables support for a minimal [SLSA Provenance](https://slsa.dev/provenance/) attestation, which requires support for [OCI-compliant](https://github.com/opencontainers/image-spec) multi-platform images. This may introduce issues with registry and runtime support (e.g. [Google Cloud Run and AWS Lambda](https://redirect.github.com/docker/buildx/issues/1533)). You can optionally disable the default provenance attestation functionality using `provenance: false`.

This could probably be fixed by using `provenance: false` as the changelog above suggests

**Expected behavior**

Can pull on older docker runtimes.

**Set up**

I realize this is a niche issue. In my case I got it when trying to pull the image in `v0.6.1` in an older Openshift platform, and I'm guessing that the problem comes from the OCI compliance.

**Steps to Reproduce**

Hard to test. Maybe by installing an older docker version and pulling v0.6.1. I could potentially test an image with the suggested fix and see if it works.

**Additional information**

The manifest of the image previously contained (version <= `v0.6.0`):

` "mediaType": "application/vnd.docker.distribution.manifest.list.v2+json",`

From `v0.6.1` it became:

` "mediaType": "application/vnd.oci.image.index.v1+json",`
[ ".github/workflows/master.yaml" ]
[ ".github/workflows/master.yaml" ]
[]
diff --git a/.github/workflows/master.yaml b/.github/workflows/master.yaml index 48581359894..d751e500210 100644 --- a/.github/workflows/master.yaml +++ b/.github/workflows/master.yaml @@ -58,6 +58,7 @@ jobs: builder: ${{ steps.buildx.outputs.name }} context: kafka-ui-api platforms: linux/amd64,linux/arm64 + provenance: false push: true tags: | provectuslabs/kafka-ui:${{ steps.build.outputs.version }}
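For background on what `provenance: false` changes: with provenance attestations enabled, Buildx pushes an OCI image index (`application/vnd.oci.image.index.v1+json`) where older runtimes expect a Docker manifest list. A rough sketch of how one might check a tag's manifest media type against Docker Hub's public registry API; the token and registry endpoints are Docker Hub's anonymous-pull flow, and the JSON handling is deliberately crude:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ManifestMediaTypeCheck {
  public static void main(String[] args) throws Exception {
    var http = HttpClient.newHttpClient();

    // 1. Anonymous pull token for the repository.
    var tokenJson = http.send(HttpRequest.newBuilder(URI.create(
            "https://auth.docker.io/token?service=registry.docker.io"
                + "&scope=repository:provectuslabs/kafka-ui:pull")).build(),
        HttpResponse.BodyHandlers.ofString()).body();
    var token = tokenJson.split("\"token\":\"")[1].split("\"")[0]; // crude JSON parsing

    // 2. HEAD the manifest, accepting both OCI index and Docker manifest list.
    var resp = http.send(HttpRequest.newBuilder(URI.create(
            "https://registry-1.docker.io/v2/provectuslabs/kafka-ui/manifests/v0.6.1"))
        .header("Authorization", "Bearer " + token)
        .header("Accept", "application/vnd.oci.image.index.v1+json,"
            + "application/vnd.docker.distribution.manifest.list.v2+json")
        .method("HEAD", HttpRequest.BodyPublishers.noBody())
        .build(), HttpResponse.BodyHandlers.discarding());

    // OCI index => built with provenance; manifest list => the pre-v4.0 behavior.
    System.out.println(resp.headers().firstValue("content-type").orElse("?"));
  }
}
```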
null
train
test
2023-06-28T14:13:41
"2023-04-05T14:56:26Z"
Dugong42
train
provectus/kafka-ui/3994_3995
provectus/kafka-ui
provectus/kafka-ui/3994
provectus/kafka-ui/3995
[ "connected" ]
be2f9f0605b8dda210360caa693e3b5f58c752f7
ab58618d830801655d652cbc9e3b13f1617dab13
[]
[]
"2023-06-30T10:42:42Z"
[ "scope/frontend", "type/security", "status/accepted" ]
FE: Create robots.txt to disallow scraping
Otherwise, some public instances could be found using search.
[ "kafka-ui-react-app/public/robots.txt" ]
[ "kafka-ui-react-app/public/robots.txt" ]
[]
diff --git a/kafka-ui-react-app/public/robots.txt b/kafka-ui-react-app/public/robots.txt index 01b0f9a1073..1f53798bb4f 100644 --- a/kafka-ui-react-app/public/robots.txt +++ b/kafka-ui-react-app/public/robots.txt @@ -1,2 +1,2 @@ -# https://www.robotstxt.org/robotstxt.html User-agent: * +Disallow: /
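A quick sanity probe (a sketch, assuming the instance is reachable on localhost:8080 without a custom base path) to confirm the served robots.txt actually carries the blanket disallow after deployment, e.g. when the app sits behind a reverse proxy:

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RobotsTxtProbe {
  public static void main(String[] args) throws Exception {
    var body = HttpClient.newHttpClient().send(
        HttpRequest.newBuilder(URI.create("http://localhost:8080/robots.txt")).build(),
        HttpResponse.BodyHandlers.ofString()).body();
    // Expect the post-patch content: a blanket disallow for every crawler.
    System.out.println(body.contains("Disallow: /") ? "crawling blocked" : "check proxy config");
  }
}
```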
null
train
test
2023-06-28T14:13:41
"2023-06-30T10:41:52Z"
Haarolean
train
provectus/kafka-ui/3975_4003
provectus/kafka-ui
provectus/kafka-ui/3975
provectus/kafka-ui/4003
[ "keyword_pr_to_issue" ]
5f231c7681bb6558e0367df22d92f1099e2cac94
8ae8ae40a43331b90a095ad884e0ce789c594644
[]
[]
"2023-07-03T12:26:43Z"
[ "scope/backend", "type/security", "status/accepted" ]
CVE fixes, July 2023
+ kafka-clients
[ "kafka-ui-api/pom.xml", "pom.xml" ]
[ "kafka-ui-api/pom.xml", "pom.xml" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java" ]
diff --git a/kafka-ui-api/pom.xml b/kafka-ui-api/pom.xml index a01300167c4..829f7dc5c50 100644 --- a/kafka-ui-api/pom.xml +++ b/kafka-ui-api/pom.xml @@ -91,7 +91,7 @@ <dependency> <groupId>software.amazon.msk</groupId> <artifactId>aws-msk-iam-auth</artifactId> - <version>1.1.6</version> + <version>1.1.7</version> </dependency> <dependency> diff --git a/pom.xml b/pom.xml index 41a51da19fe..a07f15f1a98 100644 --- a/pom.xml +++ b/pom.xml @@ -26,17 +26,17 @@ <assertj.version>3.19.0</assertj.version> <avro.version>1.11.1</avro.version> <byte-buddy.version>1.12.19</byte-buddy.version> - <confluent.version>7.3.3</confluent.version> + <confluent.version>7.4.0</confluent.version> <datasketches-java.version>3.1.0</datasketches-java.version> <groovy.version>3.0.13</groovy.version> <jackson.version>2.14.0</jackson.version> - <kafka-clients.version>3.3.1</kafka-clients.version> + <kafka-clients.version>3.5.0</kafka-clients.version> <org.mapstruct.version>1.5.5.Final</org.mapstruct.version> <org.projectlombok.version>1.18.24</org.projectlombok.version> <protobuf-java.version>3.23.3</protobuf-java.version> <scala-lang.library.version>2.13.9</scala-lang.library.version> <snakeyaml.version>2.0</snakeyaml.version> - <spring-boot.version>3.0.6</spring-boot.version> + <spring-boot.version>3.1.1</spring-boot.version> <kafka-ui-serde-api.version>1.0.0</kafka-ui-serde-api.version> <odd-oddrn-generator.version>0.1.17</odd-oddrn-generator.version> <odd-oddrn-client.version>0.1.23</odd-oddrn-client.version>
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java index 1651f4951df..314abf914b0 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java @@ -1,6 +1,7 @@ package com.provectus.kafka.ui; import com.provectus.kafka.ui.container.KafkaConnectContainer; +import com.provectus.kafka.ui.container.KsqlDbContainer; import com.provectus.kafka.ui.container.SchemaRegistryContainer; import java.nio.file.Path; import java.util.List; @@ -32,7 +33,7 @@ public abstract class AbstractIntegrationTest { public static final String LOCAL = "local"; public static final String SECOND_LOCAL = "secondLocal"; - private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0"; + private static final String CONFLUENT_PLATFORM_VERSION = "7.2.1"; // Append ".arm64" for a local run public static final KafkaContainer kafka = new KafkaContainer( DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION)) @@ -49,6 +50,11 @@ public abstract class AbstractIntegrationTest { .dependsOn(kafka) .dependsOn(schemaRegistry); + protected static final KsqlDbContainer KSQL_DB = new KsqlDbContainer( + DockerImageName.parse("confluentinc/cp-ksqldb-server") + .withTag(CONFLUENT_PLATFORM_VERSION)) + .withKafka(kafka); + @TempDir public static Path tmpDir; diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java index cde000ac6ec..f266e07c6d5 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlApiClientTest.java @@ -3,28 +3,22 @@ import static org.assertj.core.api.Assertions.assertThat; import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.DoubleNode; +import com.fasterxml.jackson.databind.node.DecimalNode; import com.fasterxml.jackson.databind.node.IntNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.TextNode; import com.provectus.kafka.ui.AbstractIntegrationTest; -import com.provectus.kafka.ui.container.KsqlDbContainer; +import java.math.BigDecimal; import java.time.Duration; -import java.util.List; import java.util.Map; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.testcontainers.shaded.org.awaitility.Awaitility; -import org.testcontainers.utility.DockerImageName; import reactor.test.StepVerifier; class KsqlApiClientTest extends AbstractIntegrationTest { - private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer( - DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0")) - .withKafka(kafka); - @BeforeAll static void startContainer() { KSQL_DB.start(); @@ -72,7 +66,7 @@ void ksqTutorialQueriesWork() { private void assertLastKsqTutorialQueryResult(KsqlApiClient client) { // expected results: //{"header":"Schema","columnNames":[...],"values":null} - //{"header":"Row","columnNames":null,"values":[[0.0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]} + //{"header":"Row","columnNames":null,"values":[[0,["4ab5cbad","8b6eae59","4a7c7b41"],3]]} //{"header":"Row","columnNames":null,"values":[[10.0,["18f4ea86"],1]]} StepVerifier.create( 
client.execute( @@ -86,34 +80,26 @@ private void assertLastKsqTutorialQueryResult(KsqlApiClient client) { assertThat(header.getValues()).isNull(); }) .assertNext(row -> { - assertThat(row).isEqualTo( - KsqlApiClient.KsqlResponseTable.builder() - .header("Row") - .columnNames(null) - .values(List.of(List.of( - new DoubleNode(0.0), - new ArrayNode(JsonNodeFactory.instance) - .add(new TextNode("4ab5cbad")) - .add(new TextNode("8b6eae59")) - .add(new TextNode("4a7c7b41")), - new IntNode(3) - ))) - .build() - ); + var distance = (DecimalNode) row.getValues().get(0).get(0); + var riders = (ArrayNode) row.getValues().get(0).get(1); + var count = (IntNode) row.getValues().get(0).get(2); + + assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(0))); + assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance) + .add(new TextNode("4ab5cbad")) + .add(new TextNode("8b6eae59")) + .add(new TextNode("4a7c7b41"))); + assertThat(count).isEqualTo(new IntNode(3)); }) .assertNext(row -> { - assertThat(row).isEqualTo( - KsqlApiClient.KsqlResponseTable.builder() - .header("Row") - .columnNames(null) - .values(List.of(List.of( - new DoubleNode(10.0), - new ArrayNode(JsonNodeFactory.instance) - .add(new TextNode("18f4ea86")), - new IntNode(1) - ))) - .build() - ); + var distance = (DecimalNode) row.getValues().get(0).get(0); + var riders = (ArrayNode) row.getValues().get(0).get(1); + var count = (IntNode) row.getValues().get(0).get(2); + + assertThat(distance).isEqualTo(new DecimalNode(new BigDecimal(10))); + assertThat(riders).isEqualTo(new ArrayNode(JsonNodeFactory.instance) + .add(new TextNode("18f4ea86"))); + assertThat(count).isEqualTo(new IntNode(1)); }) .verifyComplete(); } diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java index afa3700c0fa..0e1717430ce 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/ksql/KsqlServiceV2Test.java @@ -3,7 +3,6 @@ import static org.assertj.core.api.Assertions.assertThat; import com.provectus.kafka.ui.AbstractIntegrationTest; -import com.provectus.kafka.ui.container.KsqlDbContainer; import com.provectus.kafka.ui.model.KafkaCluster; import com.provectus.kafka.ui.model.KsqlStreamDescriptionDTO; import com.provectus.kafka.ui.model.KsqlTableDescriptionDTO; @@ -15,14 +14,9 @@ import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; -import org.testcontainers.utility.DockerImageName; class KsqlServiceV2Test extends AbstractIntegrationTest { - private static final KsqlDbContainer KSQL_DB = new KsqlDbContainer( - DockerImageName.parse("confluentinc/ksqldb-server").withTag("0.24.0")) - .withKafka(kafka); - private static final Set<String> STREAMS_TO_DELETE = new CopyOnWriteArraySet<>(); private static final Set<String> TABLES_TO_DELETE = new CopyOnWriteArraySet<>();
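Much of the test churn above comes from ksqlDB in the newer Confluent platform returning DECIMAL columns, which Jackson materializes as `DecimalNode` rather than `DoubleNode`; Jackson node equality is type-sensitive, so the old exact-node assertions fail even when the numeric value is unchanged. A small demonstration of the pitfall:

```java
import com.fasterxml.jackson.databind.node.DecimalNode;
import com.fasterxml.jackson.databind.node.DoubleNode;
import java.math.BigDecimal;

public class NodeEqualityDemo {
  public static void main(String[] args) {
    var asDouble = new DoubleNode(0.0);
    var asDecimal = new DecimalNode(new BigDecimal(0));

    System.out.println(asDouble.asDouble() == asDecimal.asDouble()); // true  - same value
    System.out.println(asDouble.equals(asDecimal));                  // false - different node types
  }
}
```

This is why the assertions switched to extracting each column and comparing against the node types the new server version actually emits.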
test
test
2023-07-10T14:18:05
"2023-06-26T08:33:34Z"
Haarolean
train
provectus/kafka-ui/3902_4008
provectus/kafka-ui
provectus/kafka-ui/3902
provectus/kafka-ui/4008
[ "keyword_pr_to_issue" ]
17cde82dff6f31f5ca0c79a213ae5ba28b784232
5f231c7681bb6558e0367df22d92f1099e2cac94
[]
[]
"2023-07-04T09:00:33Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed", "area/messages" ]
Messages: Produce: "Keep contents" prevents new dummy data from being generated
Since #2672

Setup:
1. Topic "test"
2. Two JSON schemas, "test-key", "test-value":

```json
{
  "schema": "http://json-schema.org/draft-07/schema#",
  "id": "http://example.com/myURI.schema.json",
  "title": "TestRecord",
  "type": "object",
  "properties": {
    "f1": {
      "type": "integer"
    }
  }
}
```

Steps to reproduce:
1. Click "produce message". Observe that dummy data is generated for value
2. Produce a message
3. Open produce message again. No new data has been generated

#3901
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts", "kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx", "kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts", "kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx", "kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts b/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts index 27fc112c819..a5235e9ac5b 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts +++ b/kafka-ui-react-app/src/components/Topics/Topic/Messages/getDefaultSerdeName.ts @@ -1,8 +1,8 @@ import { SerdeDescription } from 'generated-sources'; -import { getPrefferedDescription } from 'components/Topics/Topic/SendMessage/utils'; +import { getPreferredDescription } from 'components/Topics/Topic/SendMessage/utils'; export const getDefaultSerdeName = (serdes: SerdeDescription[]) => { - const preffered = getPrefferedDescription(serdes); + const preffered = getPreferredDescription(serdes); if (preffered) { return preffered.name || ''; } diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx index b7f31a230bd..bef7a4dddb5 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/SendMessage.tsx @@ -118,8 +118,8 @@ const SendMessage: React.FC<{ closeSidebar: () => void }> = ({ valueSerde, }); if (!keepContents) { - setValue('key', ''); - setValue('content', ''); + setValue('key', defaultValues.key || ''); + setValue('content', defaultValues.content || ''); closeSidebar(); } } catch (e) { diff --git a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts index 6f98c5916d3..c8161b0c823 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts +++ b/kafka-ui-react-app/src/components/Topics/Topic/SendMessage/utils.ts @@ -13,21 +13,21 @@ jsf.option('fillProperties', false); jsf.option('alwaysFakeOptionals', true); jsf.option('failOnInvalidFormat', false); -const generateValueFromSchema = (preffered?: SerdeDescription) => { - if (!preffered?.schema) { +const generateValueFromSchema = (preferred?: SerdeDescription) => { + if (!preferred?.schema) { return undefined; } - const parsedSchema = JSON.parse(preffered.schema); + const parsedSchema = JSON.parse(preferred.schema); const value = jsf.generate(parsedSchema); return JSON.stringify(value); }; -export const getPrefferedDescription = (serdes: SerdeDescription[]) => +export const getPreferredDescription = (serdes: SerdeDescription[]) => serdes.find((s) => s.preferred); export const getDefaultValues = (serdes: TopicSerdeSuggestion) => { - const keySerde = getPrefferedDescription(serdes.key || []); - const valueSerde = getPrefferedDescription(serdes.value || []); + const keySerde = getPreferredDescription(serdes.key || []); + const valueSerde = getPreferredDescription(serdes.value || []); return { key: generateValueFromSchema(keySerde), @@ -65,15 +65,15 @@ export const validateBySchema = ( return errors; } - let parcedSchema; + let parsedSchema; let parsedValue; try { - parcedSchema = JSON.parse(schema); + parsedSchema = JSON.parse(schema); } catch (e) { return [`Error in parsing the "${type}" field schema`]; } - if (parcedSchema.type === 'string') { + if (parsedSchema.type === 'string') { return []; } try { @@ -84,7 +84,7 @@ export const validateBySchema = ( try { const ajv = new Ajv(); addFormats(ajv); - const validate = ajv.compile(parcedSchema); + const validate = ajv.compile(parsedSchema); validate(parsedValue); if (validate.errors) { errors = 
validate.errors.map(
null
train
test
2023-07-06T13:46:02
"2023-06-02T13:58:47Z"
Haarolean
train
provectus/kafka-ui/3728_4009
provectus/kafka-ui
provectus/kafka-ui/3728
provectus/kafka-ui/4009
[ "keyword_pr_to_issue" ]
d572e43b4f978473292757862aff08452fac57fb
9ab4580c47151cd64e323f8c79efeddcf8d0adf8
[ "Hye \r\nplease give some about the issue \r\n", "@shraddha761 we need to replace empty cells with \"N/A\"\r\n<img width=\"1099\" alt=\"image\" src=\"https://github.com/provectus/kafka-ui/assets/1494347/1403a0d8-16c6-4017-8146-eb7338761b74\">\r\n", "I am unable to find where to do changes.\r\n ", "Hi, could I work on this?", "@Haarolean Hi, I'm trying to setup my environment for windows but there's no instructions for windows on this page, could you help? https://docs.kafka-ui.provectus.io/development/building/prerequisites", "@nilling4 you can use WSL" ]
[]
"2023-07-04T09:30:39Z"
[ "good first issue", "scope/frontend", "status/accepted", "type/chore", "area/consumers" ]
Consumers: replace empty consumer lag cells with "N/A"
also in per consumer view <img width="1099" alt="image" src="https://github.com/provectus/kafka-ui/assets/1494347/0cb6a7c3-ab38-4c18-a7d6-7558570e6c2d">
[ "kafka-ui-react-app/src/components/ConsumerGroups/List.tsx" ]
[ "kafka-ui-react-app/src/components/ConsumerGroups/List.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx index 247a18b0bd8..da35c6bbad6 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/List.tsx @@ -61,6 +61,9 @@ const List = () => { id: ConsumerGroupOrdering.MESSAGES_BEHIND, header: 'Consumer Lag', accessorKey: 'consumerLag', + cell: (args) => { + return args.getValue() || 'N/A'; + }, }, { header: 'Coordinator',
null
train
test
2023-07-03T14:31:53
"2023-04-27T00:39:25Z"
Haarolean
train
provectus/kafka-ui/4032_4033
provectus/kafka-ui
provectus/kafka-ui/4032
provectus/kafka-ui/4033
[ "connected" ]
d4001b5a39cba28f2cf4011ca62ed84056de081c
15f4543402694e877cd1de4e85ff850ffb505d7f
[]
[]
"2023-07-12T10:39:33Z"
[ "type/bug", "scope/backend", "status/accepted", "area/rbac" ]
RBAC: Exception on unknown resource types
Set up:
1. config.yml with rbac and resource like "audit"
2. If the older image is used where the feature is not present, it will fail with the following exception:

```
Caused by: java.lang.NullPointerException: Cannot invoke "com.provectus.kafka.ui.model.rbac.Resource.ordinal()" because "this.resource" is null
	at com.provectus.kafka.ui.model.rbac.Permission.getAllActionValues(Permission.java:72)
	at com.provectus.kafka.ui.model.rbac.Permission.transform(Permission.java:67)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
	at com.provectus.kafka.ui.model.rbac.Role.validate(Role.java:15)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1511)
	at com.provectus.kafka.ui.config.auth.RoleBasedAccessControlProperties.init(RoleBasedAccessControlProperties.java:16)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
	at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.base/java.lang.reflect.Method.invoke(Method.java:568)
	at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleElement.invoke(InitDestroyAnnotationBeanPostProcessor.java:425)
	at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor$LifecycleMetadata.invokeInitMethods(InitDestroyAnnotationBeanPostProcessor.java:369)
	at org.springframework.beans.factory.annotation.InitDestroyAnnotationBeanPostProcessor.postProcessBeforeInitialization(InitDestroyAnnotationBeanPostProcessor.java:193)
	... 53 common frames omitted
```
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java index dd456400da5..56b0a098026 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/rbac/Permission.java @@ -16,6 +16,7 @@ import com.provectus.kafka.ui.model.rbac.permission.SchemaAction; import com.provectus.kafka.ui.model.rbac.permission.TopicAction; import java.util.Arrays; +import java.util.Collections; import java.util.List; import java.util.regex.Pattern; import javax.annotation.Nullable; @@ -73,6 +74,10 @@ public void transform() { } private List<String> getAllActionValues() { + if (resource == null) { + return Collections.emptyList(); + } + return switch (this.resource) { case APPLICATIONCONFIG -> Arrays.stream(ApplicationConfigAction.values()).map(Enum::toString).toList(); case CLUSTERCONFIG -> Arrays.stream(ClusterConfigAction.values()).map(Enum::toString).toList();
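The underlying Java detail: a `switch` on an enum dereferences it (effectively calling `ordinal()`), so switching on a `resource` that never bound to a known constant throws exactly the NPE from the report. A minimal reproduction of the failure mode and the guard, using a hypothetical two-constant enum in place of the project's `Resource`:

```java
import java.util.Collections;
import java.util.List;

public class NullEnumSwitchDemo {
  enum Resource { TOPIC, AUDIT } // hypothetical subset of the real enum

  static List<String> actionsFor(Resource resource) {
    if (resource == null) {
      return Collections.emptyList(); // the guard added by the patch
    }
    // Without the guard, a null resource NPEs here on the implicit ordinal() call.
    return switch (resource) {
      case TOPIC -> List.of("VIEW", "CREATE");
      case AUDIT -> List.of("VIEW");
    };
  }

  public static void main(String[] args) {
    Resource fromConfig = null; // what an unrecognized config value binds to
    System.out.println(actionsFor(fromConfig)); // [] instead of an NPE
  }
}
```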
null
train
test
2023-07-12T12:47:38
"2023-07-12T10:38:25Z"
Haarolean
train
provectus/kafka-ui/3820_4034
provectus/kafka-ui
provectus/kafka-ui/3820
provectus/kafka-ui/4034
[ "connected" ]
1cd303a90b497546db1eccc664e26a277257923c
b2c3fcc321c84a5c85f4ca4a96a3c7b647f0d087
[ "Hello there sm-shevchenko! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "@sm-shevchenko thank you for noticing that. We will think how it can be fixed. For now you can just ignore this message, since it should not affect kui functioning. " ]
[]
"2023-07-12T12:11:40Z"
[ "type/bug", "scope/backend", "status/accepted", "area/acl" ]
ACL enabled check results in an error
### Issue submitter TODO list
- [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)

### Describe the bug (actual behavior)
After upgrading from version 0.6.2 to 0.7.0 I see strange repeated messages in the KafkaUI log file (that were not there before):

```
org.apache.kafka.common.errors.UnknownServerException: The server experienced an unexpected error when processing the request.
2023-05-15 11:14:48,017 DEBUG [parallel-3] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: KAFKATS_TEST
2023-05-15 11:15:17,964 DEBUG [parallel-4] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: KAFKATS_TEST
2023-05-15 11:15:17,971 WARN [parallel-2] c.p.k.u.s.ReactiveAdminClient: Error checking if security enabled
org.apache.kafka.common.errors.UnknownServerException: The server experienced an unexpected error when processing the request.
2023-05-15 11:15:17,992 DEBUG [parallel-3] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: KAFKATS_TEST
2023-05-15 11:15:47,964 DEBUG [parallel-4] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: KAFKATS_TEST
2023-05-15 11:15:47,972 WARN [parallel-2] c.p.k.u.s.ReactiveAdminClient: Error checking if security enabled
org.apache.kafka.common.errors.UnknownServerException: The server experienced an unexpected error when processing the request.
2023-05-15 11:15:47,990 DEBUG [parallel-3] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: KAFKATS_TEST
2023-05-15 11:16:17,964 DEBUG [parallel-4] c.p.k.u.s.ClustersStatisticsScheduler: Start getting metrics for kafkaCluster: KAFKATS_TEST
2023-05-15 11:16:17,972 WARN [parallel-2] c.p.k.u.s.ReactiveAdminClient: Error checking if security enabled
org.apache.kafka.common.errors.UnknownServerException: The server experienced an unexpected error when processing the request.
2023-05-15 11:16:17,992 DEBUG [parallel-3] c.p.k.u.s.ClustersStatisticsScheduler: Metrics updated for cluster: KAFKATS_TEST
```

Kafka 'server.log' contains synchronous repeated messages:

```
[2023-05-15 11:15:47,971] ERROR (getting) acls is not supported by Ranger for Kafka (org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer)
[2023-05-15 11:15:47,971] ERROR [KafkaApi-1] Unexpected error handling request RequestHeader(apiKey=DESCRIBE_ACLS, apiVersion=2, clientId=kafka-ui-admin-1684138400-1, correlationId=49) -- DescribeAclsRequestData(resourceTypeFilter=1, resourceNameFilter=null, patternTypeFilter=1, principalFilter=null, hostFilter=null, operation=1, permissionType=1) with context RequestContext(header=RequestHeader(apiKey=DESCRIBE_ACLS, apiVersion=2, clientId=kafka-ui-admin-1684138400-1, correlationId=49), connectionId='10.120.152.240:9093-10.120.152.240:43102-474', clientAddress=/10.120.152.240, principal=User:kafkats, listenerName=ListenerName(SASL_SSL), securityProtocol=SASL_SSL, clientInformation=ClientInformation(softwareName=apache-kafka-java, softwareVersion=3.3.1), fromPrivilegedListener=true, principalSerde=Optional[org.apache.kafka.common.security.authenticator.DefaultKafkaPrincipalBuilder@6818625]) (kafka.server.KafkaApis)
java.lang.UnsupportedOperationException: (getting) acls is not supported by Ranger for Kafka
	at org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer.acls(RangerKafkaAuthorizer.java:334)
	at org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer.acls(RangerKafkaAuthorizer.java:185)
	at kafka.server.AclApis.handleDescribeAcls(AclApis.scala:70)
	at kafka.server.KafkaApis.handleDescribeAcls(KafkaApis.scala:2574)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:198)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:75)
	at java.lang.Thread.run(Thread.java:750)
```

### Expected behavior
The messages "Error checking if security enabled" are absent in the KafkaUI log, and the synchronous messages 'acls is not supported by Ranger for Kafka' are absent in the Kafka 'server.log'.

### Your installation details
KafkaUI - [fdd9ad9](https://github.com/provectus/kafka-ui/commit/fdd9ad9) 11.05.2023, 17:02:15

KafkaUI startup script with configuration parameters:

```
export DYNAMIC_CONFIG_ENABLED='false'
export SECURITY_BASIC_ENABLED='false'
export SERVER_PORT='8080'
export SERVER_SSL_ENABLED='true'
export SERVER_SSL_KEY_STORE_TYPE='JKS'
export SERVER_SSL_KEY_STORE='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export SERVER_SSL_KEY_STORE_PASSWORD='***'
export AUTH_TYPE='LDAP'
export SPRING_LDAP_URLS='ldaps://****.**.*.com:3269 ldaps://****.**.*.com:3269'
export SPRING_LDAP_BASE='DC=**,DC=*,DC=com'
export SPRING_LDAP_USER_FILTER_SEARCH_BASE='DC=**,DC=*,DC=com'
export SPRING_LDAP_USER_FILTER_SEARCH_FILTER='(&(sAMAccountName={0})(|(memberOf=CN=kafka-admin,OU=Service,DC=**,DC=*,DC=com)(memberOf=CN=admin,OU=Service,DC=**,DC=*,DC=com)))'
export SPRING_LDAP_ADMIN_USER='CN=ldap-user,OU=Service,DC=**,DC=*,DC=com'
export SPRING_LDAP_ADMIN_PASSWORD='***'
export KAFKA_CLUSTERS_0_METRICS_PORT='9094'
export KAFKA_CLUSTERS_0_METRICS_SSL='false'
export KAFKA_CLUSTERS_0_METRICS_TYPE='false'
export KAFKA_CLUSTERS_0_NAME='KAFKATS_TEST'
export KAFKA_CLUSTERS_0_READONLY='false'
export KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS='kafka-tst.**.*.com:9093'
export KAFKA_CLUSTERS_0_ZOOKEEPER='kafka-tst.**.*.com.com:2182'
export KAFKA_CLUSTERS_0_SSL_TRUSTSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export KAFKA_CLUSTERS_0_SSL_TRUSTSTOREPASSWORD='***'
export KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL='SASL_SSL'
export KAFKA_CLUSTERS_0_PROPERTIES_SASL_MECHANISM='GSSAPI'
export KAFKA_CLUSTERS_0_PROPERTIES_SASL_KERBEROS_SERVICE_NAME='kafkats'
export KAFKA_CLUSTERS_0_PROPERTIES_SASL_JAAS_CONFIG='com.sun.security.auth.module.Krb5LoginModule required serviceName="kafkats" useKeyTab=true storeKey=true keyTab="/disk01/kafka-ui-api-v0.7.0/kafka-tst.keytab" principal="kafkats/kafka-tst.**.*.com@**.*.COM";'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_TYPE='JKS'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_LOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEYSTORE_PASSWORD='***'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_KEY_PASSWORD='***'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_TYPE='JKS'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_LOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_TRUSTSTORE_PASSWORD='***'
export KAFKA_CLUSTERS_0_PROPERTIES_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM=''
export KAFKA_CLUSTERS_0_SCHEMAREGISTRY='https://kafka-tst.**.*.com:8081'
export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYSTOREPASSWORD='***'
export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_KEYPASSWORD='***'
export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTORELOCATION='/disk01/kafka-ui-api-v0.7.0/keystore.jks'
export KAFKA_CLUSTERS_0_SCHEMAREGISTRYSSL_TRUSTSTOREPASSWORD='***'
export JAVA_OPTS=" -Dzookeeper.client.secure=false -Dzookeeper.ssl.client.enable=false -Djavax.net.ssl.keyStoreType=jks -Djavax.net.ssl.keyStore=/disk01/kafka-ui-api-v0.7.0/keystore.jks -Djavax.net.ssl.keyStorePassword=*** -Djavax.net.ssl.trustStoreType=jks -Djavax.net.ssl.trustStore=/disk01/kafka-ui-api-v0.7.0/keystore.jks -Djavax.net.ssl.trustStorePassword==***"
export JAVA_OPTS="$JAVA_OPTS -Xms2g -Xmx4g -Djava.awt.headless=true --add-opens java.rmi/javax.rmi.ssl=ALL-UNNAMED"
cd /disk01/kafka-ui-api-v0.7.0
nohup /opt/java/jdk-17.0.3.1/bin/java $JAVA_OPTS -jar /disk01/kafka-ui-api-v0.7.0/kafka-ui-api-v0.7.0.jar>/disk01/kafka-ui-api-v0.7.0/kafkaui-console.log 2>&1 &
```

=====

Kafka - confluent-community-7.1.0 with 'ranger-kafka-plugin' enabled:

```
broker.id=1
listeners=SASL_SSL://kafka-tst.**.*.com:9093
advertised.listeners=SASL_SSL://kafka-tst.**.*.com:9093
num.network.threads=3
num.io.threads=8
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600
log.dirs=/disk01/kafkats/kafka-logs
num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1
log.retention.hours=168
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000
zookeeper.connect=kafka-tst.**.*.com:2182
zookeeper.connection.timeout.ms=18000
group.initial.rebalance.delay.ms=0
sasl.kerberos.service.name=kafkats
zookeeper.ssl.client.enable=true
zookeeper.clientCnxnSocket=org.apache.zookeeper.ClientCnxnSocketNetty
zookeeper.ssl.truststore.location=/disk01/kafkats/etc/truststore.jks
zookeeper.ssl.truststore.password=***
inter.broker.listener.name=SASL_SSL
ssl.client.auth=required
ssl.keystore.location=/disk01/kafkats/etc/keystore.jks
ssl.keystore.password=***
authorizer.class.name=org.apache.ranger.authorization.kafka.authorizer.RangerKafkaAuthorizer
ssl.protokol=TLS
auto.create.topics.enable=false
auto.leader.rebalance.enable=true
delete.topic.enable=true
kerberos.auth.enable=true
leader.imbalance.check.interval.seconds=300
leader.imbalance.per.broker.percentage=10
message.max.bytes=5242880
num.replica.fetchers=14
replica.fetch.max.bytes=5242880
replica.lag.time.max.ms=10000
zookeeper.session.timeout.ms=60000
sasl.mechanism.inter.broker.protokol=GSSAPI
sasl.enabled.mechanisms=GSSAPI
sasl.mechanism=GSSAPI
```

### Steps to reproduce
As far as I understand, the problem is that we do not use the built-in Kafka ACL mechanism for restricting access to Kafka-topics, but we use the Ranger-Kafka-Plugin. Therefore, we need the ability in KafkaUI to disable the functionality for Kafka ACL discovery from the Kafka server.

### Screenshots
_No response_

### Logs
_No response_

### Additional context
_No response_
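For context, the WARN on the KafkaUI side comes from a periodic probe that issues a `describeAcls` call to decide whether ACLs are readable; Ranger's authorizer does not implement ACL listing, so the probe fails on every poll. A stripped-down sketch of such a probe with the failure swallowed instead of logged loudly (plain `AdminClient` API; the bootstrap address is a placeholder):

```java
import java.util.Properties;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.common.acl.AclBindingFilter;

public class AclProbe {
  public static void main(String[] args) {
    var props = new Properties();
    props.put("bootstrap.servers", "localhost:9092"); // placeholder

    try (AdminClient client = AdminClient.create(props)) {
      // The ANY filter lists all bindings; authorizers that don't support
      // listing (e.g. Ranger's) fail this call on the broker side.
      client.describeAcls(AclBindingFilter.ANY).values().get();
      System.out.println("ACLs readable - security enabled");
    } catch (Exception e) {
      // Treat any failure as "can't read ACLs" instead of warning on each poll.
      System.out.println("ACLs not readable: " + e.getMessage());
    }
  }
}
```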
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java index 0b6f16a2235..9de908efa7f 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ReactiveAdminClient.java @@ -15,6 +15,8 @@ import com.provectus.kafka.ui.util.KafkaVersion; import com.provectus.kafka.ui.util.annotation.KafkaClientInternalsDependant; import java.io.Closeable; +import java.time.Duration; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; @@ -129,38 +131,41 @@ private record ConfigRelatedInfo(String version, Set<SupportedFeature> features, boolean topicDeletionIsAllowed) { - private static Mono<ConfigRelatedInfo> extract(AdminClient ac, int controllerId) { - return loadBrokersConfig(ac, List.of(controllerId)) - .map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(controllerId)) - .flatMap(configs -> { - String version = "1.0-UNKNOWN"; - boolean topicDeletionEnabled = true; - for (ConfigEntry entry : configs) { - if (entry.name().contains("inter.broker.protocol.version")) { - version = entry.value(); - } - if (entry.name().equals("delete.topic.enable")) { - topicDeletionEnabled = Boolean.parseBoolean(entry.value()); - } - } - var builder = ConfigRelatedInfo.builder() - .version(version) - .topicDeletionIsAllowed(topicDeletionEnabled); - return SupportedFeature.forVersion(ac, version) - .map(features -> builder.features(features).build()); - }); + static final Duration UPDATE_DURATION = Duration.of(1, ChronoUnit.HOURS); + + private static Mono<ConfigRelatedInfo> extract(AdminClient ac) { + return ReactiveAdminClient.describeClusterImpl(ac, Set.of()) + .flatMap(desc -> { + // choosing node from which we will get configs (starting with controller) + var targetNodeId = Optional.ofNullable(desc.controller) + .map(Node::id) + .orElse(desc.getNodes().iterator().next().id()); + return loadBrokersConfig(ac, List.of(targetNodeId)) + .map(map -> map.isEmpty() ? List.<ConfigEntry>of() : map.get(targetNodeId)) + .flatMap(configs -> { + String version = "1.0-UNKNOWN"; + boolean topicDeletionEnabled = true; + for (ConfigEntry entry : configs) { + if (entry.name().contains("inter.broker.protocol.version")) { + version = entry.value(); + } + if (entry.name().equals("delete.topic.enable")) { + topicDeletionEnabled = Boolean.parseBoolean(entry.value()); + } + } + final String finalVersion = version; + final boolean finalTopicDeletionEnabled = topicDeletionEnabled; + return SupportedFeature.forVersion(ac, version) + .map(features -> new ConfigRelatedInfo(finalVersion, features, finalTopicDeletionEnabled)); + }); + }) + .cache(UPDATE_DURATION); } } public static Mono<ReactiveAdminClient> create(AdminClient adminClient) { - return describeClusterImpl(adminClient, Set.of()) - // choosing node from which we will get configs (starting with controller) - .flatMap(descr -> descr.controller != null - ? 
Mono.just(descr.controller) - : Mono.justOrEmpty(descr.nodes.stream().findFirst()) - ) - .flatMap(node -> ConfigRelatedInfo.extract(adminClient, node.id())) - .map(info -> new ReactiveAdminClient(adminClient, info)); + Mono<ConfigRelatedInfo> configRelatedInfoMono = ConfigRelatedInfo.extract(adminClient); + return configRelatedInfoMono.map(info -> new ReactiveAdminClient(adminClient, configRelatedInfoMono, info)); } @@ -170,7 +175,7 @@ private static Mono<Boolean> isAuthorizedSecurityEnabled(AdminClient ac, @Nullab .doOnError(th -> !(th instanceof SecurityDisabledException) && !(th instanceof InvalidRequestException) && !(th instanceof UnsupportedVersionException), - th -> log.warn("Error checking if security enabled", th)) + th -> log.debug("Error checking if security enabled", th)) .onErrorReturn(false); } @@ -202,6 +207,8 @@ public static <T> Mono<T> toMono(KafkaFuture<T> future) { @Getter(AccessLevel.PACKAGE) // visible for testing private final AdminClient client; + private final Mono<ConfigRelatedInfo> configRelatedInfoMono; + private volatile ConfigRelatedInfo configRelatedInfo; public Set<SupportedFeature> getClusterFeatures() { @@ -228,7 +235,7 @@ public Mono<Void> updateInternalStats(@Nullable Node controller) { if (controller == null) { return Mono.empty(); } - return ConfigRelatedInfo.extract(client, controller.id()) + return configRelatedInfoMono .doOnNext(info -> this.configRelatedInfo = info) .then(); }
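Two things change in the patch above: the probe's failure log is demoted from WARN to DEBUG, and the cluster-config snapshot is memoized with Reactor's `Mono.cache(Duration)` so it is recomputed at most once per `UPDATE_DURATION` instead of on every metrics poll. A tiny demo of that caching operator in isolation:

```java
import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;
import reactor.core.publisher.Mono;

public class MonoTtlCacheDemo {
  public static void main(String[] args) {
    AtomicInteger calls = new AtomicInteger();

    // An "expensive" lookup, re-run only after the 1-second TTL expires.
    Mono<Integer> cached = Mono.fromSupplier(calls::incrementAndGet)
        .cache(Duration.ofSeconds(1));

    cached.block(); // computes: calls == 1
    cached.block(); // served from cache: still 1
    System.out.println("calls within TTL: " + calls.get()); // 1
  }
}
```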
null
val
test
2023-08-01T14:23:19
"2023-05-15T09:27:39Z"
sm-shevchenko
train
provectus/kafka-ui/4016_4045
provectus/kafka-ui
provectus/kafka-ui/4016
provectus/kafka-ui/4045
[ "keyword_pr_to_issue" ]
a32272d07e2f151d6397013b3547ca145ef75788
3cde6c21ecbc89ae902f89960afe2894014d1b89
[ "Hello there jadhavnitind! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hello, I would like to work on this, can it be assigned to me", "@abhisheksharma886 hi, it's already assigned, sorry. Feel free to pick any other issue" ]
[]
"2023-07-18T07:04:37Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted" ]
FE: Logout button link is bound to a wrong div
### Issue submitter TODO list
- [x] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems)
- [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues)
- [X] I've tried running `master`-labeled docker image and the issue still persists there
- [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md)

### Describe the bug (actual behavior)
When I tried to log out from Kafka UI it was not working, but later on I realized that the logout button itself doesn't work: only the word "logout" (the hyperlink) is clickable. I have attached a screenshot of the sign-out button and highlighted the section that works. Also, when signing out it asks "Are you sure you want to log out?", which doesn't make sense; I clicked sign out because I wanted to sign out.

### Expected behavior
The entire button should work for logout.

### Your installation details
App version: [56fa824](https://github.com/provectus/kafka-ui/commit/56fa824) v0.7.1
Helm chart version, v0.7.1
Your application config. Please remove the sensitive info like passwords or API keys.
{REDACTED}

### Steps to reproduce
Just install the Kafka UI app.

### Screenshots
![image](https://github.com/provectus/kafka-ui/assets/17820833/adf60bfa-5f75-4a95-b5a5-90e086eb3ae8)

### Logs
_No response_

### Additional context
_No response_
[ "kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx", "kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx" ]
[ "kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx", "kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx index 2b432b10e3c..dae43364c45 100644 --- a/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx +++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/UserInfo.tsx @@ -19,8 +19,8 @@ const UserInfo = () => { </S.Wrapper> } > - <DropdownItem> - <S.LogoutLink href={`${window.basePath}/logout`}>Log out</S.LogoutLink> + <DropdownItem href={`${window.basePath}/logout`}> + <S.LogoutLink>Log out</S.LogoutLink> </DropdownItem> </Dropdown> ) : null; diff --git a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx index b51f00da024..2231b09a250 100644 --- a/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx +++ b/kafka-ui-react-app/src/components/NavBar/UserInfo/__tests__/UserInfo.spec.tsx @@ -34,7 +34,6 @@ describe('UserInfo', () => { const logout = screen.getByText('Log out'); expect(logout).toBeInTheDocument(); - expect(logout).toHaveAttribute('href', '/logout'); }); it('should render correct url during basePath initialization', async () => { @@ -50,7 +49,6 @@ describe('UserInfo', () => { const logout = screen.getByText('Log out'); expect(logout).toBeInTheDocument(); - expect(logout).toHaveAttribute('href', `${baseUrl}/logout`); }); it('should not render anything if the username does not exists', () => {
null
train
test
2023-07-24T09:25:13
"2023-07-06T15:13:11Z"
jadhavnitind
train
provectus/kafka-ui/3977_4061
provectus/kafka-ui
provectus/kafka-ui/3977
provectus/kafka-ui/4061
[ "connected" ]
476cbfb691a0b19f9227ecb12b2e5ce4b5a7156c
895d27a306f0a69e0f6c665be121edc62c61c730
[ "Hello there stalbot15! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€" ]
[]
"2023-07-25T10:35:40Z"
[ "type/bug", "scope/backend", "status/accepted" ]
Connectors page breaks when one connect cluster unavailable
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) If any configured Kafka Connect Cluster is unavailable, the entire Connectors page fails to load. After a few retries, the uncaught exception causes the rendering to break entirely. ### Expected behavior The page should render and any unreachable/unavailable connect clusters should simply be omitted from the list of connectors. We use one connect cluster per connector, and thus have many clusters that we would like to monitor in the UI. If one cluster is unavailable, we still want to be able to see the rest without having to remove the offending cluster from the configuration. ### Your installation details App version: [0.7.0](https://github.com/provectus/kafka-ui/commit/fdd9ad9) Basic configurations, with an intentionally unavailable connect address: `http://not.available:8083` ### Steps to reproduce 1) Run image `docker run -p 8080:8080 -e DYNAMIC_CONFIG_ENABLED=true provectuslabs/kafka-ui:master` 2) Configure valid kafka cluster (I did this via the UI, but you could pass in however you like) 3) Configure invalid kafka connect 1) Name: `test` 2) Address: `http://not.available.com:8083` 4) Navigate to the Connectors page ### Screenshots _No response_ ### Logs ``` ERROR [reactor-http-epoll-2] o.s.b.a.w.r.e.AbstractErrorWebExceptionHandler: [5eaf4cf2-86] 500 Server Error for HTTP GET "/api/clusters/test/connectors" org.springframework.web.reactive.function.client.WebClientRequestException: Failed to resolve 'not.available.com' [A(1)] after 2 queries at org.springframework.web.reactive.function.client.ExchangeFunctions$DefaultExchangeFunction.lambda$wrapException$9(ExchangeFunctions.java:136) Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: Error has been observed at the following site(s): *__checkpoint β‡’ Request to GET http://not.available.com:8083/connectors [DefaultWebClient] *__checkpoint β‡’ Handler com.provectus.kafka.ui.controller.KafkaConnectController#getAllConnectors(String, String, ConnectorColumnsToSortDTO, SortOrderDTO, ServerWebExchange) [DispatcherHandler] *__checkpoint β‡’ com.provectus.kafka.ui.config.CorsGlobalConfiguration$$Lambda$1304/0x00000008017f1878 [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain] *__checkpoint β‡’ AuthorizationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ExceptionTranslationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ LogoutWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerRequestCacheWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ HttpHeaderWriterWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain] *__checkpoint β‡’ 
org.springframework.web.filter.reactive.ServerHttpObservationFilter [DefaultWebFilterChain] *__checkpoint β‡’ HTTP GET "/api/clusters/test/connectors" [ExceptionHandlingWebHandler] Original Stack Trace: at org.springframework.web.reactive.function.client.ExchangeFunctions$DefaultExchangeFunction.lambda$wrapException$9(ExchangeFunctions.java:136) at reactor.core.publisher.MonoErrorSupplied.subscribe(MonoErrorSupplied.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:103) at reactor.core.publisher.FluxPeek$PeekSubscriber.onError(FluxPeek.java:222) at reactor.core.publisher.FluxPeek$PeekSubscriber.onError(FluxPeek.java:222) at reactor.core.publisher.FluxPeek$PeekSubscriber.onError(FluxPeek.java:222) at reactor.core.publisher.MonoNext$NextSubscriber.onError(MonoNext.java:93) at reactor.core.publisher.MonoFlatMapMany$FlatMapManyMain.onError(MonoFlatMapMany.java:204) at reactor.core.publisher.SerializedSubscriber.onError(SerializedSubscriber.java:124) at reactor.core.publisher.FluxRetryWhen$RetryWhenMainSubscriber.whenError(FluxRetryWhen.java:225) at reactor.core.publisher.FluxRetryWhen$RetryWhenOtherSubscriber.onError(FluxRetryWhen.java:274) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onError(FluxContextWrite.java:121) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.maybeOnError(FluxConcatMapNoPrefetch.java:326) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onNext(FluxConcatMapNoPrefetch.java:211) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onNext(FluxContextWrite.java:107) at reactor.core.publisher.SinkManyEmitterProcessor.drain(SinkManyEmitterProcessor.java:471) at reactor.core.publisher.SinkManyEmitterProcessor$EmitterInner.drainParent(SinkManyEmitterProcessor.java:615) at reactor.core.publisher.FluxPublish$PubSubInner.request(FluxPublish.java:602) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.request(FluxContextWrite.java:136) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:336) at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.request(FluxContextWrite.java:136) at reactor.core.publisher.Operators$DeferredSubscription.request(Operators.java:1717) at reactor.core.publisher.FluxRetryWhen$RetryWhenMainSubscriber.onError(FluxRetryWhen.java:192) at reactor.core.publisher.MonoCreate$DefaultMonoSink.error(MonoCreate.java:201) at reactor.netty.http.client.HttpClientConnect$MonoHttpConnect$ClientTransportSubscriber.onError(HttpClientConnect.java:311) at reactor.core.publisher.MonoCreate$DefaultMonoSink.error(MonoCreate.java:201) at reactor.netty.resources.DefaultPooledConnectionProvider$DisposableAcquire.onError(DefaultPooledConnectionProvider.java:162) at reactor.netty.internal.shaded.reactor.pool.AbstractPool$Borrower.fail(AbstractPool.java:475) at reactor.netty.internal.shaded.reactor.pool.SimpleDequePool.lambda$drainLoop$9(SimpleDequePool.java:429) at reactor.core.publisher.FluxDoOnEach$DoOnEachSubscriber.onError(FluxDoOnEach.java:186) at reactor.core.publisher.MonoCreate$DefaultMonoSink.error(MonoCreate.java:201) at reactor.netty.resources.DefaultPooledConnectionProvider$PooledConnectionAllocator$PooledConnectionInitializer.onError(DefaultPooledConnectionProvider.java:560) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondError(MonoFlatMap.java:241) at 
reactor.core.publisher.MonoFlatMap$FlatMapInner.onError(MonoFlatMap.java:315) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:106) at reactor.core.publisher.Operators.error(Operators.java:198) at reactor.core.publisher.MonoError.subscribe(MonoError.java:53) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onError(FluxOnErrorResume.java:103) at reactor.netty.transport.TransportConnector$MonoChannelPromise.tryFailure(TransportConnector.java:587) at reactor.netty.transport.TransportConnector.lambda$doResolveAndConnect$11(TransportConnector.java:394) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:557) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492) at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636) at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629) at io.netty.util.concurrent.DefaultPromise.setFailure(DefaultPromise.java:110) at io.netty.resolver.InetSocketAddressResolver$2.operationComplete(InetSocketAddressResolver.java:86) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590) at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492) at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636) at io.netty.util.concurrent.DefaultPromise.setFailure0(DefaultPromise.java:629) at io.netty.util.concurrent.DefaultPromise.tryFailure(DefaultPromise.java:118) at io.netty.resolver.dns.DnsResolveContext.finishResolve(DnsResolveContext.java:1096) at io.netty.resolver.dns.DnsResolveContext.tryToFinishResolve(DnsResolveContext.java:1035) at io.netty.resolver.dns.DnsResolveContext.query(DnsResolveContext.java:422) at io.netty.resolver.dns.DnsResolveContext.onResponse(DnsResolveContext.java:655) at io.netty.resolver.dns.DnsResolveContext.access$500(DnsResolveContext.java:66) at io.netty.resolver.dns.DnsResolveContext$2.operationComplete(DnsResolveContext.java:482) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590) at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492) at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636) at io.netty.util.concurrent.DefaultPromise.setSuccess0(DefaultPromise.java:625) at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:105) at io.netty.resolver.dns.DnsQueryContext.trySuccess(DnsQueryContext.java:232) at io.netty.resolver.dns.DnsQueryContext.finish(DnsQueryContext.java:224) at io.netty.resolver.dns.DnsNameResolver$DnsResponseHandler.channelRead(DnsNameResolver.java:1364) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at 
io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.epoll.EpollDatagramChannel.processPacket(EpollDatagramChannel.java:662) at io.netty.channel.epoll.EpollDatagramChannel.recvmsg(EpollDatagramChannel.java:697) at io.netty.channel.epoll.EpollDatagramChannel.access$300(EpollDatagramChannel.java:56) at io.netty.channel.epoll.EpollDatagramChannel$EpollDatagramChannelUnsafe.epollInReady(EpollDatagramChannel.java:536) at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499) at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) Caused by: java.net.UnknownHostException: Failed to resolve 'not.available.com' [A(1)] after 2 queries at io.netty.resolver.dns.DnsResolveContext.finishResolve(DnsResolveContext.java:1088) at io.netty.resolver.dns.DnsResolveContext.tryToFinishResolve(DnsResolveContext.java:1035) at io.netty.resolver.dns.DnsResolveContext.query(DnsResolveContext.java:422) at io.netty.resolver.dns.DnsResolveContext.onResponse(DnsResolveContext.java:655) at io.netty.resolver.dns.DnsResolveContext.access$500(DnsResolveContext.java:66) at io.netty.resolver.dns.DnsResolveContext$2.operationComplete(DnsResolveContext.java:482) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:590) at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:583) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:559) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:492) at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:636) at io.netty.util.concurrent.DefaultPromise.setSuccess0(DefaultPromise.java:625) at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:105) at io.netty.resolver.dns.DnsQueryContext.trySuccess(DnsQueryContext.java:232) at io.netty.resolver.dns.DnsQueryContext.finish(DnsQueryContext.java:224) at io.netty.resolver.dns.DnsNameResolver$DnsResponseHandler.channelRead(DnsNameResolver.java:1364) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at 
io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at io.netty.channel.epoll.EpollDatagramChannel.processPacket(EpollDatagramChannel.java:662) at io.netty.channel.epoll.EpollDatagramChannel.recvmsg(EpollDatagramChannel.java:697) at io.netty.channel.epoll.EpollDatagramChannel.access$300(EpollDatagramChannel.java:56) at io.netty.channel.epoll.EpollDatagramChannel$EpollDatagramChannelUnsafe.epollInReady(EpollDatagramChannel.java:536) at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499) at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) ``` ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java index 468c86ecbed..a41054de6cb 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/KafkaConnectMapper.java @@ -34,7 +34,7 @@ ConnectorPluginConfigValidationResponseDTO fromClient( com.provectus.kafka.ui.connect.model.ConnectorPluginConfigValidationResponse connectorPluginConfigValidationResponse); - default FullConnectorInfoDTO fullConnectorInfoFromTuple(InternalConnectInfo connectInfo) { + default FullConnectorInfoDTO fullConnectorInfo(InternalConnectInfo connectInfo) { ConnectorDTO connector = connectInfo.getConnector(); List<TaskDTO> tasks = connectInfo.getTasks(); int failedTasksCount = (int) tasks.stream() diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java index 390348707d3..605d5cab205 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaConnectService.java @@ -28,7 +28,6 @@ import java.util.Map; import java.util.Optional; import java.util.function.Predicate; -import java.util.stream.Collectors; import java.util.stream.Stream; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; @@ -39,7 +38,6 @@ import org.springframework.web.reactive.function.client.WebClientResponseException; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; -import reactor.util.function.Tuples; @Service @Slf4j @@ -61,39 +59,22 @@ public Flux<ConnectDTO> getConnects(KafkaCluster cluster) { public Flux<FullConnectorInfoDTO> getAllConnectors(final KafkaCluster cluster, @Nullable final String search) { return getConnects(cluster) - .flatMap(connect -> getConnectorNames(cluster, connect.getName()).map(cn -> Tuples.of(connect.getName(), cn))) - .flatMap(pair -> getConnector(cluster, pair.getT1(), pair.getT2())) - .flatMap(connector -> - getConnectorConfig(cluster, connector.getConnect(), connector.getName()) - .map(config -> InternalConnectInfo.builder() - .connector(connector) - .config(config) - .build() - ) - ) - .flatMap(connectInfo -> { - ConnectorDTO connector = connectInfo.getConnector(); - return getConnectorTasks(cluster, connector.getConnect(), connector.getName()) - .collectList() - .map(tasks -> InternalConnectInfo.builder() - .connector(connector) - .config(connectInfo.getConfig()) - .tasks(tasks) - .build() - ); - }) - .flatMap(connectInfo -> { - ConnectorDTO connector = connectInfo.getConnector(); - return getConnectorTopics(cluster, connector.getConnect(), connector.getName()) - .map(ct -> InternalConnectInfo.builder() - .connector(connector) - .config(connectInfo.getConfig()) - .tasks(connectInfo.getTasks()) - .topics(ct.getTopics()) - .build() - ); - }) - .map(kafkaConnectMapper::fullConnectorInfoFromTuple) + .flatMap(connect -> + getConnectorNamesWithErrorsSuppress(cluster, connect.getName()) + .flatMap(connectorName -> + Mono.zip( + getConnector(cluster, connect.getName(), connectorName), + getConnectorConfig(cluster, connect.getName(), connectorName), + getConnectorTasks(cluster, connect.getName(), connectorName).collectList(), + getConnectorTopics(cluster, connect.getName(), connectorName) + ).map(tuple -> + InternalConnectInfo.builder() + 
.connector(tuple.getT1()) + .config(tuple.getT2()) + .tasks(tuple.getT3()) + .topics(tuple.getT4().getTopics()) + .build()))) + .map(kafkaConnectMapper::fullConnectorInfo) .filter(matchesSearchTerm(search)); } @@ -132,6 +113,11 @@ public Flux<String> getConnectorNames(KafkaCluster cluster, String connectName) .flatMapMany(Flux::fromIterable); } + // returns empty flux if there was an error communicating with Connect + public Flux<String> getConnectorNamesWithErrorsSuppress(KafkaCluster cluster, String connectName) { + return getConnectorNames(cluster, connectName).onErrorComplete(); + } + @SneakyThrows private List<String> parseConnectorsNamesStringToList(String json) { return objectMapper.readValue(json, new TypeReference<>() { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java index 2fad00bbfaf..2259d5ebb1b 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporter.java @@ -25,7 +25,7 @@ class ConnectorsExporter { Flux<DataEntityList> export(KafkaCluster cluster) { return kafkaConnectService.getConnects(cluster) - .flatMap(connect -> kafkaConnectService.getConnectorNames(cluster, connect.getName()) + .flatMap(connect -> kafkaConnectService.getConnectorNamesWithErrorsSuppress(cluster, connect.getName()) .flatMap(connectorName -> kafkaConnectService.getConnector(cluster, connect.getName(), connectorName)) .flatMap(connectorDTO -> kafkaConnectService.getConnectorTopics(cluster, connect.getName(), connectorDTO.getName())
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java index 314abf914b0..d185e646714 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractIntegrationTest.java @@ -77,6 +77,8 @@ public void initialize(@NotNull ConfigurableApplicationContext context) { System.setProperty("kafka.clusters.0.kafkaConnect.0.userName", "kafka-connect"); System.setProperty("kafka.clusters.0.kafkaConnect.0.password", "kafka-connect"); System.setProperty("kafka.clusters.0.kafkaConnect.0.address", kafkaConnect.getTarget()); + System.setProperty("kafka.clusters.0.kafkaConnect.1.name", "notavailable"); + System.setProperty("kafka.clusters.0.kafkaConnect.1.address", "http://notavailable:6666"); System.setProperty("kafka.clusters.0.masking.0.type", "REPLACE"); System.setProperty("kafka.clusters.0.masking.0.replacement", "***"); System.setProperty("kafka.clusters.0.masking.0.topicValuesPattern", "masking-test-.*"); diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java index 20c0d96ad16..e06a16388ee 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/integration/odd/ConnectorsExporterTest.java @@ -61,7 +61,7 @@ void exportsConnectorsAsDataTransformers() { when(kafkaConnectService.getConnects(CLUSTER)) .thenReturn(Flux.just(connect)); - when(kafkaConnectService.getConnectorNames(CLUSTER, connect.getName())) + when(kafkaConnectService.getConnectorNamesWithErrorsSuppress(CLUSTER, connect.getName())) .thenReturn(Flux.just(sinkConnector.getName(), sourceConnector.getName())); when(kafkaConnectService.getConnector(CLUSTER, connect.getName(), sinkConnector.getName()))
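The core of the patch above is Reactor's `Flux#onErrorComplete()`: a per-connect failure (such as the DNS error in the logs) is swallowed so the overall connector listing still succeeds. A minimal standalone sketch of that behavior, assuming reactor-core on the classpath (class name and sample message are illustrative, not from the patch):

```java
import reactor.core.publisher.Flux;

// Sketch of what getConnectorNamesWithErrorsSuppress() relies on: a failing
// per-connect request becomes an empty Flux instead of failing the pipeline.
public class OnErrorCompleteDemo {
  public static void main(String[] args) {
    Flux<String> connectorNames = Flux.<String>error(
            new IllegalStateException("Failed to resolve 'not.available.com'"))
        .onErrorComplete(); // suppress the error: complete empty instead of propagating

    connectorNames
        .collectList()
        .subscribe(names -> System.out.println("connectors: " + names)); // prints: connectors: []
  }
}
```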
train
test
2023-08-01T13:47:03
"2023-06-26T22:02:34Z"
stalbot15
train
provectus/kafka-ui/4067_4074
provectus/kafka-ui
provectus/kafka-ui/4067
provectus/kafka-ui/4074
[ "connected" ]
0b99f745b01abd734738fc187afcec4589168d8f
2db89593a78cb6478889cd455c9e20b23e2e3a92
[ "Hello there jcpunk! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hello @jcpunk, thank you for suggestion. We will implement this feature as a Hex serde ." ]
[]
"2023-07-31T09:34:06Z"
[ "type/enhancement", "scope/backend", "status/accepted", "area/serde" ]
Implement built-in HEX serde
### Issue submitter TODO list - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) and the feature is not present there ### Is your proposal related to a problem? I'm frustrated when I'm trying to view a binary message posted to kafka. ### Describe the feature you're interested in A button I could press to switch between the native view and a hex view of the value. ### Describe alternatives you've considered Writing my own client. ### Version you're running 56fa824 ### Additional context This is related to trying to sort out why my protobuf messages aren't coming out correctly. The messages themselves come from a 3rd party so I need to extract the hex to show exactly what they are sending.
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java index ac3c2241cfc..c833d9fc726 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/SerdesInitializer.java @@ -12,6 +12,7 @@ import com.provectus.kafka.ui.serdes.builtin.AvroEmbeddedSerde; import com.provectus.kafka.ui.serdes.builtin.Base64Serde; import com.provectus.kafka.ui.serdes.builtin.ConsumerOffsetsSerde; +import com.provectus.kafka.ui.serdes.builtin.HexSerde; import com.provectus.kafka.ui.serdes.builtin.Int32Serde; import com.provectus.kafka.ui.serdes.builtin.Int64Serde; import com.provectus.kafka.ui.serdes.builtin.ProtobufFileSerde; @@ -47,6 +48,7 @@ public SerdesInitializer() { .put(UInt64Serde.name(), UInt64Serde.class) .put(AvroEmbeddedSerde.name(), AvroEmbeddedSerde.class) .put(Base64Serde.name(), Base64Serde.class) + .put(HexSerde.name(), HexSerde.class) .put(UuidBinarySerde.name(), UuidBinarySerde.class) .build(), new CustomSerdeLoader() diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java new file mode 100644 index 00000000000..cf1a6b793ff --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serdes/builtin/HexSerde.java @@ -0,0 +1,80 @@ +package com.provectus.kafka.ui.serdes.builtin; + +import com.provectus.kafka.ui.serde.api.DeserializeResult; +import com.provectus.kafka.ui.serde.api.PropertyResolver; +import com.provectus.kafka.ui.serde.api.SchemaDescription; +import com.provectus.kafka.ui.serdes.BuiltInSerde; +import java.util.HexFormat; +import java.util.Map; +import java.util.Optional; + +public class HexSerde implements BuiltInSerde { + + private HexFormat deserializeHexFormat; + + public static String name() { + return "Hex"; + } + + @Override + public void configure(PropertyResolver serdeProperties, + PropertyResolver kafkaClusterProperties, + PropertyResolver globalProperties) { + String delim = serdeProperties.getProperty("delimiter", String.class).orElse(" "); + boolean uppercase = serdeProperties.getProperty("uppercase", Boolean.class).orElse(true); + deserializeHexFormat = HexFormat.ofDelimiter(delim); + if (uppercase) { + deserializeHexFormat = deserializeHexFormat.withUpperCase(); + } + } + + @Override + public Optional<String> getDescription() { + return Optional.empty(); + } + + @Override + public Optional<SchemaDescription> getSchema(String topic, Target type) { + return Optional.empty(); + } + + @Override + public boolean canDeserialize(String topic, Target type) { + return true; + } + + @Override + public boolean canSerialize(String topic, Target type) { + return true; + } + + @Override + public Serializer serializer(String topic, Target type) { + return input -> { + input = input.trim(); + // it is a hack to provide ability to sent empty array as a key/value + if (input.length() == 0) { + return new byte[] {}; + } + return HexFormat.of().parseHex(prepareInputForParse(input)); + }; + } + + // removing most-common delimiters and prefixes + private static String prepareInputForParse(String input) { + return input + .replaceAll(" ", "") + .replaceAll("#", "") + .replaceAll(":", ""); + } + + @Override + public Deserializer deserializer(String topic, Target type) { + return (headers, data) -> + new DeserializeResult( + 
deserializeHexFormat.formatHex(data), + DeserializeResult.Type.STRING, + Map.of() + ); + } +}
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java new file mode 100644 index 00000000000..a318279f564 --- /dev/null +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/serdes/builtin/HexSerdeTest.java @@ -0,0 +1,84 @@ +package com.provectus.kafka.ui.serdes.builtin; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.provectus.kafka.ui.serde.api.DeserializeResult; +import com.provectus.kafka.ui.serde.api.Serde; +import com.provectus.kafka.ui.serdes.PropertyResolverImpl; +import com.provectus.kafka.ui.serdes.RecordHeadersImpl; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.junit.jupiter.params.provider.EnumSource; + +public class HexSerdeTest { + + private static final byte[] TEST_BYTES = "hello world".getBytes(); + private static final String TEST_BYTES_HEX_ENCODED = "68 65 6C 6C 6F 20 77 6F 72 6C 64"; + + private Serde hexSerde; + + @BeforeEach + void init() { + hexSerde = new HexSerde(); + hexSerde.configure( + PropertyResolverImpl.empty(), + PropertyResolverImpl.empty(), + PropertyResolverImpl.empty() + ); + } + + + @ParameterizedTest + @CsvSource({ + "68656C6C6F20776F726C64", // uppercase + "68656c6c6f20776f726c64", // lowercase + "68:65:6c:6c:6f:20:77:6f:72:6c:64", // ':' delim + "68 65 6C 6C 6F 20 77 6F 72 6C 64", // space delim, UC + "68 65 6c 6c 6f 20 77 6f 72 6c 64", // space delim, LC + "#68 #65 #6C #6C #6F #20 #77 #6F #72 #6C #64" // '#' prefix, space delim + }) + void serializesInputAsHexString(String hexString) { + for (Serde.Target type : Serde.Target.values()) { + var serializer = hexSerde.serializer("anyTopic", type); + byte[] bytes = serializer.serialize(hexString); + assertThat(bytes).isEqualTo(TEST_BYTES); + } + } + + @ParameterizedTest + @EnumSource + void serializesEmptyStringAsEmptyBytesArray(Serde.Target type) { + var serializer = hexSerde.serializer("anyTopic", type); + byte[] bytes = serializer.serialize(""); + assertThat(bytes).isEqualTo(new byte[] {}); + } + + @ParameterizedTest + @EnumSource + void deserializesDataAsHexBytes(Serde.Target type) { + var deserializer = hexSerde.deserializer("anyTopic", type); + var result = deserializer.deserialize(new RecordHeadersImpl(), TEST_BYTES); + assertThat(result.getResult()).isEqualTo(TEST_BYTES_HEX_ENCODED); + assertThat(result.getType()).isEqualTo(DeserializeResult.Type.STRING); + assertThat(result.getAdditionalProperties()).isEmpty(); + } + + @ParameterizedTest + @EnumSource + void getSchemaReturnsEmpty(Serde.Target type) { + assertThat(hexSerde.getSchema("anyTopic", type)).isEmpty(); + } + + @ParameterizedTest + @EnumSource + void canDeserializeReturnsTrueForAllInputs(Serde.Target type) { + assertThat(hexSerde.canDeserialize("anyTopic", type)).isTrue(); + } + + @ParameterizedTest + @EnumSource + void canSerializeReturnsTrueForAllInput(Serde.Target type) { + assertThat(hexSerde.canSerialize("anyTopic", type)).isTrue(); + } +}
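The serde in the patch above is a thin wrapper around `java.util.HexFormat` (JDK 17+). A minimal standalone sketch of both directions it uses, with the serde's defaults of a space delimiter and upper-case output ("delimiter" and "uppercase" are the configurable serde properties shown in the patch; the class name here is illustrative):

```java
import java.util.HexFormat;

// Standalone sketch of the two HexFormat calls the HexSerde builds on.
public class HexFormatDemo {
  public static void main(String[] args) {
    byte[] payload = "hello world".getBytes();

    // Deserialization side: bytes -> space-delimited, upper-case hex string.
    HexFormat display = HexFormat.ofDelimiter(" ").withUpperCase();
    System.out.println(display.formatHex(payload)); // 68 65 6C 6C 6F 20 77 6F 72 6C 64

    // Serialization side: after delimiters/prefixes are stripped, plain hex parses back.
    byte[] roundTrip = HexFormat.of().parseHex("68656C6C6F20776F726C64");
    System.out.println(new String(roundTrip)); // hello world
  }
}
```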
train
test
2023-07-31T11:01:36
"2023-07-27T23:44:03Z"
jcpunk
train
provectus/kafka-ui/4059_4078
provectus/kafka-ui
provectus/kafka-ui/4059
provectus/kafka-ui/4078
[ "keyword_pr_to_issue" ]
b2c3fcc321c84a5c85f4ca4a96a3c7b647f0d087
77f1ec949028d3ae7de2f79835096fedc2b22334
[ "Hello there fallen-up! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€" ]
[]
"2023-08-01T14:12:16Z"
[ "type/bug", "scope/backend", "status/accepted", "area/rbac" ]
Wizard: "cluster value is empty"
### Issue submitter TODO list - [X] I've looked up my issue in [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for an already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) RBAC that doesn't conflict with original settings (DYNAMIC_CONFIG_ENABLED: 'true') blocks opportunity to configure something for current clusters. First the page freezes, then goes to a white screen (with a lot of 500 errors) ### Expected behavior _No response_ ### Your installation details 1. 0.7.1 3. ``` DYNAMIC_CONFIG_ENABLED: 'true' AUTH_TYPE: OAUTH2 AUTH_OAUTH2_CLIENT_KEYCLOCK_CLIENTID: kafka-ui AUTH_OAUTH2_CLIENT_KEYCLOCK_CLIENTSECRET: yyy AUTH_OAUTH2_CLIENT_KEYCLOCK_SCOPE: openid AUTH_OAUTH2_CLIENT_KEYCLOCK_ISSUER-URI: https://xxx/auth/realms/kafka AUTH_OAUTH2_CLIENT_KEYCLOCK_USER-NAME-ATTRIBUTE: preferred_username AUTH_OAUTH2_CLIENT_KEYCLOCK_CLIENT-NAME: keycloak AUTH_OAUTH2_CLIENT_KEYCLOCK_PROVIDER: keycloak AUTH_OAUTH2_CLIENT_KEYCLOCK_CUSTOM-PARAMS_TYPE: oauth AUTH_OAUTH2_CLIENT_KEYCLOCK_CUSTOM-PARAMS_ROLES-FIELD: groups SPRING_CONFIG_ADDITIONAL-LOCATION: /ssl/roles.yml ``` 4. ``` #roles.yml rbac: roles: - name: "admins" clusters: - clusterA - clusterB subjects: - provider: keycloak type: group value: "groupC" permissions: - resource: applicationconfig actions: all - resource: clusterconfig actions: all - resource: topic value: ".*" actions: all - resource: consumer value: ".*" actions: all - resource: schema value: ".*" actions: all - resource: connect value: ".*" actions: all - resource: ksql actions: all - resource: acl actions: [ view ] ``` ### Steps to reproduce - Enable typical RBAC - Use DYNAMIC_CONFIG_ENABLED: 'true' - Go to http://localhost:8080/ui/clusters/$cluster-name/config ### Screenshots ![image](https://github.com/provectus/kafka-ui/assets/11389898/7cd98a1e-ab8e-45f2-8a92-30d7222c86ab) ![image](https://github.com/provectus/kafka-ui/assets/11389898/939dc058-1474-4d80-8a10-2e50154fc984) ![image](https://github.com/provectus/kafka-ui/assets/11389898/d1ed8bc6-54c7-4c0c-9e26-1eae16628f29) ### Logs ``` 2023-07-24 12:19:58,753 ERROR [reactor-http-epoll-6] o.s.b.a.w.r.e.AbstractErrorWebExceptionHandler: [4981702d-31] 500 Server Error for HTTP GET "/api/config" java.lang.IllegalArgumentException: cluster value is empty at org.springframework.util.Assert.isTrue(Assert.java:122) Suppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException: Error has been observed at the following site(s): *__checkpoint β‡’ Handler com.provectus.kafka.ui.controller.ApplicationConfigController#getCurrentConfig(ServerWebExchange) [DispatcherHandler] *__checkpoint β‡’ com.provectus.kafka.ui.config.CorsGlobalConfiguration$$Lambda$1155/0x000000080177acf0 [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain] *__checkpoint β‡’ AuthorizationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ExceptionTranslationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ LogoutWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerRequestCacheWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain] 
*__checkpoint β‡’ LogoutPageGeneratingWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ LoginPageGeneratingWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ OAuth2LoginAuthenticationWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ OAuth2AuthorizationRequestRedirectWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ HttpHeaderWriterWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain] *__checkpoint β‡’ org.springframework.web.filter.reactive.ServerHttpObservationFilter [DefaultWebFilterChain] *__checkpoint β‡’ HTTP GET "/api/config" [ExceptionHandlingWebHandler] Original Stack Trace: at org.springframework.util.Assert.isTrue(Assert.java:122) at com.provectus.kafka.ui.service.rbac.AccessControlService.isClusterAccessible(AccessControlService.java:148) at com.provectus.kafka.ui.service.rbac.AccessControlService.lambda$validateAccess$2(AccessControlService.java:104) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onNext(FluxPeekFuseable.java:196) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onNext(FluxFilterFuseable.java:118) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158) at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1839) at reactor.core.publisher.MonoCacheTime.subscribeOrReturn(MonoCacheTime.java:151) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:57) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onNext(FluxMap.java:224) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.request(FluxMap.java:295) at reactor.core.publisher.FluxFilter$FilterSubscriber.request(FluxFilter.java:186) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.request(FluxMapFuseable.java:360) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.request(FluxFilterFuseable.java:191) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144) at reactor.core.publisher.MonoIgnoreElements$IgnoreElementsSubscriber.onSubscribe(MonoIgnoreElements.java:72) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at 
reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onSubscribe(FluxFilterFuseable.java:87) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onSubscribe(FluxMapFuseable.java:265) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.FluxFilter$FilterSubscriber.onSubscribe(FluxFilter.java:85) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onSubscribe(FluxMap.java:194) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200) at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.complete(MonoIgnoreThen.java:292) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onNext(MonoIgnoreThen.java:187) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158) at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:293) at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:474) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139) at reactor.core.publisher.MonoZip$ZipInner.onSubscribe(MonoZip.java:466) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onSubscribe(MonoPeekTerminal.java:152) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoZip$ZipCoordinator.request(MonoZip.java:216) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onSubscribe(MonoIgnoreThen.java:134) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:125) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:240) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onComplete(MonoIgnoreThen.java:203) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189) at reactor.core.publisher.Operators.complete(Operators.java:137) at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:121) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at 
reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74) at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerNext(FluxConcatMapNoPrefetch.java:258) at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:338) at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoNext$NextSubscriber.onSubscribe(MonoNext.java:70) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onSubscribe(FluxConcatMapNoPrefetch.java:164) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onComplete(MonoPeekTerminal.java:299) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onComplete(MonoPeekTerminal.java:299) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:155) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74) at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113) at 
reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onNext(FluxPeekFuseable.java:503) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180) at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122) at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerNext(FluxConcatMapNoPrefetch.java:258) at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onNext(FluxFilterFuseable.java:118) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onNext(FluxFilterFuseable.java:337) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158) at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1839) at reactor.core.publisher.MonoCacheTime.subscribeOrReturn(MonoCacheTime.java:151) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:57) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxFilter$FilterSubscriber.onNext(FluxFilter.java:113) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onNext(FluxMap.java:224) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.request(FluxMap.java:295) at reactor.core.publisher.FluxFilter$FilterSubscriber.request(FluxFilter.java:186) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.request(FluxFilterFuseable.java:411) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.request(FluxMapFuseable.java:360) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.request(FluxMapFuseable.java:360) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.request(FluxFilterFuseable.java:191) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.request(FluxDefaultIfEmpty.java:98) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onSubscribe(MonoFlatMap.java:291) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.onSubscribe(Operators.java:2025) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at 
reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onSubscribe(FluxFilterFuseable.java:87) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onSubscribe(FluxMapFuseable.java:265) at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onSubscribe(FluxMapFuseable.java:265) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onSubscribe(FluxFilterFuseable.java:305) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.FluxFilter$FilterSubscriber.onSubscribe(FluxFilter.java:85) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onSubscribe(FluxMap.java:194) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onNext(FluxFilterFuseable.java:118) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.request(FluxFilterFuseable.java:191) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onSubscribe(FluxFilterFuseable.java:87) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onNext(FluxConcatMapNoPrefetch.java:206) at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:335) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:294) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerComplete(FluxConcatMapNoPrefetch.java:274) at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onComplete(FluxConcatMap.java:887) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onComplete(FluxPeekFuseable.java:940) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:85) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at 
reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxFlatMap$FlatMapMain.checkTerminated(FluxFlatMap.java:847) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:609) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:589) at reactor.core.publisher.FluxFlatMap$FlatMapMain.request(FluxFlatMap.java:347) at reactor.core.publisher.FluxFilter$FilterSubscriber.request(FluxFilter.java:186) at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305) at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.request(FluxPeekFuseable.java:783) at reactor.core.publisher.FluxFilter$FilterSubscriber.request(FluxFilter.java:186) at reactor.core.publisher.FluxMap$MapSubscriber.request(FluxMap.java:164) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:338) at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108) at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.request(FluxDefaultIfEmpty.java:98) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.request(FluxPeekFuseable.java:437) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139) at reactor.core.publisher.FluxFilter$FilterSubscriber.request(FluxFilter.java:186) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.FluxFilter$FilterSubscriber.onSubscribe(FluxFilter.java:85) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onSubscribe(MonoPeekTerminal.java:152) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onSubscribe(FluxPeekFuseable.java:471) at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onSubscribe(MonoPeekTerminal.java:152) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.onSubscribe(Operators.java:2025) at reactor.core.publisher.MonoNext$NextSubscriber.onSubscribe(MonoNext.java:70) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onSubscribe(FluxConcatMapNoPrefetch.java:164) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at 
reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onComplete(FluxPeekFuseable.java:940) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:85) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxFlatMap$FlatMapMain.checkTerminated(FluxFlatMap.java:847) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:609) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:589) at reactor.core.publisher.FluxFlatMap$FlatMapMain.onComplete(FluxFlatMap.java:466) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onComplete(FluxPeekFuseable.java:277) at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:357) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:294) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144) at reactor.core.publisher.FluxFlatMap$FlatMapMain.onSubscribe(FluxFlatMap.java:371) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071) at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onComplete(FluxDefaultIfEmpty.java:134) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxMap$MapConditionalSubscriber.onComplete(FluxMap.java:275) at reactor.core.publisher.Operators$MonoSubscriber.complete(Operators.java:1840) at reactor.core.publisher.MonoCacheTime$CoordinatorSubscriber.signalCached(MonoCacheTime.java:337) at reactor.core.publisher.MonoCacheTime$CoordinatorSubscriber.onNext(MonoCacheTime.java:354) at 
reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74) at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerNext(FluxConcatMapNoPrefetch.java:258) at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863) at reactor.core.publisher.FluxConcatMap$WeakScalarSubscription.request(FluxConcatMap.java:479) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:338) at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoNext$NextSubscriber.onSubscribe(MonoNext.java:70) at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onSubscribe(FluxConcatMapNoPrefetch.java:164) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.MonoCacheTime.subscribeOrReturn(MonoCacheTime.java:143) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:57) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onComplete(FluxPeekFuseable.java:940) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:85) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxFlatMap$FlatMapMain.checkTerminated(FluxFlatMap.java:847) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:609) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:589) at 
reactor.core.publisher.FluxFlatMap$FlatMapMain.onComplete(FluxFlatMap.java:466) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onComplete(FluxPeekFuseable.java:277) at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:357) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:294) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144) at reactor.core.publisher.FluxFlatMap$FlatMapMain.onSubscribe(FluxFlatMap.java:371) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxPeekFuseable$PeekConditionalSubscriber.onComplete(FluxPeekFuseable.java:940) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:85) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoNext$NextSubscriber.onComplete(MonoNext.java:102) at reactor.core.publisher.FluxFilter$FilterSubscriber.onComplete(FluxFilter.java:166) at reactor.core.publisher.FluxFlatMap$FlatMapMain.checkTerminated(FluxFlatMap.java:847) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drainLoop(FluxFlatMap.java:609) at reactor.core.publisher.FluxFlatMap$FlatMapMain.drain(FluxFlatMap.java:589) at reactor.core.publisher.FluxFlatMap$FlatMapMain.onComplete(FluxFlatMap.java:466) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onComplete(FluxPeekFuseable.java:277) at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:357) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:294) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.request(FluxPeekFuseable.java:144) at reactor.core.publisher.FluxFlatMap$FlatMapMain.onSubscribe(FluxFlatMap.java:371) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableSubscriber.onSubscribe(FluxPeekFuseable.java:178) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at 
reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onComplete(FluxFilterFuseable.java:171) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onComplete(FluxPeekFuseable.java:595) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.request(FluxPeekFuseable.java:437) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.request(FluxFilterFuseable.java:191) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onSubscribe(FluxFilterFuseable.java:87) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onSubscribe(FluxPeekFuseable.java:471) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onComplete(FluxSwitchIfEmpty.java:82) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onComplete(FluxMapFuseable.java:152) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onComplete(FluxMapFuseable.java:152) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onComplete(FluxMapFuseable.java:152) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onComplete(FluxFilterFuseable.java:171) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onComplete(FluxPeekFuseable.java:595) at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2547) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.request(FluxPeekFuseable.java:437) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.request(FluxFilterFuseable.java:191) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171) at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194) at 
reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341) at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96) at reactor.core.publisher.FluxFilterFuseable$FilterFuseableSubscriber.onSubscribe(FluxFilterFuseable.java:87) at reactor.core.publisher.FluxPeekFuseable$PeekFuseableConditionalSubscriber.onSubscribe(FluxPeekFuseable.java:471) at reactor.core.publisher.MonoJust.subscribe(MonoJust.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129) at reactor.core.publisher.MonoFlatMap$FlatMapMain.secondComplete(MonoFlatMap.java:245) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onNext(MonoFlatMap.java:305) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071) at reactor.core.publisher.MonoCollectList$MonoCollectListSubscriber.onComplete(MonoCollectList.java:118) at reactor.core.publisher.FluxIterable$IterableSubscription.fastPath(FluxIterable.java:424) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:291) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.request(Operators.java:2041) at reactor.core.publisher.MonoFlatMap$FlatMapInner.onSubscribe(MonoFlatMap.java:291) at reactor.core.publisher.Operators$BaseFluxToMonoOperator.onSubscribe(Operators.java:2025) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.MonoFromFluxOperator.subscribe(MonoFromFluxOperator.java:81) at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165) at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74) at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82) at reactor.core.publisher.FluxFilterWhen$FluxFilterWhenSubscriber.drain(FluxFilterWhen.java:301) at reactor.core.publisher.FluxFilterWhen$FluxFilterWhenSubscriber.onNext(FluxFilterWhen.java:140) at reactor.core.publisher.FluxIterable$IterableSubscription.slowPath(FluxIterable.java:335) at reactor.core.publisher.FluxIterable$IterableSubscription.request(FluxIterable.java:294) at 
reactor.core.publisher.FluxFilterWhen$FluxFilterWhenSubscriber.onSubscribe(FluxFilterWhen.java:200) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201) at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:52) at reactor.core.publisher.Mono.subscribe(Mono.java:4485) at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263) at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51) at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64) at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55) at reactor.netty.http.server.HttpServer$HttpServerHandle.onStateChange(HttpServer.java:1006) at reactor.netty.ReactorNetty$CompositeConnectionObserver.onStateChange(ReactorNetty.java:710) at reactor.netty.transport.ServerTransport$ChildObserver.onStateChange(ServerTransport.java:481) at reactor.netty.http.server.HttpServerOperations.onInboundNext(HttpServerOperations.java:633) at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:113) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at reactor.netty.http.server.HttpTrafficHandler.channelRead(HttpTrafficHandler.java:228) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318) at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at 
io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:800) at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:499) at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:397) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) ``` ### Additional context _No response_
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java index 6cc455624bc..59ea02fea84 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/AccessControlService.java @@ -51,6 +51,8 @@ @Slf4j public class AccessControlService { + private static final String ACCESS_DENIED = "Access denied"; + @Nullable private final InMemoryReactiveClientRegistrationRepository clientRegistrationRepository; private final RoleBasedAccessControlProperties properties; @@ -97,6 +99,17 @@ public Mono<Void> validateAccess(AccessContext context) { return Mono.empty(); } + if (CollectionUtils.isNotEmpty(context.getApplicationConfigActions())) { + return getUser() + .doOnNext(user -> { + boolean accessGranted = isApplicationConfigAccessible(context, user); + + if (!accessGranted) { + throw new AccessDeniedException(ACCESS_DENIED); + } + }).then(); + } + return getUser() .doOnNext(user -> { boolean accessGranted = @@ -113,7 +126,7 @@ && isAclAccessible(context, user) && isAuditAccessible(context, user); if (!accessGranted) { - throw new AccessDeniedException("Access denied"); + throw new AccessDeniedException(ACCESS_DENIED); } }) .then();
null
train
test
2023-08-01T14:42:00
"2023-07-24T14:14:31Z"
fallen-up
train
provectus/kafka-ui/3614_4085
provectus/kafka-ui
provectus/kafka-ui/3614
provectus/kafka-ui/4085
[ "keyword_pr_to_issue" ]
844eb17d7a3e112ae6742d7c8e4132c3dcef19b5
cca2c9699755c2128bb88cf8920c9ed4414dbd58
[ "Hi @iliax, can you assign it to me? I want to work on it.", "@Ksj14-kumar sure, go ahead", "@Ksj14-kumar any updates?", "@Haarolean I am still working on this issue, I will give you an update soon.", "@Ksj14-kumar any updates?", "@Haarolean, Can you unassign me this task, I can't solve it. Sorry for that.", "@Ksj14-kumar no worries" ]
[]
"2023-08-03T00:52:42Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed" ]
Active controller badge on invalid node
In the `/api/clusters/local/stats` endpoint, `activeControllers` **is the id of the node** that is the _Active controller_. Currently the green badge is rendered on another node, or sometimes not shown at all. Version: master commit [de21721](https://github.com/provectus/kafka-ui/commit/de21721) <img width="1540" alt="Screenshot 2023-04-03 at 12 24 34" src="https://user-images.githubusercontent.com/702205/229454092-15bbf5f2-6585-473d-bdc3-b6a0be8e7bfe.png">
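A minimal sketch of the intended condition, with a hypothetical helper name; the badge should be keyed off the broker id itself rather than the table row index, which is what the fix below does:
```
// Hypothetical helper: show the Active Controller badge only when this
// row's broker id equals the node id reported by /stats.
const isActiveController = (
  brokerId: number,
  activeControllers?: number
): boolean =>
  activeControllers !== undefined && brokerId === activeControllers;

isActiveController(100, 100); // true -> render the badge on broker 100
isActiveController(200, 100); // false -> no badge
```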
[ "kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx", "kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx", "kafka-ui-react-app/src/lib/fixtures/brokers.ts", "kafka-ui-react-app/src/lib/fixtures/clusters.ts" ]
[ "kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx", "kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx", "kafka-ui-react-app/src/lib/fixtures/brokers.ts", "kafka-ui-react-app/src/lib/fixtures/clusters.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx b/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx index 89a553be7d0..40a5deeebfe 100644 --- a/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx +++ b/kafka-ui-react-app/src/components/Brokers/Broker/__test__/Broker.spec.tsx @@ -13,7 +13,7 @@ import { brokersPayload } from 'lib/fixtures/brokers'; import { clusterStatsPayload } from 'lib/fixtures/clusters'; const clusterName = 'local'; -const brokerId = 1; +const brokerId = 200; const activeClassName = 'is-active'; const brokerLogdir = { pageText: 'brokerLogdir', diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx index e59c006b0c6..ede570c655b 100644 --- a/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx +++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/BrokersList.tsx @@ -73,13 +73,13 @@ const BrokersList: React.FC = () => { header: 'Broker ID', accessorKey: 'brokerId', // eslint-disable-next-line react/no-unstable-nested-components - cell: ({ row: { id }, getValue }) => ( + cell: ({ getValue }) => ( <S.RowCell> <LinkCell value={`${getValue<string | number>()}`} to={encodeURIComponent(`${getValue<string | number>()}`)} /> - {id === String(activeControllers) && ( + {getValue<string | number>() === activeControllers && ( <Tooltip value={<CheckMarkRoundIcon />} content="Active Controller" diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx b/kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx index 0c60cf4749c..3e88569a39e 100644 --- a/kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx +++ b/kafka-ui-react-app/src/components/Brokers/BrokersList/__test__/BrokersList.spec.tsx @@ -56,11 +56,11 @@ describe('BrokersList Component', () => { }); it('opens broker when row clicked', async () => { renderComponent(); - await userEvent.click(screen.getByRole('cell', { name: '0' })); + await userEvent.click(screen.getByRole('cell', { name: '100' })); await waitFor(() => expect(mockedUsedNavigate).toBeCalledWith( - clusterBrokerPath(clusterName, '0') + clusterBrokerPath(clusterName, '100') ) ); }); @@ -124,6 +124,39 @@ describe('BrokersList Component', () => { }); }); + describe('BrokersList', () => { + describe('when the brokers are loaded', () => { + const testActiveControllers = 0; + beforeEach(() => { + (useBrokers as jest.Mock).mockImplementation(() => ({ + data: brokersPayload, + })); + (useClusterStats as jest.Mock).mockImplementation(() => ({ + data: clusterStatsPayload, + })); + }); + + it(`Indicates correct active cluster`, async () => { + renderComponent(); + await waitFor(() => + expect(screen.getByRole('tooltip')).toBeInTheDocument() + ); + }); + it(`Correct display even if there is no active cluster: ${testActiveControllers} `, async () => { + (useClusterStats as jest.Mock).mockImplementation(() => ({ + data: { + ...clusterStatsPayload, + activeControllers: testActiveControllers, + }, + })); + renderComponent(); + await waitFor(() => + expect(screen.queryByRole('tooltip')).not.toBeInTheDocument() + ); + }); + }); + }); + describe('when diskUsage is empty', () => { beforeEach(() => { (useBrokers as jest.Mock).mockImplementation(() => ({ @@ -157,11 +190,11 @@ describe('BrokersList Component', () => { }); it('opens broker when row clicked', async () => { 
renderComponent(); - await userEvent.click(screen.getByRole('cell', { name: '1' })); + await userEvent.click(screen.getByRole('cell', { name: '100' })); await waitFor(() => expect(mockedUsedNavigate).toBeCalledWith( - clusterBrokerPath(clusterName, '1') + clusterBrokerPath(clusterName, '100') ) ); }); diff --git a/kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx b/kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx index e14898385b6..bbf212541e8 100644 --- a/kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx +++ b/kafka-ui-react-app/src/components/common/Icons/CheckMarkRoundIcon.tsx @@ -7,6 +7,7 @@ const CheckMarkRoundIcon: React.FC = () => { height="14" viewBox="0 0 14 14" fill="none" + role="tooltip" xmlns="http://www.w3.org/2000/svg" > <path diff --git a/kafka-ui-react-app/src/lib/fixtures/brokers.ts b/kafka-ui-react-app/src/lib/fixtures/brokers.ts index 8999bd2f814..1a76f73fe37 100644 --- a/kafka-ui-react-app/src/lib/fixtures/brokers.ts +++ b/kafka-ui-react-app/src/lib/fixtures/brokers.ts @@ -1,8 +1,8 @@ import { BrokerConfig, BrokersLogdirs, ConfigSource } from 'generated-sources'; export const brokersPayload = [ - { id: 1, host: 'b-1.test.kafka.amazonaws.com', port: 9092 }, - { id: 2, host: 'b-2.test.kafka.amazonaws.com', port: 9092 }, + { id: 100, host: 'b-1.test.kafka.amazonaws.com', port: 9092 }, + { id: 200, host: 'b-2.test.kafka.amazonaws.com', port: 9092 }, ]; const partition = { diff --git a/kafka-ui-react-app/src/lib/fixtures/clusters.ts b/kafka-ui-react-app/src/lib/fixtures/clusters.ts index 2b1cc8f8f51..3f4476bae8a 100644 --- a/kafka-ui-react-app/src/lib/fixtures/clusters.ts +++ b/kafka-ui-react-app/src/lib/fixtures/clusters.ts @@ -32,15 +32,15 @@ export const clustersPayload: Cluster[] = [ export const clusterStatsPayload = { brokerCount: 2, - activeControllers: 1, + activeControllers: 100, onlinePartitionCount: 138, offlinePartitionCount: 0, inSyncReplicasCount: 239, outOfSyncReplicasCount: 0, underReplicatedPartitionCount: 0, diskUsage: [ - { brokerId: 0, segmentSize: 334567, segmentCount: 245 }, - { brokerId: 1, segmentSize: 12345678, segmentCount: 121 }, + { brokerId: 100, segmentSize: 334567, segmentCount: 245 }, + { brokerId: 200, segmentSize: 12345678, segmentCount: 121 }, ], version: '2.2.1', };
null
val
test
2023-08-23T12:28:47
"2023-04-03T08:29:54Z"
iliax
train
provectus/kafka-ui/4087_4090
provectus/kafka-ui
provectus/kafka-ui/4087
provectus/kafka-ui/4090
[ "keyword_pr_to_issue" ]
b0583a3ca7d58bd9978e26a67bb8927fcbf03c42
bbb739af926a492f5d8e38866ba356f13b031890
[ "Hello, is this still open? Can I help with something? ", "@techpordentro we already have a PR for this (#4090), thank you though! Feel free to pick any other issue from up for grabs board." ]
[ "since the default parameter of formatTimestamp is `{ hourCycle: 'h23' }` , what if instead of constructing it , we can just `date.toLocaleString([], { hourCycle: 'h23' })` , did you try this way ?", "@MikeStrike101 can you take a look?", "@Haarolean sorry I was on vacation these days. Will take a look now." ]
"2023-08-03T18:22:33Z"
[ "good first issue", "scope/frontend", "status/accepted", "type/chore" ]
Investigate dateTimeHelpers.spec.ts linter error
@Haarolean I think we need to open an issue to investigate the [dateTimeHelpers.spec.ts](https://github.com/provectus/kafka-ui/pull/3969/files#diff-3944835013941011f6e5cc58d818815553b691f0a67c3149561451253f90f5c9) linter error. _Originally posted by @Mgrdich in https://github.com/provectus/kafka-ui/issues/3969#issuecomment-1663830373_
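For context, a minimal sketch of the assertion under discussion, assuming (per the comments above) that the helper's default options are `{ hourCycle: 'h23' }`:
```
// Sketch: the expected value must be built with the same options the helper
// uses, i.e. hourCycle: 'h23' rather than hour12: false, which was flagged.
const date = new Date();
const expected = date.toLocaleString([], { hourCycle: 'h23' });
console.log(expected); // e.g. "8/3/2023, 13:42:11"
```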
[ "kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts" ]
[ "kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts" ]
[]
diff --git a/kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts b/kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts index 5ed95fa8515..ad5346283c6 100644 --- a/kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts +++ b/kafka-ui-react-app/src/lib/hooks/__tests__/dateTimeHelpers.spec.ts @@ -13,10 +13,10 @@ describe('dateTimeHelpers', () => { it('should output the correct date', () => { const date = new Date(); expect(formatTimestamp(date)).toBe( - date.toLocaleString([], { hour12: false }) + date.toLocaleString([], { hourCycle: 'h23' }) ); expect(formatTimestamp(date.getTime())).toBe( - date.toLocaleString([], { hour12: false }) + date.toLocaleString([], { hourCycle: 'h23' }) ); }); });
null
test
test
2023-08-30T10:39:58
"2023-08-03T11:42:11Z"
Haarolean
train
provectus/kafka-ui/2751_4093
provectus/kafka-ui
provectus/kafka-ui/2751
provectus/kafka-ui/4093
[ "keyword_pr_to_issue" ]
ac09efcd3486fda6cee41986a01820710863eb81
150fc21fb84770520e669f8681b5c9a2bf3ba344
[ "Would be really nice to have", "It would be amazing!", "Feel free to test the things out:\r\nimage: `public.ecr.aws/provectus/kafka-ui-custom-build:4093`\r\nConfig example:\r\n```\r\n - provider: oauth_github\r\n type: team\r\n value: \"provectus/kafka-backend\"\r\n```\r\n" ]
[ "ORGANIZATION constant?", "is it really \"login\"", "please write type here for better readability", "distinct not useful here", "rm .distinct()", "Yes it is!" ]
"2023-08-04T13:59:49Z"
[ "type/enhancement", "scope/backend", "status/accepted", "area/rbac" ]
RBAC: Support github teams
Currently, as of #753, we only support roles based on usernames/organizations. Please upvote if you're interested in roles via GitHub teams. ``` - provider: oauth_github type: team value: "provectus/kafka-backend" ```
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java index 654654a05dd..90c4ceebc60 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/rbac/extractor/GithubAuthorityExtractor.java @@ -5,6 +5,8 @@ import com.provectus.kafka.ui.model.rbac.Role; import com.provectus.kafka.ui.model.rbac.provider.Provider; import com.provectus.kafka.ui.service.rbac.AccessControlService; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; @@ -26,6 +28,8 @@ public class GithubAuthorityExtractor implements ProviderAuthorityExtractor { private static final String ORGANIZATION_ATTRIBUTE_NAME = "organizations_url"; private static final String USERNAME_ATTRIBUTE_NAME = "login"; private static final String ORGANIZATION_NAME = "login"; + private static final String ORGANIZATION = "organization"; + private static final String TEAM_NAME = "slug"; private static final String GITHUB_ACCEPT_HEADER = "application/vnd.github+json"; private static final String DUMMY = "dummy"; // The number of results (max 100) per page of list organizations for authenticated user. @@ -46,7 +50,7 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str throw new RuntimeException(); } - Set<String> groupsByUsername = new HashSet<>(); + Set<String> rolesByUsername = new HashSet<>(); String username = principal.getAttribute(USERNAME_ATTRIBUTE_NAME); if (username == null) { log.debug("Github username param is not present"); @@ -59,13 +63,7 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str .filter(s -> s.getType().equals("user")) .anyMatch(s -> s.getValue().equals(username))) .map(Role::getName) - .forEach(groupsByUsername::add); - } - - String organization = principal.getAttribute(ORGANIZATION_ATTRIBUTE_NAME); - if (organization == null) { - log.debug("Github organization param is not present"); - return Mono.just(groupsByUsername); + .forEach(rolesByUsername::add); } OAuth2UserRequest req = (OAuth2UserRequest) additionalParams.get("request"); @@ -80,8 +78,24 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str .getUserInfoEndpoint() .getUri(); } + var webClient = WebClient.create(infoEndpoint); - WebClient webClient = WebClient.create(infoEndpoint); + Mono<Set<String>> rolesByOrganization = getOrganizationRoles(principal, additionalParams, acs, webClient); + Mono<Set<String>> rolesByTeams = getTeamRoles(webClient, additionalParams, acs); + + return Mono.zip(rolesByOrganization, rolesByTeams) + .map((t) -> Stream.of(t.getT1(), t.getT2(), rolesByUsername) + .flatMap(Collection::stream) + .collect(Collectors.toSet())); + } + + private Mono<Set<String>> getOrganizationRoles(DefaultOAuth2User principal, Map<String, Object> additionalParams, + AccessControlService acs, WebClient webClient) { + String organization = principal.getAttribute(ORGANIZATION_ATTRIBUTE_NAME); + if (organization == null) { + log.debug("Github organization param is not present"); + return Mono.just(Collections.emptySet()); + } final Mono<List<Map<String, Object>>> userOrganizations = webClient .get() @@ -99,22 +113,76 @@ public Mono<Set<String>> extract(AccessControlService acs, Object value, Map<Str //@formatter:on 
return userOrganizations - .map(orgsMap -> { - var groupsByOrg = acs.getRoles() - .stream() - .filter(role -> role.getSubjects() - .stream() - .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) - .filter(s -> s.getType().equals("organization")) - .anyMatch(subject -> orgsMap.stream() - .map(org -> org.get(ORGANIZATION_NAME).toString()) - .distinct() - .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue())) - )) - .map(Role::getName); - - return Stream.concat(groupsByOrg, groupsByUsername.stream()).collect(Collectors.toSet()); - }); + .map(orgsMap -> acs.getRoles() + .stream() + .filter(role -> role.getSubjects() + .stream() + .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) + .filter(s -> s.getType().equals(ORGANIZATION)) + .anyMatch(subject -> orgsMap.stream() + .map(org -> org.get(ORGANIZATION_NAME).toString()) + .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue())) + )) + .map(Role::getName) + .collect(Collectors.toSet())); + } + + @SuppressWarnings("unchecked") + private Mono<Set<String>> getTeamRoles(WebClient webClient, Map<String, Object> additionalParams, + AccessControlService acs) { + + var requestedTeams = acs.getRoles() + .stream() + .filter(r -> r.getSubjects() + .stream() + .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) + .anyMatch(s -> s.getType().equals("team"))) + .collect(Collectors.toSet()); + + if (requestedTeams.isEmpty()) { + log.debug("No roles with github teams found, skipping"); + return Mono.just(Collections.emptySet()); + } + + final Mono<List<Map<String, Object>>> rawTeams = webClient + .get() + .uri(uriBuilder -> uriBuilder.path("/teams") + .queryParam("per_page", ORGANIZATIONS_PER_PAGE) + .build()) + .headers(headers -> { + headers.set(HttpHeaders.ACCEPT, GITHUB_ACCEPT_HEADER); + OAuth2UserRequest request = (OAuth2UserRequest) additionalParams.get("request"); + headers.setBearerAuth(request.getAccessToken().getTokenValue()); + }) + .retrieve() + //@formatter:off + .bodyToMono(new ParameterizedTypeReference<>() {}); + //@formatter:on + + final Mono<List<String>> mappedTeams = rawTeams + .map(teams -> teams.stream() + .map(teamInfo -> { + var name = teamInfo.get(TEAM_NAME); + var orgInfo = (Map<String, Object>) teamInfo.get(ORGANIZATION); + var orgName = orgInfo.get(ORGANIZATION_NAME); + return orgName + "/" + name; + }) + .map(Object::toString) + .collect(Collectors.toList()) + ); + + return mappedTeams + .map(teams -> acs.getRoles() + .stream() + .filter(role -> role.getSubjects() + .stream() + .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) + .filter(s -> s.getType().equals("team")) + .anyMatch(subject -> teams.stream() + .anyMatch(teamName -> teamName.equalsIgnoreCase(subject.getValue())) + )) + .map(Role::getName) + .collect(Collectors.toSet())); } }
null
train
test
2023-08-07T16:04:10
"2022-10-14T10:31:32Z"
Haarolean
train
provectus/kafka-ui/4030_4097
provectus/kafka-ui
provectus/kafka-ui/4030
provectus/kafka-ui/4097
[ "keyword_pr_to_issue" ]
333eae24759aaa7b3fc14e5e7dea232200c13dcd
7a82079471d8be926c6a3eb8a850ccf4886a0591
[ "@Haarolean when `retention.bytes` (Max size on disk) is not set implicitly - frontend **should not** pass this parameter to backend. \r\n", "@Haarolean hi, can i pr like i said? or does it still need to be reviewed? thank you.", "@p-eye the proper solution is described here: https://github.com/provectus/kafka-ui/issues/4030#issuecomment-1652008533\r\nif you can do just that, feel free to raise a PR", "@Haarolean\r\nalthough i captuered my test only for `retention.bytes`, i think other 4 properties also should not be passed to backend because of the same reason. (when a specific value is not assigned, a config should be set with the default, or cluster level value...)\r\ni will raise the PR for five properties, thank you\r\n- `cleanup.policy`\r\n- `retention.ms`\r\n- `max.message.bytes`\r\n- `min.insync.replicas`\r\n\r\nref)\r\nhttps://docs.confluent.io/platform/current/installation/configuration/broker-configs.html" ]
[]
"2023-08-07T06:38:06Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "severity/medium" ]
FE: Topics: Do not send properties which are not defined explicitly
### Issue submitter TODO list - [X] I've looked up my issue in the [FAQ](https://docs.kafka-ui.provectus.io/faq/common-problems) - [X] I've searched for already existing issues [here](https://github.com/provectus/kafka-ui/issues) - [X] I've tried running the `master`-labeled docker image and the issue still persists there - [X] I'm running a supported version of the application which is listed [here](https://github.com/provectus/kafka-ui/blob/master/SECURITY.md) ### Describe the bug (actual behavior) Hi, I create a topic without any 'Custom parameters' in order to set the topic up with the cluster configuration. However, it is created with the TOPIC_CUSTOM_PARAMS values, not the cluster configuration values. ### Expected behavior _No response_ ### Your installation details 1. hash commit: 9549f68 ### Steps to reproduce 1. I set the cluster configuration at AWS. ![Screenshot 2023-07-12 9:23:15 AM](https://github.com/provectus/kafka-ui/assets/50516754/14ff521a-8a12-4981-ace9-f8bff2548994) Line 14 means that the default retention bytes of a topic, when it is created without a specific retention bytes value, is 1073741824 bytes (1 GB). 2. It works well when I create a topic with the Kafka CLI (```./kafka-topics.sh --create --replication-factor=1 --partitions=1 --topic brokertest1 --bootstrap-server sth:9092```) 3. However, when I create a topic in kafka-ui without any custom parameters, which should set the topic up with the default cluster config, it doesn't work and retention.bytes is set to '-1'. ![Screenshot 2023-07-12 9:34:18 AM](https://github.com/provectus/kafka-ui/assets/50516754/80807267-1bcb-456e-a351-dbc82fefe174) ![252823352-05cea9d8-701b-4bb5-a8cf-2059aa0940ae](https://github.com/provectus/kafka-ui/assets/50516754/7a2728a5-590a-4b1d-bce3-a61b391330b9) ### Screenshots _No response_ ### Logs _No response_ ### Additional context I tried to solve the issue and found that the ```configs``` object in ```formatTopicCreation``` is involved. https://github.com/provectus/kafka-ui/blob/f124fa632d10aaad7d79c9ee0b00f32922079972/kafka-ui-react-app/src/lib/hooks/api/topics.ts#L86C5-L86C5 The config values come from the ```TOPIC_CUSTOM_PARAMS``` constants. https://github.com/provectus/kafka-ui/blob/f124fa632d10aaad7d79c9ee0b00f32922079972/kafka-ui-react-app/src/lib/constants.ts#L22C1-L22C1 Each value is the default input value for the corresponding field in the custom parameter form. ![Screenshot 2023-07-12 10:07:42 AM](https://github.com/provectus/kafka-ui/assets/50516754/e5985c67-561e-4441-af5c-0e98dc875326) However, the five configs below are always set from these constants, so the expected cluster default config does not take effect. Solved: after commenting them out as below, topic creation without custom parameters works well. - the custom cluster config works well: e.g. retention.bytes = 1073741824 bytes - the Kafka default config works well: e.g. cleanup.policy = delete ``` const configs = { // 'cleanup.policy': cleanupPolicy, // 'retention.ms': retentionMs.toString(), // 'retention.bytes': retentionBytes.toString(), // 'max.message.bytes': maxMessageBytes.toString(), // 'min.insync.replicas': minInSyncReplicas.toString(), ...Object.values(customParams || {}).reduce(topicReducer, {}), }; ``` We actually run MSK with cluster-level config now, but since the default config does not take effect, we have to set the values on each topic manually. I think removing the five lines could be a solution, but there may be other purposes for that code. Could your team review this issue? Thank you.
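A minimal sketch of the direction the report suggests, with hypothetical names: a config key is sent only when the user set it explicitly, so unset properties fall back to the cluster or broker defaults:
```
// Hypothetical sketch: include a key only when its value was set explicitly;
// anything left undefined is omitted from the topic creation request.
type TopicFormInput = { cleanupPolicy?: string; retentionBytes?: number };

const buildConfigs = ({ cleanupPolicy, retentionBytes }: TopicFormInput) => ({
  ...(cleanupPolicy !== undefined && { 'cleanup.policy': cleanupPolicy }),
  ...(retentionBytes !== undefined && {
    'retention.bytes': retentionBytes.toString(),
  }),
});

buildConfigs({ cleanupPolicy: 'delete' }); // { 'cleanup.policy': 'delete' }
```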
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/lib/hooks/api/topics.ts", "kafka-ui-react-app/src/redux/interfaces/topic.ts" ]
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/lib/hooks/api/topics.ts", "kafka-ui-react-app/src/redux/interfaces/topic.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/New/New.tsx b/kafka-ui-react-app/src/components/Topics/New/New.tsx index a8d2600a43a..c7528fb99ae 100644 --- a/kafka-ui-react-app/src/components/Topics/New/New.tsx +++ b/kafka-ui-react-app/src/components/Topics/New/New.tsx @@ -15,7 +15,7 @@ enum Filters { PARTITION_COUNT = 'partitionCount', REPLICATION_FACTOR = 'replicationFactor', INSYNC_REPLICAS = 'inSyncReplicas', - CLEANUP_POLICY = 'Delete', + CLEANUP_POLICY = 'cleanUpPolicy', } const New: React.FC = () => { diff --git a/kafka-ui-react-app/src/lib/hooks/api/topics.ts b/kafka-ui-react-app/src/lib/hooks/api/topics.ts index f06b6a87670..00d08bc66b8 100644 --- a/kafka-ui-react-app/src/lib/hooks/api/topics.ts +++ b/kafka-ui-react-app/src/lib/hooks/api/topics.ts @@ -76,7 +76,6 @@ const formatTopicCreation = (form: TopicFormData): TopicCreation => { partitions, replicationFactor, cleanupPolicy, - retentionBytes, retentionMs, maxMessageBytes, minInSyncReplicas, @@ -86,7 +85,6 @@ const formatTopicCreation = (form: TopicFormData): TopicCreation => { const configs = { 'cleanup.policy': cleanupPolicy, 'retention.ms': retentionMs.toString(), - 'retention.bytes': retentionBytes.toString(), 'max.message.bytes': maxMessageBytes.toString(), 'min.insync.replicas': minInSyncReplicas.toString(), ...Object.values(customParams || {}).reduce(topicReducer, {}), diff --git a/kafka-ui-react-app/src/redux/interfaces/topic.ts b/kafka-ui-react-app/src/redux/interfaces/topic.ts index 153002240a1..bdc25ee0c60 100644 --- a/kafka-ui-react-app/src/redux/interfaces/topic.ts +++ b/kafka-ui-react-app/src/redux/interfaces/topic.ts @@ -44,7 +44,6 @@ export interface TopicFormData { minInSyncReplicas: number; cleanupPolicy: string; retentionMs: number; - retentionBytes: number; maxMessageBytes: number; customParams: { name: string;
null
train
test
2023-08-04T13:43:40
"2023-07-12T01:57:45Z"
p-eye
train
provectus/kafka-ui/31_34
provectus/kafka-ui
provectus/kafka-ui/31
provectus/kafka-ui/34
[ "timestamp(timedelta=352.0, similarity=0.894506078113938)" ]
e2918b41ca928283339f256e552b0643e501bc92
5d0b783abb40e00644d326588ffa0566e8064bdc
[]
[]
"2020-04-16T15:19:28Z"
[ "scope/frontend" ]
Add ui prefix to react router path
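A minimal sketch of what the title implies, mirroring the `clusterPath` helper in the patch below: every cluster route gains a `/ui` prefix.
```
// Sketch mirroring the patch below: all cluster routes are prefixed with /ui.
const clusterPath = (clusterName: string): string =>
  `/ui/clusters/${clusterName}`;

console.log(clusterPath('local')); // "/ui/clusters/local"
```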
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java", "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx", "kafka-ui-react-app/src/components/Nav/Nav.tsx", "kafka-ui-react-app/src/components/Topics/Topics.tsx", "kafka-ui-react-app/src/lib/paths.ts" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java", "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx", "kafka-ui-react-app/src/components/Nav/Nav.tsx", "kafka-ui-react-app/src/components/Topics/Topics.tsx", "kafka-ui-react-app/src/lib/paths.ts" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java index 370e89847f8..e117a43b6c1 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/rest/config/CustomWebFilter.java @@ -10,7 +10,7 @@ public class CustomWebFilter implements WebFilter { @Override public Mono<Void> filter(ServerWebExchange exchange, WebFilterChain chain) { - if (exchange.getRequest().getURI().getPath().equals("/")) { + if (exchange.getRequest().getURI().getPath().equals("/") || exchange.getRequest().getURI().getPath().startsWith("/ui")) { return chain.filter(exchange.mutate().request(exchange.getRequest().mutate().path("/index.html").build()).build()); } diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index 46a5ff84191..7c53592906b 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -1,9 +1,5 @@ import React from 'react'; -import { - Switch, - Route, - Redirect, -} from 'react-router-dom'; +import { Switch, Route, Redirect } from 'react-router-dom'; import './App.scss'; import BrokersContainer from './Brokers/BrokersContainer'; import TopicsContainer from './Topics/TopicsContainer'; @@ -21,13 +17,19 @@ const App: React.FC<AppProps> = ({ isClusterListFetched, fetchClustersList, }) => { - React.useEffect(() => { fetchClustersList() }, [fetchClustersList]); + React.useEffect(() => { + fetchClustersList(); + }, [fetchClustersList]); return ( <div className="Layout"> - <nav className="navbar is-fixed-top is-white Layout__header" role="navigation" aria-label="main navigation"> + <nav + className="navbar is-fixed-top is-white Layout__header" + role="navigation" + aria-label="main navigation" + > <div className="navbar-brand"> - <a className="navbar-item title is-5 is-marginless" href="/"> + <a className="navbar-item title is-5 is-marginless" href="/ui"> Kafka UI </a> </div> @@ -36,17 +38,31 @@ const App: React.FC<AppProps> = ({ <NavConatiner className="Layout__navbar" /> {isClusterListFetched ? 
( <Switch> - <Route exact path="/" component={Dashboard} /> - <Route exact path="/clusters" component={Dashboard} /> - <Route path="/clusters/:clusterName/topics" component={TopicsContainer} /> - <Route path="/clusters/:clusterName/brokers" component={BrokersContainer} /> - <Route path="/clusters/:clusterName/consumer-groups" component={ConsumersGroupsContainer} /> - <Redirect from="/clusters/:clusterName" to="/clusters/:clusterName/brokers" /> + <Route + exact + path={['/', '/ui', '/ui/clusters']} + component={Dashboard} + /> + <Route + path="/ui/clusters/:clusterName/brokers" + component={BrokersContainer} + /> + <Route + path="/ui/clusters/:clusterName/topics" + component={TopicsContainer} + /> + <Route + path="/ui/clusters/:clusterName/consumer-groups" + component={ConsumersGroupsContainer} + /> + <Redirect + from="/ui/clusters/:clusterName" + to="/ui/clusters/:clusterName/brokers" + /> </Switch> ) : ( <PageLoader /> )} - </main> </div> ); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx index 6286345e018..747cf998f2e 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/ConsumerGroups.tsx @@ -1,11 +1,8 @@ import React from 'react'; import { ClusterName } from 'redux/interfaces'; -import { - Switch, - Route, -} from 'react-router-dom'; -import ListContainer from './List/ListContainer'; +import { Switch, Route } from 'react-router-dom'; import PageLoader from 'components/common/PageLoader/PageLoader'; +import ListContainer from './List/ListContainer'; interface Props { clusterName: ClusterName; @@ -18,17 +15,23 @@ const ConsumerGroups: React.FC<Props> = ({ isFetched, fetchConsumerGroupsList, }) => { - React.useEffect(() => { fetchConsumerGroupsList(clusterName); }, [fetchConsumerGroupsList, clusterName]); + React.useEffect(() => { + fetchConsumerGroupsList(clusterName); + }, [fetchConsumerGroupsList, clusterName]); if (isFetched) { return ( <Switch> - <Route exact path="/clusters/:clusterName/consumer-groups" component={ListContainer} /> + <Route + exact + path="/ui/clusters/:clusterName/consumer-groups" + component={ListContainer} + /> </Switch> ); } - return (<PageLoader />); + return <PageLoader />; }; export default ConsumerGroups; diff --git a/kafka-ui-react-app/src/components/Nav/Nav.tsx b/kafka-ui-react-app/src/components/Nav/Nav.tsx index acd427bc6fa..a6c40087894 100644 --- a/kafka-ui-react-app/src/components/Nav/Nav.tsx +++ b/kafka-ui-react-app/src/components/Nav/Nav.tsx @@ -5,7 +5,7 @@ import cx from 'classnames'; import ClusterMenu from './ClusterMenu'; interface Props { - isClusterListFetched: boolean, + isClusterListFetched: boolean; clusters: Cluster[]; className?: string; } @@ -16,22 +16,21 @@ const Nav: React.FC<Props> = ({ className, }) => ( <aside className={cx('menu has-shadow has-background-white', className)}> - <p className="menu-label"> - General - </p> + <p className="menu-label">General</p> <ul className="menu-list"> <li> - <NavLink exact to="/" activeClassName="is-active" title="Dashboard"> + <NavLink exact to="/ui" activeClassName="is-active" title="Dashboard"> Dashboard </NavLink> </li> </ul> - <p className="menu-label"> - Clusters - </p> + <p className="menu-label">Clusters</p> {!isClusterListFetched && <div className="loader" />} - {isClusterListFetched && clusters.map((cluster, index) => <ClusterMenu {...cluster} key={`cluster-list-item-key-${index}`}/>)} + 
{isClusterListFetched && + clusters.map((cluster, index) => ( + <ClusterMenu {...cluster} key={`cluster-list-item-key-${index}`} /> + ))} </aside> ); diff --git a/kafka-ui-react-app/src/components/Topics/Topics.tsx b/kafka-ui-react-app/src/components/Topics/Topics.tsx index ed1652c847c..b66a72d3d56 100644 --- a/kafka-ui-react-app/src/components/Topics/Topics.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topics.tsx @@ -1,12 +1,9 @@ import React from 'react'; import { ClusterName } from 'redux/interfaces'; -import { - Switch, - Route, -} from 'react-router-dom'; +import { Switch, Route } from 'react-router-dom'; +import PageLoader from 'components/common/PageLoader/PageLoader'; import ListContainer from './List/ListContainer'; import DetailsContainer from './Details/DetailsContainer'; -import PageLoader from 'components/common/PageLoader/PageLoader'; import NewContainer from './New/NewContainer'; interface Props { @@ -21,19 +18,32 @@ const Topics: React.FC<Props> = ({ isFetched, fetchTopicList, }) => { - React.useEffect(() => { fetchTopicList(clusterName); }, [fetchTopicList, clusterName]); + React.useEffect(() => { + fetchTopicList(clusterName); + }, [fetchTopicList, clusterName]); if (isFetched) { return ( <Switch> - <Route exact path="/clusters/:clusterName/topics" component={ListContainer} /> - <Route exact path="/clusters/:clusterName/topics/new" component={NewContainer} /> - <Route path="/clusters/:clusterName/topics/:topicName" component={DetailsContainer} /> + <Route + exact + path="/ui/clusters/:clusterName/topics" + component={ListContainer} + /> + <Route + exact + path="/ui/clusters/:clusterName/topics/new" + component={NewContainer} + /> + <Route + path="/ui/clusters/:clusterName/topics/:topicName" + component={DetailsContainer} + /> </Switch> ); } - return (<PageLoader />); + return <PageLoader />; }; export default Topics; diff --git a/kafka-ui-react-app/src/lib/paths.ts b/kafka-ui-react-app/src/lib/paths.ts index c01592db0ed..78e5e0167c2 100644 --- a/kafka-ui-react-app/src/lib/paths.ts +++ b/kafka-ui-react-app/src/lib/paths.ts @@ -1,17 +1,25 @@ -import { - ClusterName, - TopicName, -} from 'redux/interfaces'; +import { ClusterName, TopicName } from 'redux/interfaces'; -const clusterPath = (clusterName: ClusterName) => `/clusters/${clusterName}`; +const clusterPath = (clusterName: ClusterName) => `/ui/clusters/${clusterName}`; -export const clusterBrokersPath = (clusterName: ClusterName) => `${clusterPath(clusterName)}/brokers`; +export const clusterBrokersPath = (clusterName: ClusterName) => + `${clusterPath(clusterName)}/brokers`; +export const clusterTopicsPath = (clusterName: ClusterName) => + `${clusterPath(clusterName)}/topics`; +export const clusterTopicNewPath = (clusterName: ClusterName) => + `${clusterPath(clusterName)}/topics/new`; +export const clusterConsumerGroupsPath = (clusterName: ClusterName) => + `${clusterPath(clusterName)}/consumer-groups`; -export const clusterTopicsPath = (clusterName: ClusterName) => `${clusterPath(clusterName)}/topics`; -export const clusterTopicNewPath = (clusterName: ClusterName) => `${clusterPath(clusterName)}/topics/new`; - -export const clusterTopicPath = (clusterName: ClusterName, topicName: TopicName) => `${clusterTopicsPath(clusterName)}/${topicName}`; -export const clusterTopicSettingsPath = (clusterName: ClusterName, topicName: TopicName) => `${clusterTopicsPath(clusterName)}/${topicName}/settings`; -export const clusterTopicMessagesPath = (clusterName: ClusterName, topicName: TopicName) => 
`${clusterTopicsPath(clusterName)}/${topicName}/messages`; - -export const clusterConsumerGroupsPath = (clusterName: ClusterName) => `${clusterPath(clusterName)}/consumer-groups`; \ No newline at end of file +export const clusterTopicPath = ( + clusterName: ClusterName, + topicName: TopicName +) => `${clusterTopicsPath(clusterName)}/${topicName}`; +export const clusterTopicSettingsPath = ( + clusterName: ClusterName, + topicName: TopicName +) => `${clusterTopicsPath(clusterName)}/${topicName}/settings`; +export const clusterTopicMessagesPath = ( + clusterName: ClusterName, + topicName: TopicName +) => `${clusterTopicsPath(clusterName)}/${topicName}/messages`;
null
val
train
2020-04-14T14:36:05
"2020-04-16T11:16:57Z"
germanosin
train
provectus/kafka-ui/108_110
provectus/kafka-ui
provectus/kafka-ui/108
provectus/kafka-ui/110
[ "timestamp(timedelta=0.0, similarity=0.9255320246886437)" ]
88cc301bb6db74ce3279e80ddfc8b0d25f601cdc
cee189a86190e6ce23213fd62f7f31a527fb1f48
[]
[]
"2020-11-02T14:13:21Z"
[ "type/enhancement", "scope/frontend" ]
Display topic message content in a tree view
If the topic message content is valid JSON, it should be displayed as a tree with expandable nodes.
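A minimal sketch of the rendering logic, using react-json-tree (the library the accompanying patch adopts) and falling back to the raw string when parsing fails:
```
// Sketch: parse the message content; render a collapsible tree when it is
// valid JSON, otherwise show the raw content unchanged.
import React from 'react';
import JSONTree from 'react-json-tree';

const MessageContent: React.FC<{ content: string }> = ({ content }) => {
  try {
    return <JSONTree data={JSON.parse(content)} hideRoot invertTheme={false} />;
  } catch (e) {
    return <>{content}</>;
  }
};

export default MessageContent;
```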
[ "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx" ]
[ "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx" ]
[]
diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index 80d728a361d..bc28dec3291 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -1803,6 +1803,11 @@ "@babel/types": "^7.3.0" } }, + "@types/base16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/base16/-/base16-1.0.2.tgz", + "integrity": "sha512-oYO/U4VD1DavwrKuCSQWdLG+5K22SLPem2OQaHmFcQuwHoVeGC+JGVRji2MUqZUAIQZHEonOeVfAX09hYiLsdg==" + }, "@types/classnames": { "version": "2.2.9", "resolved": "https://registry.npmjs.org/@types/classnames/-/classnames-2.2.9.tgz", @@ -1902,8 +1907,15 @@ "@types/lodash": { "version": "4.14.149", "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.149.tgz", - "integrity": "sha512-ijGqzZt/b7BfzcK9vTrS6MFljQRPn5BFWOx8oE0GYxribu6uV+aA9zZuXI1zc/etK9E8nrgdoF2+LgUw7+9tJQ==", - "dev": true + "integrity": "sha512-ijGqzZt/b7BfzcK9vTrS6MFljQRPn5BFWOx8oE0GYxribu6uV+aA9zZuXI1zc/etK9E8nrgdoF2+LgUw7+9tJQ==" + }, + "@types/lodash.curry": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/@types/lodash.curry/-/lodash.curry-4.1.6.tgz", + "integrity": "sha512-x3ctCcmOYqRrihNNnQJW6fe/yZFCgnrIa6p80AiPQRO8Jis29bBdy1dEw1FwngoF/mCZa3Bx+33fUZvOEE635Q==", + "requires": { + "@types/lodash": "*" + } }, "@types/minimatch": { "version": "3.0.3", @@ -3393,6 +3405,11 @@ } } }, + "base16": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz", + "integrity": "sha1-4pf2DX7BAUp6lxo568ipjAtoHnA=" + }, "base64-js": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", @@ -4346,7 +4363,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/color/-/color-3.1.2.tgz", "integrity": "sha512-vXTJhHebByxZn3lDvDJYw4lR5+uB3vuoHsuYA5AKuxRVn5wzzIfQKGLBmgdVRHKTJYeK5rvJcHnrd0Li49CFpg==", - "dev": true, "requires": { "color-convert": "^1.9.1", "color-string": "^1.5.2" @@ -4356,7 +4372,6 @@ "version": "1.9.3", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", - "dev": true, "requires": { "color-name": "1.1.3" } @@ -4364,14 +4379,12 @@ "color-name": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", - "dev": true + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" }, "color-string": { "version": "1.5.3", "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", - "dev": true, "requires": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" @@ -10560,6 +10573,11 @@ "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", "dev": true }, + "lodash.curry": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz", + "integrity": "sha1-JI42By7ekGUB11lmIAqG2riyMXA=" + }, "lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -14055,6 +14073,26 @@ "whatwg-fetch": "^3.0.0" } }, + "react-base16-styling": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.8.0.tgz", + "integrity": "sha512-ElvciPaL4xpWh7ISX7ugkNS/dvoh7DpVMp4t93ngnEsS2LkMd8Gu+cDDOLis2rj4889CNK662UdjOfv3wvZg9w==", + "requires": { + 
"@types/base16": "^1.0.2", + "@types/lodash.curry": "^4.1.6", + "base16": "^1.0.0", + "color": "^3.1.2", + "csstype": "^3.0.2", + "lodash.curry": "^4.1.1" + }, + "dependencies": { + "csstype": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.0.4.tgz", + "integrity": "sha512-xc8DUsCLmjvCfoD7LTGE0ou2MIWLx0K9RCZwSHMOdynqRsP4MtUcLeqh1HcQ2dInwDTqn+3CE0/FZh1et+p4jA==" + } + } + }, "react-datepicker": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/react-datepicker/-/react-datepicker-3.0.0.tgz", @@ -14344,6 +14382,16 @@ "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.12.0.tgz", "integrity": "sha512-rPCkf/mWBtKc97aLL9/txD8DZdemK0vkA3JMLShjlJB3Pj3s+lpf1KaBzMfQrAmhMQB0n1cU/SUGgKKBCe837Q==" }, + "react-json-tree": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/react-json-tree/-/react-json-tree-0.13.0.tgz", + "integrity": "sha512-FPJUQzYWi7pvBUAnMd9ENOnAUT2mXLhe01VUbPfKH9Q4gk4FQ0fpS1e1WZ3o7g6zfYJpYJeBTo1WwlMHlMlZOw==", + "requires": { + "@types/prop-types": "^15.7.3", + "prop-types": "^15.7.2", + "react-base16-styling": "^0.8.0" + } + }, "react-multi-select-component": { "version": "2.0.12", "resolved": "https://registry.npmjs.org/react-multi-select-component/-/react-multi-select-component-2.0.12.tgz", @@ -16076,7 +16124,6 @@ "version": "0.2.2", "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", - "dev": true, "requires": { "is-arrayish": "^0.3.1" }, @@ -16084,8 +16131,7 @@ "is-arrayish": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", - "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==", - "dev": true + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" } } }, diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json index 63dd7934c1f..43e31221fba 100644 --- a/kafka-ui-react-app/package.json +++ b/kafka-ui-react-app/package.json @@ -16,6 +16,7 @@ "react-datepicker": "^3.0.0", "react-dom": "^16.12.0", "react-hook-form": "^4.5.5", + "react-json-tree": "^0.13.0", "react-multi-select-component": "^2.0.12", "react-redux": "^7.1.3", "react-router-dom": "^5.1.2", @@ -75,6 +76,7 @@ "@types/redux-thunk": "^2.1.0", "@typescript-eslint/eslint-plugin": "^2.27.0", "@typescript-eslint/parser": "^2.27.0", + "dotenv": "^8.2.0", "eslint": "^6.8.0", "eslint-config-airbnb": "^18.1.0", "eslint-config-prettier": "^6.10.1", @@ -90,8 +92,7 @@ "node-sass": "^4.13.1", "prettier": "^2.0.4", "react-scripts": "3.4.0", - "typescript": "~3.7.4", - "dotenv": "^8.2.0" + "typescript": "~3.7.4" }, "proxy": "http://localhost:8080" } diff --git a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx index a218d17edf2..99ea852b258 100644 --- a/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx +++ b/kafka-ui-react-app/src/components/Topics/Details/Messages/Messages.tsx @@ -11,7 +11,7 @@ import { import PageLoader from 'components/common/PageLoader/PageLoader'; import { format } from 'date-fns'; import DatePicker from 'react-datepicker'; - +import JSONTree from 'react-json-tree'; import 'react-datepicker/dist/react-datepicker.css'; import CustomParamButton, { CustomParamButtonType, @@ -176,34 +176,35 @@ const Messages: React.FC<Props> = ({ return 
format(Date.parse(timestamp), 'yyyy-MM-dd HH:mm:ss'); }; - const getMessageContentHeaders = React.useMemo(() => { - const message = messages[0]; - const headers: JSX.Element[] = []; - try { - const content = - typeof message.content !== 'object' - ? JSON.parse(message.content) - : message.content; - Object.keys(content).forEach((k) => - headers.push(<th key={Math.random()}>{`content.${k}`}</th>) - ); - } catch (e) { - headers.push(<th>Content</th>); - } - return headers; - }, [messages]); - const getMessageContentBody = (content: any) => { - const columns: JSX.Element[] = []; try { - const c = typeof content !== 'object' ? JSON.parse(content) : content; - Object.values(c).map((v) => - columns.push(<td key={Math.random()}>{JSON.stringify(v)}</td>) + const contentObj = + typeof content !== 'object' ? JSON.parse(content) : content; + return ( + <JSONTree + data={contentObj} + hideRoot + invertTheme={false} + theme={{ + tree: ({ style }) => ({ + style: { + ...style, + backgroundColor: undefined, + marginLeft: 0, + marginTop: 0, + }, + }), + value: ({ style }) => ({ + style: { ...style, marginLeft: 0 }, + }), + base0D: '#3273dc', + base0B: '#363636', + }} + /> ); } catch (e) { - columns.push(<td>{content}</td>); + return content; } - return columns; }; const onNext = (event: React.MouseEvent<HTMLButtonElement>) => { @@ -241,16 +242,20 @@ const Messages: React.FC<Props> = ({ <th>Timestamp</th> <th>Offset</th> <th>Partition</th> - {getMessageContentHeaders} + <th>Content</th> </tr> </thead> <tbody> {messages.map((message) => ( <tr key={`${message.timestamp}${Math.random()}`}> - <td>{getTimestampDate(message.timestamp)}</td> - <td>{message.offset}</td> - <td>{message.partition}</td> - {getMessageContentBody(message.content)} + <td style={{ width: 200 }}> + {getTimestampDate(message.timestamp)} + </td> + <td style={{ width: 150 }}>{message.offset}</td> + <td style={{ width: 100 }}>{message.partition}</td> + <td key={Math.random()} style={{ wordBreak: 'break-word' }}> + {getMessageContentBody(message.content)} + </td> </tr> ))} </tbody>
null
train
train
2020-11-02T13:21:07
"2020-11-02T09:58:48Z"
soffest
train
provectus/kafka-ui/169_172
provectus/kafka-ui
provectus/kafka-ui/169
provectus/kafka-ui/172
[ "timestamp(timedelta=71.0, similarity=0.9049740441220967)" ]
1d60db44df81ed0a1c8a935f49bdbe4c7ad4ead6
481c321d0680e3b2ce8abfe585976ffcae417c3e
[ "Thank you for your issue, this feature was applied in PR-172." ]
[]
"2021-02-01T14:51:16Z"
[ "type/enhancement", "scope/backend" ]
feature request: List topics in alphabetical order
Hi Team, Currently, the topics are listed in an arbitrary order. It would be great if they were listed in alphabetical order. Thanks, Andor Markus
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java index e18c6328943..1f1a38a98ca 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/cluster/service/ClusterService.java @@ -68,6 +68,7 @@ public List<Topic> getTopics(String name) { .map(c -> c.getTopics().values().stream() .map(clusterMapper::toTopic) + .sorted(Comparator.comparing(Topic::getName)) .collect(Collectors.toList()) ).orElse(Collections.emptyList()); }
null
test
train
2021-01-29T13:24:59
"2021-02-01T10:29:22Z"
andormarkus
train
provectus/kafka-ui/224_271
provectus/kafka-ui
provectus/kafka-ui/224
provectus/kafka-ui/271
[ "timestamp(timedelta=75.0, similarity=0.889589466097754)" ]
217b36307615effe6dad8c272e4472d82b74425e
595707edb67b93a98524265ce7f12db94ac796d4
[]
[ "pls use useCallback", "Should be\r\n```\r\nconst mapDispatchToProps = { deleteTopic };\r\n```", "I'm not sure there is a need to use a container in this case. I would suggest to provide deleteTopic from parent component. " ]
"2021-03-17T14:55:31Z"
[ "type/enhancement", "scope/frontend" ]
Add support for deleting topic
I would like to request support for deleting a topic
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts", "kafka-ui-react-app/src/redux/actions/actions.ts", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts", "kafka-ui-react-app/src/redux/reducers/topics/reducer.ts" ]
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts", "kafka-ui-react-app/src/redux/actions/actions.ts", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts", "kafka-ui-react-app/src/redux/reducers/topics/__tests__/reducer.spec.ts", "kafka-ui-react-app/src/redux/reducers/topics/reducer.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index 3d5db93eedf..9b5ad3c039b 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -1,5 +1,9 @@ import React from 'react'; -import { TopicWithDetailedInfo, ClusterName } from 'redux/interfaces'; +import { + TopicWithDetailedInfo, + ClusterName, + TopicName, +} from 'redux/interfaces'; import Breadcrumb from 'components/common/Breadcrumb/Breadcrumb'; import { Link, useParams } from 'react-router-dom'; import { clusterTopicNewPath } from 'lib/paths'; @@ -16,6 +20,7 @@ interface Props { externalTopics: TopicWithDetailedInfo[]; totalPages: number; fetchTopicsList(props: FetchTopicsListParams): void; + deleteTopic(topicName: TopicName, clusterName: ClusterName): void; } const List: React.FC<Props> = ({ @@ -24,6 +29,7 @@ const List: React.FC<Props> = ({ externalTopics, totalPages, fetchTopicsList, + deleteTopic, }) => { const { isReadOnly } = React.useContext(ClusterContext); const { clusterName } = useParams<{ clusterName: ClusterName }>(); @@ -82,17 +88,23 @@ const List: React.FC<Props> = ({ <th>Total Partitions</th> <th>Out of sync replicas</th> <th>Type</th> + <th> </th> </tr> </thead> <tbody> + {items.map((topic) => ( + <ListItem + clusterName={clusterName} + key={topic.name} + topic={topic} + deleteTopic={deleteTopic} + /> + ))} {items.length === 0 && ( <tr> <td colSpan={10}>No topics found</td> </tr> )} - {items.map((topic) => ( - <ListItem key={topic.name} topic={topic} /> - ))} </tbody> </table> <Pagination totalPages={totalPages} /> diff --git a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts index 6ade3cf75a4..18f1df98aae 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts +++ b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts @@ -1,6 +1,6 @@ import { connect } from 'react-redux'; import { RootState } from 'redux/interfaces'; -import { fetchTopicsList } from 'redux/actions'; +import { fetchTopicsList, deleteTopic } from 'redux/actions'; import { getTopicList, getExternalTopicList, @@ -18,6 +18,7 @@ const mapStateToProps = (state: RootState) => ({ const mapDispatchToProps = { fetchTopicsList, + deleteTopic, }; export default connect(mapStateToProps, mapDispatchToProps)(List); diff --git a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx index de2c2c78f2c..c467b9831a5 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx @@ -1,14 +1,22 @@ import React from 'react'; import cx from 'classnames'; import { NavLink } from 'react-router-dom'; -import { TopicWithDetailedInfo } from 'redux/interfaces'; +import { + ClusterName, + TopicName, + TopicWithDetailedInfo, +} from 'redux/interfaces'; interface ListItemProps { topic: TopicWithDetailedInfo; + deleteTopic: (clusterName: ClusterName, topicName: TopicName) => void; + clusterName: ClusterName; } const ListItem: React.FC<ListItemProps> = ({ topic: { name, internal, partitions }, + deleteTopic, + clusterName, }) => { const outOfSyncReplicas = React.useMemo(() => { if (partitions === undefined || partitions.length === 0) { @@ -21,6 +29,10 @@ const ListItem: React.FC<ListItemProps> = ({ }, 0); }, [partitions]); + const deleteTopicHandler = React.useCallback(() => { + 
deleteTopic(clusterName, name); + }, [clusterName, name]); + return ( <tr> <td> @@ -42,6 +54,17 @@ const ListItem: React.FC<ListItemProps> = ({ {internal ? 'Internal' : 'External'} </div> </td> + <td> + <button + type="button" + className="is-small button is-danger" + onClick={deleteTopicHandler} + > + <span className="icon is-small"> + <i className="far fa-trash-alt" /> + </span> + </button> + </td> </tr> ); }; diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx index bd065658d66..dda12424135 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx @@ -16,6 +16,7 @@ describe('List', () => { externalTopics={[]} totalPages={1} fetchTopicsList={jest.fn()} + deleteTopic={jest.fn()} /> </ClusterContext.Provider> </StaticRouter> @@ -35,6 +36,7 @@ describe('List', () => { externalTopics={[]} totalPages={1} fetchTopicsList={jest.fn()} + deleteTopic={jest.fn()} /> </ClusterContext.Provider> </StaticRouter> diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx new file mode 100644 index 00000000000..5c3c72e4d44 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx @@ -0,0 +1,21 @@ +import { shallow } from 'enzyme'; +import React from 'react'; +import ListItem from '../ListItem'; + +describe('ListItem', () => { + it('triggers the deleting thunk when clicked on the delete button', () => { + const mockDelete = jest.fn(); + const topic = { name: 'topic', id: 'id' }; + const clustterName = 'cluster'; + const component = shallow( + <ListItem + topic={topic} + deleteTopic={mockDelete} + clusterName={clustterName} + /> + ); + component.find('button').simulate('click'); + expect(mockDelete).toBeCalledTimes(1); + expect(mockDelete).toBeCalledWith(clustterName, topic.name); + }); +}); diff --git a/kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts b/kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts index 522b7e1f121..382f8b2b848 100644 --- a/kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts +++ b/kafka-ui-react-app/src/redux/actions/__test__/thunks.spec.ts @@ -22,6 +22,7 @@ const mockStoreCreator: MockStoreCreator< const store: MockStoreEnhanced<RootState, DispatchExts> = mockStoreCreator(); const clusterName = 'local'; +const topicName = 'localTopic'; const subject = 'test'; describe('Thunks', () => { @@ -137,4 +138,34 @@ describe('Thunks', () => { } }); }); + + describe('deleteTopis', () => { + it('creates DELETE_TOPIC__SUCCESS when deleting existing topic', async () => { + fetchMock.deleteOnce( + `/api/clusters/${clusterName}/topics/${topicName}`, + 200 + ); + await store.dispatch(thunks.deleteTopic(clusterName, topicName)); + expect(store.getActions()).toEqual([ + actions.deleteTopicAction.request(), + actions.deleteTopicAction.success(topicName), + ]); + }); + + it('creates DELETE_TOPIC__FAILURE when deleting existing topic', async () => { + fetchMock.postOnce( + `/api/clusters/${clusterName}/topics/${topicName}`, + 404 + ); + try { + await store.dispatch(thunks.deleteTopic(clusterName, topicName)); + } catch (error) { + expect(error.status).toEqual(404); + expect(store.getActions()).toEqual([ + actions.deleteTopicAction.request(), + actions.deleteTopicAction.failure(), + ]); + } + }); + }); }); diff --git 
a/kafka-ui-react-app/src/redux/actions/actions.ts b/kafka-ui-react-app/src/redux/actions/actions.ts index 7e9b1bc64b1..a7e4132bd94 100644 --- a/kafka-ui-react-app/src/redux/actions/actions.ts +++ b/kafka-ui-react-app/src/redux/actions/actions.ts @@ -1,5 +1,5 @@ import { createAsyncAction } from 'typesafe-actions'; -import { ConsumerGroupID, TopicsState } from 'redux/interfaces'; +import { ConsumerGroupID, TopicName, TopicsState } from 'redux/interfaces'; import { Cluster, @@ -79,6 +79,12 @@ export const updateTopicAction = createAsyncAction( 'PATCH_TOPIC__FAILURE' )<undefined, TopicsState, undefined>(); +export const deleteTopicAction = createAsyncAction( + 'DELETE_TOPIC__REQUEST', + 'DELETE_TOPIC__SUCCESS', + 'DELETE_TOPIC__FAILURE' +)<undefined, TopicName, undefined>(); + export const fetchConsumerGroupsAction = createAsyncAction( 'GET_CONSUMER_GROUPS__REQUEST', 'GET_CONSUMER_GROUPS__SUCCESS', diff --git a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts index da5616b174e..0b3e024182c 100644 --- a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts +++ b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts @@ -230,3 +230,19 @@ export const updateTopic = ( dispatch(actions.updateTopicAction.failure()); } }; + +export const deleteTopic = ( + clusterName: ClusterName, + topicName: TopicName +): PromiseThunkResult => async (dispatch) => { + dispatch(actions.deleteTopicAction.request()); + try { + await topicsApiClient.deleteTopic({ + clusterName, + topicName, + }); + dispatch(actions.deleteTopicAction.success(topicName)); + } catch (e) { + dispatch(actions.deleteTopicAction.failure()); + } +}; diff --git a/kafka-ui-react-app/src/redux/reducers/topics/__tests__/reducer.spec.ts b/kafka-ui-react-app/src/redux/reducers/topics/__tests__/reducer.spec.ts new file mode 100644 index 00000000000..46bbcc0140d --- /dev/null +++ b/kafka-ui-react-app/src/redux/reducers/topics/__tests__/reducer.spec.ts @@ -0,0 +1,29 @@ +import { deleteTopicAction } from 'redux/actions'; +import reducer from '../reducer'; + +describe('topics reducer', () => { + it('deletes the topic from the list on DELETE_TOPIC__SUCCESS', () => { + const topic = { + name: 'topic', + id: 'id', + }; + expect( + reducer( + { + byName: { + topic, + }, + allNames: [topic.name], + messages: [], + totalPages: 1, + }, + deleteTopicAction.success(topic.name) + ) + ).toEqual({ + byName: {}, + allNames: [], + messages: [], + totalPages: 1, + }); + }); +}); diff --git a/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts b/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts index 665bb57e319..464bd6e6a11 100644 --- a/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts +++ b/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts @@ -45,6 +45,14 @@ const reducer = (state = initialState, action: Action): TopicsState => { return action.payload; case getType(actions.fetchTopicMessagesAction.success): return transformTopicMessages(state, action.payload); + case getType(actions.deleteTopicAction.success): { + const newState: TopicsState = { ...state }; + delete newState.byName[action.payload]; + newState.allNames = newState.allNames.filter( + (name) => name !== action.payload + ); + return newState; + } default: return state; }
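One caveat worth noting about the reducer in this patch: `{ ...state }` is a shallow copy, so the subsequent `delete newState.byName[...]` still mutates the byName object shared with the previous state. A sketch of the same deletion done without mutation (the TopicsState shape is simplified to the two fields the reducer touches):

```typescript
interface TopicsState {
  byName: Record<string, { name: string }>;
  allNames: string[];
}

// Removes a topic without touching the previous state: byName is rebuilt
// via rest-destructuring instead of calling `delete` on a shared object.
function removeTopic(state: TopicsState, topicName: string): TopicsState {
  const { [topicName]: _removed, ...byName } = state.byName;
  return {
    ...state,
    byName,
    allNames: state.allNames.filter((name) => name !== topicName),
  };
}
```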
null
val
train
2021-03-18T19:16:59
"2021-03-03T17:38:05Z"
ommr101
train
provectus/kafka-ui/243_771
provectus/kafka-ui
provectus/kafka-ui/243
provectus/kafka-ui/771
[ "timestamp(timedelta=0.0, similarity=0.9007291333681994)" ]
40678809661cb2659d4c93b2fbcd0cd940b4006a
d035aa9e80bca48e3d6f621d698ee0441bf994f7
[]
[ "```suggestion\r\n ? 'Are you sure you want to remove selected topics?'\r\n : 'Are you sure you want to purge messages of selected topics?'}\r\n```" ]
"2021-08-05T10:11:48Z"
[ "type/enhancement", "scope/frontend" ]
Delete topics using multi-selection
As a user, I would like to select multiple Kafka topics (multi-selection) and then delete them instead of deleting them one by one.
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index 9440dff902f..a2f21437ae1 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -12,6 +12,7 @@ import usePagination from 'lib/hooks/usePagination'; import ClusterContext from 'components/contexts/ClusterContext'; import PageLoader from 'components/common/PageLoader/PageLoader'; import Pagination from 'components/common/Pagination/Pagination'; +import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationModal'; import { GetTopicsRequest, TopicColumnsToSort } from 'generated-sources'; import SortableColumnHeader from 'components/common/table/SortableCulumnHeader/SortableColumnHeader'; import Search from 'components/common/Search/Search'; @@ -25,6 +26,8 @@ export interface TopicsListProps { totalPages: number; fetchTopicsList(props: GetTopicsRequest): void; deleteTopic(topicName: TopicName, clusterName: ClusterName): void; + deleteTopics(topicName: TopicName, clusterNames: ClusterName[]): void; + clearTopicsMessages(topicName: TopicName, clusterNames: ClusterName[]): void; clearTopicMessages( topicName: TopicName, clusterName: ClusterName, @@ -42,7 +45,9 @@ const List: React.FC<TopicsListProps> = ({ totalPages, fetchTopicsList, deleteTopic, + deleteTopics, clearTopicMessages, + clearTopicsMessages, search, orderBy, setTopicsSearch, @@ -78,6 +83,45 @@ const List: React.FC<TopicsListProps> = ({ history.push(`${pathname}?page=1&perPage=${perPage || PER_PAGE}`); }, [showInternal]); + const [confirmationModal, setConfirmationModal] = React.useState< + '' | 'deleteTopics' | 'purgeMessages' + >(''); + + const closeConfirmationModal = () => { + setConfirmationModal(''); + }; + + const [selectedTopics, setSelectedTopics] = React.useState<Set<string>>( + new Set() + ); + + const clearSelectedTopics = () => { + setSelectedTopics(new Set()); + }; + + const toggleTopicSelected = (topicName: string) => { + setSelectedTopics((prevState) => { + const newState = new Set(prevState); + if (newState.has(topicName)) { + newState.delete(topicName); + } else { + newState.add(topicName); + } + return newState; + }); + }; + + const deleteTopicsHandler = React.useCallback(() => { + deleteTopics(clusterName, Array.from(selectedTopics)); + closeConfirmationModal(); + clearSelectedTopics(); + }, [clusterName, selectedTopics]); + const purgeMessagesHandler = React.useCallback(() => { + clearTopicsMessages(clusterName, Array.from(selectedTopics)); + closeConfirmationModal(); + clearSelectedTopics(); + }, [clusterName, selectedTopics]); + return ( <div className="section"> <Breadcrumb>{showInternal ? `All Topics` : `External Topics`}</Breadcrumb> @@ -119,9 +163,47 @@ const List: React.FC<TopicsListProps> = ({ <PageLoader /> ) : ( <div className="box"> + {selectedTopics.size > 0 && ( + <> + <div className="buttons"> + <button + type="button" + className="button is-danger" + onClick={() => { + setConfirmationModal('deleteTopics'); + }} + > + Delete selected topics + </button> + <button + type="button" + className="button is-danger" + onClick={() => { + setConfirmationModal('purgeMessages'); + }} + > + Purge messages of selected topics + </button> + </div> + <ConfirmationModal + isOpen={confirmationModal !== ''} + onCancel={closeConfirmationModal} + onConfirm={ + confirmationModal === 'deleteTopics' + ? deleteTopicsHandler + : purgeMessagesHandler + } + > + {confirmationModal === 'deleteTopics' + ? 
'Are you sure you want to remove selected topics?' + : 'Are you sure you want to purge messages of selected topics?'} + </ConfirmationModal> + </> + )} <table className="table is-fullwidth"> <thead> <tr> + <th> </th> <SortableColumnHeader value={TopicColumnsToSort.NAME} title="Topic Name" @@ -154,6 +236,8 @@ const List: React.FC<TopicsListProps> = ({ clusterName={clusterName} key={topic.name} topic={topic} + selected={selectedTopics.has(topic.name)} + toggleTopicSelected={toggleTopicSelected} deleteTopic={deleteTopic} clearTopicMessages={clearTopicMessages} /> diff --git a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts index 2f2a3c02350..2c556d8f3e8 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts +++ b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts @@ -3,6 +3,8 @@ import { RootState } from 'redux/interfaces'; import { fetchTopicsList, deleteTopic, + deleteTopics, + clearTopicsMessages, clearTopicMessages, setTopicsSearchAction, setTopicsOrderByAction, @@ -28,6 +30,8 @@ const mapStateToProps = (state: RootState) => ({ const mapDispatchToProps = { fetchTopicsList, deleteTopic, + deleteTopics, + clearTopicsMessages, clearTopicMessages, setTopicsSearch: setTopicsSearchAction, setTopicsOrderBy: setTopicsOrderByAction, diff --git a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx index 2a533027aa6..903c988c8aa 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx @@ -14,6 +14,8 @@ import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; export interface ListItemProps { topic: TopicWithDetailedInfo; + selected: boolean; + toggleTopicSelected(topicName: TopicName): void; deleteTopic: (clusterName: ClusterName, topicName: TopicName) => void; clusterName: ClusterName; clearTopicMessages(topicName: TopicName, clusterName: ClusterName): void; @@ -28,6 +30,8 @@ const ListItem: React.FC<ListItemProps> = ({ replicationFactor, cleanUpPolicy, }, + selected, + toggleTopicSelected, deleteTopic, clusterName, clearTopicMessages, @@ -70,6 +74,17 @@ const ListItem: React.FC<ListItemProps> = ({ return ( <tr> + <td> + {!internal && ( + <input + type="checkbox" + checked={selected} + onChange={() => { + toggleTopicSelected(name); + }} + /> + )} + </td> <td className="has-text-overflow-ellipsis"> <NavLink exact diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx index bab7e7fdf68..85dcc128067 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx @@ -1,6 +1,7 @@ import React from 'react'; import { mount, ReactWrapper } from 'enzyme'; -import { Router } from 'react-router-dom'; +import { Route, Router } from 'react-router-dom'; +import { act } from 'react-dom/test-utils'; import ClusterContext, { ContextProps, } from 'components/contexts/ClusterContext'; @@ -8,6 +9,13 @@ import List, { TopicsListProps } from 'components/Topics/List/List'; import { createMemoryHistory } from 'history'; import { StaticRouter } from 'react-router'; import Search from 'components/common/Search/Search'; +import { externalTopicPayload } from 'redux/reducers/topics/__test__/fixtures'; +import { ConfirmationModalProps } from 
'components/common/ConfirmationModal/ConfirmationModal'; + +jest.mock( + 'components/common/ConfirmationModal/ConfirmationModal', + () => 'mock-ConfirmationModal' +); describe('List', () => { const setupComponent = (props: Partial<TopicsListProps> = {}) => ( @@ -17,6 +25,8 @@ describe('List', () => { totalPages={1} fetchTopicsList={jest.fn()} deleteTopic={jest.fn()} + deleteTopics={jest.fn()} + clearTopicsMessages={jest.fn()} clearTopicMessages={jest.fn()} search="" orderBy={null} @@ -123,4 +133,130 @@ describe('List', () => { expect(mockedHistory.push).toHaveBeenCalledWith('/?page=1&perPage=25'); }); }); + + describe('when some list items are selected', () => { + const mockDeleteTopics = jest.fn(); + const mockClearTopicsMessages = jest.fn(); + jest.useFakeTimers(); + const pathname = '/ui/clusters/local/topics'; + const component = mount( + <StaticRouter location={{ pathname }}> + <Route path="/ui/clusters/:clusterName"> + <ClusterContext.Provider + value={{ + isReadOnly: false, + hasKafkaConnectConfigured: true, + hasSchemaRegistryConfigured: true, + }} + > + {setupComponent({ + topics: [ + externalTopicPayload, + { ...externalTopicPayload, name: 'external.topic2' }, + ], + deleteTopics: mockDeleteTopics, + clearTopicsMessages: mockClearTopicsMessages, + })} + </ClusterContext.Provider> + </Route> + </StaticRouter> + ); + const getCheckboxInput = (at: number) => + component.find('ListItem').at(at).find('input[type="checkbox"]').at(0); + + const getConfirmationModal = () => + component.find('mock-ConfirmationModal').at(0); + + it('renders delete/purge buttons', () => { + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + + // check first item + getCheckboxInput(0).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(1); + + // check second item + getCheckboxInput(1).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeTruthy(); + expect(component.find('.buttons').length).toEqual(1); + + // uncheck second item + getCheckboxInput(1).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(1); + + // uncheck first item + getCheckboxInput(0).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + }); + + const checkActionButtonClick = async (action: string) => { + const buttonIndex = action === 'deleteTopics' ? 0 : 1; + const confirmationText = + action === 'deleteTopics' + ? 'Are you sure you want to remove selected topics?' + : 'Are you sure you want to purge messages of selected topics?'; + const mockFn = + action === 'deleteTopics' ? 
mockDeleteTopics : mockClearTopicsMessages; + getCheckboxInput(0).simulate('change'); + getCheckboxInput(1).simulate('change'); + let modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeFalsy(); + component + .find('.buttons') + .find('button') + .at(buttonIndex) + .simulate('click'); + expect(modal.text()).toEqual(confirmationText); + modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeTruthy(); + await act(async () => { + (modal.props() as ConfirmationModalProps).onConfirm(); + }); + component.update(); + expect(getConfirmationModal().prop('isOpen')).toBeFalsy(); + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + expect(mockFn).toBeCalledTimes(1); + expect(mockFn).toBeCalledWith('local', [ + externalTopicPayload.name, + 'external.topic2', + ]); + }; + + it('triggers the deleteTopics when clicked on the delete button', async () => { + await checkActionButtonClick('deleteTopics'); + }); + + it('triggers the clearTopicsMessages when clicked on the clear button', async () => { + await checkActionButtonClick('clearTopicsMessages'); + }); + + it('closes ConfirmationModal when clicked on the cancel button', async () => { + getCheckboxInput(0).simulate('change'); + getCheckboxInput(1).simulate('change'); + let modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeFalsy(); + component.find('.buttons').find('button').at(0).simulate('click'); + modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeTruthy(); + await act(async () => { + (modal.props() as ConfirmationModalProps).onCancel(); + }); + component.update(); + expect(getConfirmationModal().prop('isOpen')).toBeFalsy(); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeTruthy(); + expect(component.find('.buttons').length).toEqual(1); + expect(mockDeleteTopics).toBeCalledTimes(0); + }); + }); }); diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx index f6521553749..9af84430eda 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx @@ -10,6 +10,7 @@ import ListItem, { ListItemProps } from 'components/Topics/List/ListItem'; const mockDelete = jest.fn(); const clusterName = 'local'; const mockDeleteMessages = jest.fn(); +const mockToggleTopicSelected = jest.fn(); jest.mock( 'components/common/ConfirmationModal/ConfirmationModal', @@ -23,6 +24,8 @@ describe('ListItem', () => { deleteTopic={mockDelete} clusterName={clusterName} clearTopicMessages={mockDeleteMessages} + selected={false} + toggleTopicSelected={mockToggleTopicSelected} {...props} /> ); @@ -73,6 +76,18 @@ describe('ListItem', () => { expect(wrapper.find('.tag.is-light').text()).toEqual('Internal'); }); + it('renders without checkbox for internal topic', () => { + const wrapper = mount( + <StaticRouter> + <table> + <tbody>{setupComponent()}</tbody> + </table> + </StaticRouter> + ); + + expect(wrapper.find('td').at(0).html()).toEqual('<td></td>'); + }); + it('renders correct tags for external topic', () => { const wrapper = mount( <StaticRouter> @@ -85,6 +100,28 @@ describe('ListItem', () => { expect(wrapper.find('.tag.is-primary').text()).toEqual('External'); }); + it('renders with checkbox for external topic', () => { + const 
wrapper = mount( + <StaticRouter> + <table> + <tbody>{setupComponent({ topic: externalTopicPayload })}</tbody> + </table> + </StaticRouter> + ); + + expect(wrapper.find('td').at(0).html()).toEqual( + '<td><input type="checkbox"></td>' + ); + }); + + it('triggers the toggleTopicSelected when clicked on the checkbox input', () => { + const wrapper = shallow(setupComponent({ topic: externalTopicPayload })); + expect(wrapper.exists('input')).toBeTruthy(); + wrapper.find('input[type="checkbox"]').at(0).simulate('change'); + expect(mockToggleTopicSelected).toBeCalledTimes(1); + expect(mockToggleTopicSelected).toBeCalledWith(externalTopicPayload.name); + }); + it('renders correct out of sync replicas number', () => { const wrapper = mount( <StaticRouter> @@ -98,6 +135,6 @@ describe('ListItem', () => { </StaticRouter> ); - expect(wrapper.find('td').at(2).text()).toEqual('0'); + expect(wrapper.find('td').at(3).text()).toEqual('0'); }); }); diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap index 29e22250df2..59f2f1930e3 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap @@ -27,7 +27,9 @@ exports[`List when it does not have readonly flag matches the snapshot 1`] = ` <List areTopicsFetching={false} clearTopicMessages={[MockFunction]} + clearTopicsMessages={[MockFunction]} deleteTopic={[MockFunction]} + deleteTopics={[MockFunction]} fetchTopicsList={ [MockFunction] { "calls": Array [ @@ -165,6 +167,9 @@ exports[`List when it does not have readonly flag matches the snapshot 1`] = ` > <thead> <tr> + <th> + + </th> <ListHeaderCell orderBy={null} setOrderBy={[MockFunction]} diff --git a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts index a00bee66bff..55dd2e6baff 100644 --- a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts +++ b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts @@ -87,6 +87,14 @@ export const clearTopicMessages = } }; +export const clearTopicsMessages = + (clusterName: ClusterName, topicsName: TopicName[]): PromiseThunkResult => + async (dispatch) => { + topicsName.forEach((topicName) => { + dispatch(clearTopicMessages(clusterName, topicName)); + }); + }; + export const fetchTopicDetails = (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult => async (dispatch, getState) => { @@ -285,6 +293,14 @@ export const deleteTopic = } }; +export const deleteTopics = + (clusterName: ClusterName, topicsName: TopicName[]): PromiseThunkResult => + async (dispatch) => { + topicsName.forEach((topicName) => { + dispatch(deleteTopic(clusterName, topicName)); + }); + }; + export const fetchTopicConsumerGroups = (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult => async (dispatch, getState) => {
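The selection logic in this patch keeps the checked topic names in a Set and copies it on every toggle so React sees a new state reference. The same pattern extracted into a reusable hook, as a sketch (the name useSelection is not from the codebase):

```typescript
import { useCallback, useState } from 'react';

// Tracks a selected set of string keys; each toggle builds a fresh Set so
// React detects the state update by reference change.
export function useSelection() {
  const [selected, setSelected] = useState<Set<string>>(new Set());

  const toggle = useCallback((key: string) => {
    setSelected((prev) => {
      const next = new Set(prev);
      if (next.has(key)) next.delete(key);
      else next.add(key);
      return next;
    });
  }, []);

  const clear = useCallback(() => setSelected(new Set()), []);

  return { selected, toggle, clear };
}
```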
null
val
train
2021-08-05T08:05:21
"2021-03-11T14:37:37Z"
IldarAlmakaev
train
provectus/kafka-ui/244_771
provectus/kafka-ui
provectus/kafka-ui/244
provectus/kafka-ui/771
[ "timestamp(timedelta=0.0, similarity=0.8809084063905879)" ]
40678809661cb2659d4c93b2fbcd0cd940b4006a
d035aa9e80bca48e3d6f621d698ee0441bf994f7
[]
[ "```suggestion\r\n ? 'Are you sure you want to remove selected topics?'\r\n : 'Are you sure you want to purge messages of selected topics?'}\r\n```" ]
"2021-08-05T10:11:48Z"
[ "type/enhancement", "scope/frontend" ]
Purge messages of multiple topics using multi-selection
As a user, I'd like to select multiple Kafka topics and then clear/purge their messages
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListContainer.ts", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index 9440dff902f..a2f21437ae1 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -12,6 +12,7 @@ import usePagination from 'lib/hooks/usePagination'; import ClusterContext from 'components/contexts/ClusterContext'; import PageLoader from 'components/common/PageLoader/PageLoader'; import Pagination from 'components/common/Pagination/Pagination'; +import ConfirmationModal from 'components/common/ConfirmationModal/ConfirmationModal'; import { GetTopicsRequest, TopicColumnsToSort } from 'generated-sources'; import SortableColumnHeader from 'components/common/table/SortableCulumnHeader/SortableColumnHeader'; import Search from 'components/common/Search/Search'; @@ -25,6 +26,8 @@ export interface TopicsListProps { totalPages: number; fetchTopicsList(props: GetTopicsRequest): void; deleteTopic(topicName: TopicName, clusterName: ClusterName): void; + deleteTopics(topicName: TopicName, clusterNames: ClusterName[]): void; + clearTopicsMessages(topicName: TopicName, clusterNames: ClusterName[]): void; clearTopicMessages( topicName: TopicName, clusterName: ClusterName, @@ -42,7 +45,9 @@ const List: React.FC<TopicsListProps> = ({ totalPages, fetchTopicsList, deleteTopic, + deleteTopics, clearTopicMessages, + clearTopicsMessages, search, orderBy, setTopicsSearch, @@ -78,6 +83,45 @@ const List: React.FC<TopicsListProps> = ({ history.push(`${pathname}?page=1&perPage=${perPage || PER_PAGE}`); }, [showInternal]); + const [confirmationModal, setConfirmationModal] = React.useState< + '' | 'deleteTopics' | 'purgeMessages' + >(''); + + const closeConfirmationModal = () => { + setConfirmationModal(''); + }; + + const [selectedTopics, setSelectedTopics] = React.useState<Set<string>>( + new Set() + ); + + const clearSelectedTopics = () => { + setSelectedTopics(new Set()); + }; + + const toggleTopicSelected = (topicName: string) => { + setSelectedTopics((prevState) => { + const newState = new Set(prevState); + if (newState.has(topicName)) { + newState.delete(topicName); + } else { + newState.add(topicName); + } + return newState; + }); + }; + + const deleteTopicsHandler = React.useCallback(() => { + deleteTopics(clusterName, Array.from(selectedTopics)); + closeConfirmationModal(); + clearSelectedTopics(); + }, [clusterName, selectedTopics]); + const purgeMessagesHandler = React.useCallback(() => { + clearTopicsMessages(clusterName, Array.from(selectedTopics)); + closeConfirmationModal(); + clearSelectedTopics(); + }, [clusterName, selectedTopics]); + return ( <div className="section"> <Breadcrumb>{showInternal ? `All Topics` : `External Topics`}</Breadcrumb> @@ -119,9 +163,47 @@ const List: React.FC<TopicsListProps> = ({ <PageLoader /> ) : ( <div className="box"> + {selectedTopics.size > 0 && ( + <> + <div className="buttons"> + <button + type="button" + className="button is-danger" + onClick={() => { + setConfirmationModal('deleteTopics'); + }} + > + Delete selected topics + </button> + <button + type="button" + className="button is-danger" + onClick={() => { + setConfirmationModal('purgeMessages'); + }} + > + Purge messages of selected topics + </button> + </div> + <ConfirmationModal + isOpen={confirmationModal !== ''} + onCancel={closeConfirmationModal} + onConfirm={ + confirmationModal === 'deleteTopics' + ? deleteTopicsHandler + : purgeMessagesHandler + } + > + {confirmationModal === 'deleteTopics' + ? 
'Are you sure you want to remove selected topics?' + : 'Are you sure you want to purge messages of selected topics?'} + </ConfirmationModal> + </> + )} <table className="table is-fullwidth"> <thead> <tr> + <th> </th> <SortableColumnHeader value={TopicColumnsToSort.NAME} title="Topic Name" @@ -154,6 +236,8 @@ const List: React.FC<TopicsListProps> = ({ clusterName={clusterName} key={topic.name} topic={topic} + selected={selectedTopics.has(topic.name)} + toggleTopicSelected={toggleTopicSelected} deleteTopic={deleteTopic} clearTopicMessages={clearTopicMessages} /> diff --git a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts index 2f2a3c02350..2c556d8f3e8 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts +++ b/kafka-ui-react-app/src/components/Topics/List/ListContainer.ts @@ -3,6 +3,8 @@ import { RootState } from 'redux/interfaces'; import { fetchTopicsList, deleteTopic, + deleteTopics, + clearTopicsMessages, clearTopicMessages, setTopicsSearchAction, setTopicsOrderByAction, @@ -28,6 +30,8 @@ const mapStateToProps = (state: RootState) => ({ const mapDispatchToProps = { fetchTopicsList, deleteTopic, + deleteTopics, + clearTopicsMessages, clearTopicMessages, setTopicsSearch: setTopicsSearchAction, setTopicsOrderBy: setTopicsOrderByAction, diff --git a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx index 2a533027aa6..903c988c8aa 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx @@ -14,6 +14,8 @@ import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; export interface ListItemProps { topic: TopicWithDetailedInfo; + selected: boolean; + toggleTopicSelected(topicName: TopicName): void; deleteTopic: (clusterName: ClusterName, topicName: TopicName) => void; clusterName: ClusterName; clearTopicMessages(topicName: TopicName, clusterName: ClusterName): void; @@ -28,6 +30,8 @@ const ListItem: React.FC<ListItemProps> = ({ replicationFactor, cleanUpPolicy, }, + selected, + toggleTopicSelected, deleteTopic, clusterName, clearTopicMessages, @@ -70,6 +74,17 @@ const ListItem: React.FC<ListItemProps> = ({ return ( <tr> + <td> + {!internal && ( + <input + type="checkbox" + checked={selected} + onChange={() => { + toggleTopicSelected(name); + }} + /> + )} + </td> <td className="has-text-overflow-ellipsis"> <NavLink exact diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx index bab7e7fdf68..85dcc128067 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/List.spec.tsx @@ -1,6 +1,7 @@ import React from 'react'; import { mount, ReactWrapper } from 'enzyme'; -import { Router } from 'react-router-dom'; +import { Route, Router } from 'react-router-dom'; +import { act } from 'react-dom/test-utils'; import ClusterContext, { ContextProps, } from 'components/contexts/ClusterContext'; @@ -8,6 +9,13 @@ import List, { TopicsListProps } from 'components/Topics/List/List'; import { createMemoryHistory } from 'history'; import { StaticRouter } from 'react-router'; import Search from 'components/common/Search/Search'; +import { externalTopicPayload } from 'redux/reducers/topics/__test__/fixtures'; +import { ConfirmationModalProps } from 
'components/common/ConfirmationModal/ConfirmationModal'; + +jest.mock( + 'components/common/ConfirmationModal/ConfirmationModal', + () => 'mock-ConfirmationModal' +); describe('List', () => { const setupComponent = (props: Partial<TopicsListProps> = {}) => ( @@ -17,6 +25,8 @@ describe('List', () => { totalPages={1} fetchTopicsList={jest.fn()} deleteTopic={jest.fn()} + deleteTopics={jest.fn()} + clearTopicsMessages={jest.fn()} clearTopicMessages={jest.fn()} search="" orderBy={null} @@ -123,4 +133,130 @@ describe('List', () => { expect(mockedHistory.push).toHaveBeenCalledWith('/?page=1&perPage=25'); }); }); + + describe('when some list items are selected', () => { + const mockDeleteTopics = jest.fn(); + const mockClearTopicsMessages = jest.fn(); + jest.useFakeTimers(); + const pathname = '/ui/clusters/local/topics'; + const component = mount( + <StaticRouter location={{ pathname }}> + <Route path="/ui/clusters/:clusterName"> + <ClusterContext.Provider + value={{ + isReadOnly: false, + hasKafkaConnectConfigured: true, + hasSchemaRegistryConfigured: true, + }} + > + {setupComponent({ + topics: [ + externalTopicPayload, + { ...externalTopicPayload, name: 'external.topic2' }, + ], + deleteTopics: mockDeleteTopics, + clearTopicsMessages: mockClearTopicsMessages, + })} + </ClusterContext.Provider> + </Route> + </StaticRouter> + ); + const getCheckboxInput = (at: number) => + component.find('ListItem').at(at).find('input[type="checkbox"]').at(0); + + const getConfirmationModal = () => + component.find('mock-ConfirmationModal').at(0); + + it('renders delete/purge buttons', () => { + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + + // check first item + getCheckboxInput(0).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(1); + + // check second item + getCheckboxInput(1).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeTruthy(); + expect(component.find('.buttons').length).toEqual(1); + + // uncheck second item + getCheckboxInput(1).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(1); + + // uncheck first item + getCheckboxInput(0).simulate('change'); + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + }); + + const checkActionButtonClick = async (action: string) => { + const buttonIndex = action === 'deleteTopics' ? 0 : 1; + const confirmationText = + action === 'deleteTopics' + ? 'Are you sure you want to remove selected topics?' + : 'Are you sure you want to purge messages of selected topics?'; + const mockFn = + action === 'deleteTopics' ? 
mockDeleteTopics : mockClearTopicsMessages; + getCheckboxInput(0).simulate('change'); + getCheckboxInput(1).simulate('change'); + let modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeFalsy(); + component + .find('.buttons') + .find('button') + .at(buttonIndex) + .simulate('click'); + expect(modal.text()).toEqual(confirmationText); + modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeTruthy(); + await act(async () => { + (modal.props() as ConfirmationModalProps).onConfirm(); + }); + component.update(); + expect(getConfirmationModal().prop('isOpen')).toBeFalsy(); + expect(getCheckboxInput(0).props().checked).toBeFalsy(); + expect(getCheckboxInput(1).props().checked).toBeFalsy(); + expect(component.find('.buttons').length).toEqual(0); + expect(mockFn).toBeCalledTimes(1); + expect(mockFn).toBeCalledWith('local', [ + externalTopicPayload.name, + 'external.topic2', + ]); + }; + + it('triggers the deleteTopics when clicked on the delete button', async () => { + await checkActionButtonClick('deleteTopics'); + }); + + it('triggers the clearTopicsMessages when clicked on the clear button', async () => { + await checkActionButtonClick('clearTopicsMessages'); + }); + + it('closes ConfirmationModal when clicked on the cancel button', async () => { + getCheckboxInput(0).simulate('change'); + getCheckboxInput(1).simulate('change'); + let modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeFalsy(); + component.find('.buttons').find('button').at(0).simulate('click'); + modal = getConfirmationModal(); + expect(modal.prop('isOpen')).toBeTruthy(); + await act(async () => { + (modal.props() as ConfirmationModalProps).onCancel(); + }); + component.update(); + expect(getConfirmationModal().prop('isOpen')).toBeFalsy(); + expect(getCheckboxInput(0).props().checked).toBeTruthy(); + expect(getCheckboxInput(1).props().checked).toBeTruthy(); + expect(component.find('.buttons').length).toEqual(1); + expect(mockDeleteTopics).toBeCalledTimes(0); + }); + }); }); diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx index f6521553749..9af84430eda 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/ListItem.spec.tsx @@ -10,6 +10,7 @@ import ListItem, { ListItemProps } from 'components/Topics/List/ListItem'; const mockDelete = jest.fn(); const clusterName = 'local'; const mockDeleteMessages = jest.fn(); +const mockToggleTopicSelected = jest.fn(); jest.mock( 'components/common/ConfirmationModal/ConfirmationModal', @@ -23,6 +24,8 @@ describe('ListItem', () => { deleteTopic={mockDelete} clusterName={clusterName} clearTopicMessages={mockDeleteMessages} + selected={false} + toggleTopicSelected={mockToggleTopicSelected} {...props} /> ); @@ -73,6 +76,18 @@ describe('ListItem', () => { expect(wrapper.find('.tag.is-light').text()).toEqual('Internal'); }); + it('renders without checkbox for internal topic', () => { + const wrapper = mount( + <StaticRouter> + <table> + <tbody>{setupComponent()}</tbody> + </table> + </StaticRouter> + ); + + expect(wrapper.find('td').at(0).html()).toEqual('<td></td>'); + }); + it('renders correct tags for external topic', () => { const wrapper = mount( <StaticRouter> @@ -85,6 +100,28 @@ describe('ListItem', () => { expect(wrapper.find('.tag.is-primary').text()).toEqual('External'); }); + it('renders with checkbox for external topic', () => { + const 
wrapper = mount( + <StaticRouter> + <table> + <tbody>{setupComponent({ topic: externalTopicPayload })}</tbody> + </table> + </StaticRouter> + ); + + expect(wrapper.find('td').at(0).html()).toEqual( + '<td><input type="checkbox"></td>' + ); + }); + + it('triggers the toggleTopicSelected when clicked on the checkbox input', () => { + const wrapper = shallow(setupComponent({ topic: externalTopicPayload })); + expect(wrapper.exists('input')).toBeTruthy(); + wrapper.find('input[type="checkbox"]').at(0).simulate('change'); + expect(mockToggleTopicSelected).toBeCalledTimes(1); + expect(mockToggleTopicSelected).toBeCalledWith(externalTopicPayload.name); + }); + it('renders correct out of sync replicas number', () => { const wrapper = mount( <StaticRouter> @@ -98,6 +135,6 @@ describe('ListItem', () => { </StaticRouter> ); - expect(wrapper.find('td').at(2).text()).toEqual('0'); + expect(wrapper.find('td').at(3).text()).toEqual('0'); }); }); diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap index 29e22250df2..59f2f1930e3 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap @@ -27,7 +27,9 @@ exports[`List when it does not have readonly flag matches the snapshot 1`] = ` <List areTopicsFetching={false} clearTopicMessages={[MockFunction]} + clearTopicsMessages={[MockFunction]} deleteTopic={[MockFunction]} + deleteTopics={[MockFunction]} fetchTopicsList={ [MockFunction] { "calls": Array [ @@ -165,6 +167,9 @@ exports[`List when it does not have readonly flag matches the snapshot 1`] = ` > <thead> <tr> + <th> + + </th> <ListHeaderCell orderBy={null} setOrderBy={[MockFunction]} diff --git a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts index a00bee66bff..55dd2e6baff 100644 --- a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts +++ b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts @@ -87,6 +87,14 @@ export const clearTopicMessages = } }; +export const clearTopicsMessages = + (clusterName: ClusterName, topicsName: TopicName[]): PromiseThunkResult => + async (dispatch) => { + topicsName.forEach((topicName) => { + dispatch(clearTopicMessages(clusterName, topicName)); + }); + }; + export const fetchTopicDetails = (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult => async (dispatch, getState) => { @@ -285,6 +293,14 @@ export const deleteTopic = } }; +export const deleteTopics = + (clusterName: ClusterName, topicsName: TopicName[]): PromiseThunkResult => + async (dispatch) => { + topicsName.forEach((topicName) => { + dispatch(deleteTopic(clusterName, topicName)); + }); + }; + export const fetchTopicConsumerGroups = (clusterName: ClusterName, topicName: TopicName): PromiseThunkResult => async (dispatch, getState) => {
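A design note on the fan-out thunks in this patch: deleteTopics and clearTopicsMessages dispatch one per-topic action inside forEach without awaiting, so the batch returns immediately and per-topic failures stay independent. If a caller needed to wait for the whole batch, a Promise.all variant could look roughly like this (deleteTopicsBatched and its deleteOne parameter are hypothetical; the per-topic call is assumed to return a promise, as the thunks here do):

```typescript
// Hypothetical batched variant: starts one deletion per topic and resolves
// once every deletion has completed, rejecting as soon as any one fails.
async function deleteTopicsBatched(
  clusterName: string,
  topicNames: string[],
  deleteOne: (clusterName: string, topicName: string) => Promise<void>
): Promise<void> {
  await Promise.all(topicNames.map((name) => deleteOne(clusterName, name)));
}
```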
null
train
train
2021-08-05T08:05:21
"2021-03-11T14:38:45Z"
IldarAlmakaev
train
provectus/kafka-ui/245_247
provectus/kafka-ui
provectus/kafka-ui/245
provectus/kafka-ui/247
[ "timestamp(timedelta=464.0, similarity=0.8703313865382377)" ]
6355bf3d693470cbc36f76fadfe26eb29d388ac1
c91d6aad9fdd4b3ee7a3d7e6d7fe14e04af49526
[ "Hi, @andormarkus \r\n\r\n```-e KAFKA_CLUSTERS_0_READONLY=true``` should work for backend in version 0.0.10, frontend will be released in next version.", "Hi @germanosin \r\n\r\nI have adjusted the documentation according it. See my PR #247 \r\n\r\nThanks,\r\nAndor" ]
[]
"2021-03-12T08:16:01Z"
[]
How to enable read only mode?
Hi All, How do I enable read-only mode in docker? Based on PR #218 it is enabled with the `readOnly` environment variable. In docker I have tried the following configuration: ``` -e readOnly=True -e readOnly=true -e readOnly='True' -e readOnly='true' -e KAFKA_CLUSTERS_0_READONLY=true -e KAFKA_CLUSTERS_0_READONLY=True ``` In every case the "Add topic" and "edit settings" buttons on the UI were still active. When I click on these buttons I don't get any error message. In the "edit settings" menu, when I click on the "submit" button, I also get no error message. Thanks, Andor
[ "README.md", "docker-compose.md" ]
[ "README.md", "docker-compose.md" ]
[]
diff --git a/README.md b/README.md index 4e38d7bec88..84521839aba 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,8 @@ docker run -p 8080:8080 \ -d provectuslabs/kafka-ui:latest ``` -Then access the web UI at [http://localhost:8080](http://localhost:8080). +Then access the web UI at [http://localhost:8080](http://localhost:8080). +Further configuration with environment variables - [see environment variables](#env_variables) ### Docker Compose @@ -138,10 +139,11 @@ kafka: * `schemaRegistry`: schemaRegistry's address * `schemaNameTemplate`: how keys are saved to schemaRegistry * `jmxPort`: open jmxPosrts of a broker +* `readOnly`: enable read only mode Configure as many clusters as you need by adding their configs below separated with `-`. -## Environment Variables +## <a name="env_variables"></a> Environment Variables Alternatively, each variable of of the .yml file can be set with an environment variable. For example, if you want to use an environment variable to set the `name` parameter, you can write it like this: `KAFKA_CLUSTERS_2_NAME` @@ -154,6 +156,7 @@ For example, if you want to use an environment variable to set the `name` parame |`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address |`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry |`KAFKA_CLUSTERS_0_JMXPORT` |Open jmxPosrts of a broker +|`KAFKA_CLUSTERS_0_READONLY` |Enable read only mode. Default: false diff --git a/docker-compose.md b/docker-compose.md index 1ec8b135045..68be207ade4 100644 --- a/docker-compose.md +++ b/docker-compose.md @@ -1,5 +1,7 @@ # Quick Start with docker-compose +Envinronment variables documentation - [see usage](README.md#env_variables) + * Add a new service in docker-compose.yml ```yaml @@ -9,14 +11,31 @@ services: image: provectuslabs/kafka-ui container_name: kafka-ui ports: - - "9000:8080" + - "8080:8080" restart: always environment: - -e KAFKA_CLUSTERS_0_NAME=local - -e KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 - -e KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 + - KAFKA_CLUSTERS_0_NAME=local + - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 + - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 ``` + +* If you prefer Kafka UI in read only mode +```yaml +version: '2' +services: + kafka-ui: + image: provectuslabs/kafka-ui + container_name: kafka-ui + ports: + - "8080:8080" + restart: always + environment: + - KAFKA_CLUSTERS_0_NAME=local + - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 + - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181 + - KAFKA_CLUSTERS_0_READONLY=true +``` * Start Kafka UI process
null
train
train
2021-03-12T08:30:42
"2021-03-11T22:59:41Z"
andormarkus
train
provectus/kafka-ui/253_286
provectus/kafka-ui
provectus/kafka-ui/253
provectus/kafka-ui/286
[ "timestamp(timedelta=0.0, similarity=0.9016211552695692)" ]
590bdfb6106aa61e12afe19cb0d02d50af73dec5
992e8b0898a2cb6bfe2daed75f39a9365e9c33aa
[]
[]
"2021-03-18T14:45:12Z"
[ "type/enhancement", "scope/backend" ]
Get messages implementation improvement
Currently ConsumingService.RecordEmitter has the following limitations & problems: https://github.com/provectus/kafka-ui/blob/master/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java#L118 1. it will block for 3 seconds even if the topic was read to the end (or is empty) 2. it will always do 3 polls (bug?) 3. it has a very strict max-records limit (100) The suggestion is to rewrite RecordEmitter as a Flow.Publisher with an end-offsets check. Backpressure support can also be useful if you decide to use JSON streaming (application/stream+json) to return a huge number of messages in the future.
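The fix that was eventually merged (the gold_patch below) implements exactly this end-offsets check: capture the end offsets up front, skip partitions whose start and end offsets match, and stop polling once every remaining partition is drained. A hedged sketch of the offsets part in TypeScript using kafkajs, purely for illustration — the actual fix is the Java significantOffsets/OffsetsSeek code below:

```typescript
import { Kafka } from 'kafkajs';

// Returns end offsets only for partitions that actually contain data,
// i.e. where the low and high watermarks differ.
async function significantOffsets(
  brokers: string[],
  topic: string
): Promise<Map<number, bigint>> {
  const admin = new Kafka({ clientId: 'offsets-check', brokers }).admin();
  await admin.connect();
  try {
    const offsets = await admin.fetchTopicOffsets(topic);
    const result = new Map<number, bigint>();
    for (const { partition, low, high } of offsets) {
      if (low !== high) result.set(partition, BigInt(high));
    }
    // A poll loop can terminate once every partition reaches its target
    // offset, instead of relying on empty-poll counting or fixed timeouts.
    return result;
  } finally {
    await admin.disconnect();
  }
}
```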
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/producer/KafkaTestProducer.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/OffsetsSeekTest.java", "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java index 9142968eb60..3676d71b8c3 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java @@ -247,7 +247,7 @@ public Mono<Void> deleteTopicMessages(String clusterName, String topicName, if (!cluster.getTopics().containsKey(topicName)) { throw new NotFoundException("No such topic"); } - return consumingService.loadOffsets(cluster, topicName, partitions) + return consumingService.offsetsForDeletion(cluster, topicName, partitions) .flatMap(offsets -> kafkaService.deleteTopicMessages(cluster, offsets)); } -} +} \ No newline at end of file diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java index f34f62dbbff..310292bc6ee 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ConsumingService.java @@ -10,14 +10,18 @@ import com.provectus.kafka.ui.model.TopicMessage; import com.provectus.kafka.ui.util.ClusterUtil; import java.time.Duration; +import java.util.Collection; +import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.function.Supplier; import java.util.stream.Collectors; import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; import org.apache.commons.lang3.StringUtils; +import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; @@ -47,30 +51,23 @@ public Flux<TopicMessage> loadMessages(KafkaCluster cluster, String topic, int recordsLimit = Optional.ofNullable(limit) .map(s -> Math.min(s, MAX_RECORD_LIMIT)) .orElse(DEFAULT_RECORD_LIMIT); - RecordEmitter emitter = new RecordEmitter(kafkaService, cluster, topic, consumerPosition); + RecordEmitter emitter = new RecordEmitter( + () -> kafkaService.createConsumer(cluster), + new OffsetsSeek(topic, consumerPosition)); RecordDeserializer recordDeserializer = deserializationService.getRecordDeserializerForCluster(cluster); - return Flux.create(emitter::emit) + return Flux.create(emitter) .subscribeOn(Schedulers.boundedElastic()) .map(r -> ClusterUtil.mapToTopicMessage(r, recordDeserializer)) .filter(m -> filterTopicMessage(m, query)) .limitRequest(recordsLimit); } - public Mono<Map<TopicPartition, Long>> loadOffsets(KafkaCluster cluster, String topicName, - List<Integer> partitionsToInclude) { + public Mono<Map<TopicPartition, Long>> offsetsForDeletion(KafkaCluster cluster, String topicName, + List<Integer> partitionsToInclude) { return Mono.fromSupplier(() -> { try (KafkaConsumer<Bytes, Bytes> consumer = kafkaService.createConsumer(cluster)) { - var partitions = consumer.partitionsFor(topicName).stream() - .filter( - p -> partitionsToInclude.isEmpty() || partitionsToInclude.contains(p.partition())) - .map(p -> new TopicPartition(topicName, p.partition())) - .collect(Collectors.toList()); - var beginningOffsets = consumer.beginningOffsets(partitions); - var endOffsets = consumer.endOffsets(partitions); - return endOffsets.entrySet().stream() - .filter(entry -> 
!beginningOffsets.get(entry.getKey()).equals(entry.getValue())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + return significantOffsets(consumer, topicName, partitionsToInclude); } catch (Exception e) { log.error("Error occurred while consuming records", e); throw new RuntimeException(e); @@ -78,6 +75,25 @@ public Mono<Map<TopicPartition, Long>> loadOffsets(KafkaCluster cluster, String }); } + /** + * returns end offsets for partitions where start offset != end offsets. + * This is useful when we need to verify that partition is not empty. + */ + private static Map<TopicPartition, Long> significantOffsets(Consumer<?, ?> consumer, + String topicName, + Collection<Integer> + partitionsToInclude) { + var partitions = consumer.partitionsFor(topicName).stream() + .filter(p -> partitionsToInclude.isEmpty() || partitionsToInclude.contains(p.partition())) + .map(p -> new TopicPartition(topicName, p.partition())) + .collect(Collectors.toList()); + var beginningOffsets = consumer.beginningOffsets(partitions); + var endOffsets = consumer.endOffsets(partitions); + return endOffsets.entrySet().stream() + .filter(entry -> !beginningOffsets.get(entry.getKey()).equals(entry.getValue())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + private boolean filterTopicMessage(TopicMessage message, String query) { if (StringUtils.isEmpty(query)) { return true; @@ -110,52 +126,48 @@ private boolean treeContainsValue(JsonNode tree, String query) { } @RequiredArgsConstructor - private static class RecordEmitter { - private static final int MAX_EMPTY_POLLS_COUNT = 3; + static class RecordEmitter + implements java.util.function.Consumer<FluxSink<ConsumerRecord<Bytes, Bytes>>> { + private static final Duration POLL_TIMEOUT_MS = Duration.ofMillis(1000L); - private final KafkaService kafkaService; - private final KafkaCluster cluster; - private final String topic; - private final ConsumerPosition consumerPosition; + private final Supplier<KafkaConsumer<Bytes, Bytes>> consumerSupplier; + private final OffsetsSeek offsetsSeek; - public void emit(FluxSink<ConsumerRecord<Bytes, Bytes>> sink) { - try (KafkaConsumer<Bytes, Bytes> consumer = kafkaService.createConsumer(cluster)) { - assignAndSeek(consumer); - int emptyPollsCount = 0; - log.info("assignment: {}", consumer.assignment()); - while (!sink.isCancelled()) { + @Override + public void accept(FluxSink<ConsumerRecord<Bytes, Bytes>> sink) { + try (KafkaConsumer<Bytes, Bytes> consumer = consumerSupplier.get()) { + var waitingOffsets = offsetsSeek.assignAndSeek(consumer); + while (!sink.isCancelled() && !waitingOffsets.endReached()) { ConsumerRecords<Bytes, Bytes> records = consumer.poll(POLL_TIMEOUT_MS); log.info("{} records polled", records.count()); - if (records.count() == 0 && emptyPollsCount > MAX_EMPTY_POLLS_COUNT) { - break; - } else { - emptyPollsCount++; + for (ConsumerRecord<Bytes, Bytes> record : records) { + if (!sink.isCancelled() && !waitingOffsets.endReached()) { + sink.next(record); + waitingOffsets.markPolled(record); + } else { + break; + } } - records.iterator() - .forEachRemaining(sink::next); } sink.complete(); + log.info("Polling finished"); } catch (Exception e) { log.error("Error occurred while consuming records", e); throw new RuntimeException(e); } } + } - private List<TopicPartition> getRequestedPartitions() { - Map<Integer, Long> partitionPositions = consumerPosition.getSeekTo(); + @RequiredArgsConstructor + static class OffsetsSeek { - return Optional.ofNullable(cluster.getTopics().get(topic)) 
- .orElseThrow(() -> new IllegalArgumentException("Unknown topic: " + topic)) - .getPartitions().values().stream() - .filter(internalPartition -> partitionPositions.isEmpty() - || partitionPositions.containsKey(internalPartition.getPartition())) - .map(partitionInfo -> new TopicPartition(topic, partitionInfo.getPartition())) - .collect(Collectors.toList()); - } + private final String topic; + private final ConsumerPosition consumerPosition; - private void assignAndSeek(KafkaConsumer<Bytes, Bytes> consumer) { + public WaitingOffsets assignAndSeek(Consumer<Bytes, Bytes> consumer) { SeekType seekType = consumerPosition.getSeekType(); + log.info("Positioning consumer for topic {} with {}", topic, consumerPosition); switch (seekType) { case OFFSET: assignAndSeekForOffset(consumer); @@ -169,10 +181,21 @@ private void assignAndSeek(KafkaConsumer<Bytes, Bytes> consumer) { default: throw new IllegalArgumentException("Unknown seekType: " + seekType); } + log.info("Assignment: {}", consumer.assignment()); + return new WaitingOffsets(topic, consumer); } - private void assignAndSeekForOffset(KafkaConsumer<Bytes, Bytes> consumer) { - List<TopicPartition> partitions = getRequestedPartitions(); + private List<TopicPartition> getRequestedPartitions(Consumer<Bytes, Bytes> consumer) { + Map<Integer, Long> partitionPositions = consumerPosition.getSeekTo(); + return consumer.partitionsFor(topic).stream() + .filter( + p -> partitionPositions.isEmpty() || partitionPositions.containsKey(p.partition())) + .map(p -> new TopicPartition(p.topic(), p.partition())) + .collect(Collectors.toList()); + } + + private void assignAndSeekForOffset(Consumer<Bytes, Bytes> consumer) { + List<TopicPartition> partitions = getRequestedPartitions(consumer); consumer.assign(partitions); consumerPosition.getSeekTo().forEach((partition, offset) -> { TopicPartition topicPartition = new TopicPartition(topic, partition); @@ -180,7 +203,7 @@ private void assignAndSeekForOffset(KafkaConsumer<Bytes, Bytes> consumer) { }); } - private void assignAndSeekForTimestamp(KafkaConsumer<Bytes, Bytes> consumer) { + private void assignAndSeekForTimestamp(Consumer<Bytes, Bytes> consumer) { Map<TopicPartition, Long> timestampsToSearch = consumerPosition.getSeekTo().entrySet().stream() .collect(Collectors.toMap( @@ -200,10 +223,34 @@ private void assignAndSeekForTimestamp(KafkaConsumer<Bytes, Bytes> consumer) { offsetsForTimestamps.forEach(consumer::seek); } - private void assignAndSeekFromBeginning(KafkaConsumer<Bytes, Bytes> consumer) { - List<TopicPartition> partitions = getRequestedPartitions(); + private void assignAndSeekFromBeginning(Consumer<Bytes, Bytes> consumer) { + List<TopicPartition> partitions = getRequestedPartitions(consumer); consumer.assign(partitions); consumer.seekToBeginning(partitions); } + + static class WaitingOffsets { + final Map<Integer, Long> offsets = new HashMap<>(); // partition number -> offset + + WaitingOffsets(String topic, Consumer<?, ?> consumer) { + var partitions = consumer.assignment().stream() + .map(TopicPartition::partition) + .collect(Collectors.toList()); + significantOffsets(consumer, topic, partitions) + .forEach((tp, offset) -> offsets.put(tp.partition(), offset - 1)); + } + + void markPolled(ConsumerRecord<?, ?> rec) { + Long waiting = offsets.get(rec.partition()); + if (waiting != null && waiting <= rec.offset()) { + offsets.remove(rec.partition()); + } + } + + boolean endReached() { + return offsets.isEmpty(); + } + } + } } diff --git 
a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java index 1e51a7e44eb..e66b5cdf2ff 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java @@ -19,6 +19,7 @@ import com.provectus.kafka.ui.model.schemaregistry.SubjectIdResponse; import java.util.Formatter; import java.util.Objects; +import java.util.Optional; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; @@ -121,14 +122,7 @@ private Mono<SchemaSubject> getSchemaSubject(String clusterName, String schemaNa */ @NotNull private SchemaSubject withSchemaType(SchemaSubject s) { - SchemaType schemaType = - Objects.nonNull(s.getSchemaType()) ? s.getSchemaType() : SchemaType.AVRO; - return new SchemaSubject() - .schema(s.getSchema()) - .subject(s.getSubject()) - .version(s.getVersion()) - .id(s.getId()) - .schemaType(schemaType); + return s.schemaType(Optional.ofNullable(s.getSchemaType()).orElse(SchemaType.AVRO)); } public Mono<ResponseEntity<Void>> deleteSchemaSubjectByVersion(String clusterName,
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java index 3bdca7ef21b..6fd1b0fc5bb 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/AbstractBaseTest.java @@ -2,8 +2,14 @@ import com.provectus.kafka.ui.container.KafkaConnectContainer; import com.provectus.kafka.ui.container.SchemaRegistryContainer; +import java.util.List; +import java.util.Properties; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.clients.admin.NewTopic; import org.jetbrains.annotations.NotNull; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.api.function.ThrowingConsumer; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.context.ApplicationContextInitializer; import org.springframework.context.ConfigurableApplicationContext; @@ -13,25 +19,30 @@ import org.testcontainers.containers.Network; import org.testcontainers.utility.DockerImageName; + @ExtendWith(SpringExtension.class) @SpringBootTest @ActiveProfiles("test") public abstract class AbstractBaseTest { + public static String LOCAL = "local"; + public static String SECOND_LOCAL = "secondLocal"; + private static final String CONFLUENT_PLATFORM_VERSION = "5.5.0"; + public static final KafkaContainer kafka = new KafkaContainer( DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION)) .withNetwork(Network.SHARED); + public static final SchemaRegistryContainer schemaRegistry = new SchemaRegistryContainer(CONFLUENT_PLATFORM_VERSION) .withKafka(kafka) .dependsOn(kafka); + public static final KafkaConnectContainer kafkaConnect = new KafkaConnectContainer(CONFLUENT_PLATFORM_VERSION) .withKafka(kafka) .dependsOn(kafka) .dependsOn(schemaRegistry); - public static String LOCAL = "local"; - public static String SECOND_LOCAL = "secondLocal"; static { kafka.start(); @@ -57,4 +68,24 @@ public void initialize(@NotNull ConfigurableApplicationContext context) { System.setProperty("kafka.clusters.1.kafkaConnect.0.address", kafkaConnect.getTarget()); } } + + public static void createTopic(NewTopic topic) { + withAdminClient(client -> client.createTopics(List.of(topic)).all().get()); + } + + public static void deleteTopic(String topic) { + withAdminClient(client -> client.deleteTopics(List.of(topic)).all().get()); + } + + private static void withAdminClient(ThrowingConsumer<AdminClient> consumer) { + Properties properties = new Properties(); + properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); + try (var client = AdminClient.create(properties)) { + try { + consumer.accept(client); + } catch (Throwable throwable) { + throw new RuntimeException(throwable); + } + } + } } diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/producer/KafkaTestProducer.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/producer/KafkaTestProducer.java index 9d8f7ba7d4a..eb59d4977ab 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/producer/KafkaTestProducer.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/producer/KafkaTestProducer.java @@ -1,9 +1,11 @@ package com.provectus.kafka.ui.producer; import java.util.Map; +import java.util.concurrent.Future; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; 
import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.serialization.StringSerializer; import org.testcontainers.containers.KafkaContainer; @@ -23,8 +25,12 @@ public static KafkaTestProducer<String, String> forKafka(KafkaContainer kafkaCon ))); } - public void send(String topic, ValueT value) { - producer.send(new ProducerRecord<>(topic, value)); + public Future<RecordMetadata> send(String topic, ValueT value) { + return producer.send(new ProducerRecord<>(topic, value)); + } + + public Future<RecordMetadata> send(ProducerRecord<KeyT, ValueT> record) { + return producer.send(record); } @Override diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/OffsetsSeekTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/OffsetsSeekTest.java new file mode 100644 index 00000000000..8f5ec97ecc7 --- /dev/null +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/OffsetsSeekTest.java @@ -0,0 +1,119 @@ +package com.provectus.kafka.ui.service; + +import static org.assertj.core.api.Assertions.assertThat; + +import com.provectus.kafka.ui.model.ConsumerPosition; +import com.provectus.kafka.ui.model.SeekType; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.MockConsumer; +import org.apache.kafka.clients.consumer.OffsetResetStrategy; +import org.apache.kafka.common.PartitionInfo; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.utils.Bytes; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +class OffsetsSeekTest { + + String topic = "test"; + TopicPartition tp0 = new TopicPartition(topic, 0); //offsets: start 0, end 0 + TopicPartition tp1 = new TopicPartition(topic, 1); //offsets: start 10, end 10 + TopicPartition tp2 = new TopicPartition(topic, 2); //offsets: start 0, end 20 + TopicPartition tp3 = new TopicPartition(topic, 3); //offsets: start 25, end 30 + + MockConsumer<Bytes, Bytes> consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); + + @BeforeEach + void initConsumer() { + consumer = new MockConsumer<>(OffsetResetStrategy.EARLIEST); + consumer.updatePartitions( + topic, + Stream.of(tp0, tp1, tp2, tp3) + .map(tp -> new PartitionInfo(topic, tp.partition(), null, null, null, null)) + .collect(Collectors.toList())); + consumer.updateBeginningOffsets(Map.of( + tp0, 0L, + tp1, 10L, + tp2, 0L, + tp3, 25L + )); + consumer.addEndOffsets(Map.of( + tp0, 0L, + tp1, 10L, + tp2, 20L, + tp3, 30L + )); + } + + @Test + void seekToBeginningAllPartitions() { + var seek = new ConsumingService.OffsetsSeek( + topic, + new ConsumerPosition(SeekType.BEGINNING, Map.of(0, 0L, 1, 0L))); + seek.assignAndSeek(consumer); + assertThat(consumer.assignment()).containsExactlyInAnyOrder(tp0, tp1); + assertThat(consumer.position(tp0)).isEqualTo(0L); + assertThat(consumer.position(tp1)).isEqualTo(10L); + } + + @Test + void seekToBeginningWithPartitionsList() { + var seek = new ConsumingService.OffsetsSeek( + topic, + new ConsumerPosition(SeekType.BEGINNING, Map.of())); + seek.assignAndSeek(consumer); + assertThat(consumer.assignment()).containsExactlyInAnyOrder(tp0, tp1, tp2, tp3); + assertThat(consumer.position(tp0)).isEqualTo(0L); + assertThat(consumer.position(tp1)).isEqualTo(10L); + assertThat(consumer.position(tp2)).isEqualTo(0L); + 
assertThat(consumer.position(tp3)).isEqualTo(25L); + } + + @Test + void seekToOffset() { + var seek = new ConsumingService.OffsetsSeek( + topic, + new ConsumerPosition(SeekType.OFFSET, Map.of(0, 0L, 1, 1L, 2, 2L))); + seek.assignAndSeek(consumer); + assertThat(consumer.assignment()).containsExactlyInAnyOrder(tp0, tp1, tp2); + assertThat(consumer.position(tp0)).isEqualTo(0L); + assertThat(consumer.position(tp1)).isEqualTo(1L); + assertThat(consumer.position(tp2)).isEqualTo(2L); + } + + @Nested + class WaitingOffsetsTest { + + ConsumingService.OffsetsSeek.WaitingOffsets offsets; + + @BeforeEach + void assignAndCreateOffsets() { + consumer.assign(List.of(tp0, tp1, tp2, tp3)); + offsets = new ConsumingService.OffsetsSeek.WaitingOffsets(topic, consumer); + } + + @Test + void collectsSignificantOffsetsMinus1ForAssignedPartitions() { + // offsets for partition 0 & 1 should be skipped because they + // effectively contains no data (start offset = end offset) + assertThat(offsets.offsets).containsExactlyInAnyOrderEntriesOf( + Map.of(2, 19L, 3, 29L) + ); + } + + @Test + void returnTrueWhenOffsetsReachedReached() { + assertThat(offsets.endReached()).isFalse(); + offsets.markPolled(new ConsumerRecord<>(topic, 2, 19, null, null)); + assertThat(offsets.endReached()).isFalse(); + offsets.markPolled(new ConsumerRecord<>(topic, 3, 29, null, null)); + assertThat(offsets.endReached()).isTrue(); + } + } + +} diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java new file mode 100644 index 00000000000..11af012277a --- /dev/null +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/RecordEmitterTest.java @@ -0,0 +1,169 @@ +package com.provectus.kafka.ui.service; + +import static com.provectus.kafka.ui.service.ConsumingService.OffsetsSeek; +import static com.provectus.kafka.ui.service.ConsumingService.RecordEmitter; +import static org.assertj.core.api.Assertions.assertThat; + +import com.provectus.kafka.ui.AbstractBaseTest; +import com.provectus.kafka.ui.model.ConsumerPosition; +import com.provectus.kafka.ui.model.SeekType; +import com.provectus.kafka.ui.producer.KafkaTestProducer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; +import java.util.stream.Collectors; +import lombok.Value; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.clients.consumer.ConsumerConfig; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.serialization.BytesDeserializer; +import org.apache.kafka.common.serialization.StringDeserializer; +import org.apache.kafka.common.utils.Bytes; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import reactor.core.publisher.Flux; + +class RecordEmitterTest extends AbstractBaseTest { + + static final int PARTITIONS = 5; + static final int MSGS_PER_PARTITION = 100; + + static final String TOPIC = RecordEmitterTest.class.getSimpleName() + "_" + UUID.randomUUID(); + static final String EMPTY_TOPIC = TOPIC + "_empty"; + static final List<Record> SENT_RECORDS = new ArrayList<>(); + + @BeforeAll + static void generateMsgs() throws Exception { + createTopic(new NewTopic(TOPIC, PARTITIONS, (short) 1)); + 
createTopic(new NewTopic(EMPTY_TOPIC, PARTITIONS, (short) 1)); + try (var producer = KafkaTestProducer.forKafka(kafka)) { + for (int partition = 0; partition < PARTITIONS; partition++) { + for (int i = 0; i < MSGS_PER_PARTITION; i++) { + long ts = System.currentTimeMillis() + i; + var value = "msg_" + partition + "_" + i; + var metadata = + producer.send(new ProducerRecord<>(TOPIC, partition, ts, null, value)).get(); + SENT_RECORDS.add(new Record(value, metadata.partition(), metadata.offset(), ts)); + } + } + } + } + + @AfterAll + static void cleanup() { + deleteTopic(TOPIC); + deleteTopic(EMPTY_TOPIC); + } + + @Test + void pollNothingOnEmptyTopic() { + var emitter = new RecordEmitter( + this::createConsumer, + new OffsetsSeek(EMPTY_TOPIC, new ConsumerPosition(SeekType.BEGINNING, Map.of()))); + + Long polledValues = Flux.create(emitter) + .limitRequest(100) + .count() + .block(); + + assertThat(polledValues).isZero(); + } + + @Test + void pollFullTopicFromBeginning() { + var emitter = new RecordEmitter( + this::createConsumer, + new OffsetsSeek(TOPIC, new ConsumerPosition(SeekType.BEGINNING, Map.of()))); + + var polledValues = Flux.create(emitter) + .map(this::deserialize) + .limitRequest(Long.MAX_VALUE) + .collect(Collectors.toList()) + .block(); + + assertThat(polledValues).containsExactlyInAnyOrderElementsOf( + SENT_RECORDS.stream().map(Record::getValue).collect(Collectors.toList())); + } + + @Test + void pollWithOffsets() { + Map<Integer, Long> targetOffsets = new HashMap<>(); + for (int i = 0; i < PARTITIONS; i++) { + long offset = ThreadLocalRandom.current().nextLong(MSGS_PER_PARTITION); + targetOffsets.put(i, offset); + } + + var emitter = new RecordEmitter( + this::createConsumer, + new OffsetsSeek(TOPIC, new ConsumerPosition(SeekType.OFFSET, targetOffsets))); + + var polledValues = Flux.create(emitter) + .map(this::deserialize) + .limitRequest(Long.MAX_VALUE) + .collect(Collectors.toList()) + .block(); + + var expectedValues = SENT_RECORDS.stream() + .filter(r -> r.getOffset() >= targetOffsets.get(r.getPartition())) + .map(Record::getValue) + .collect(Collectors.toList()); + + assertThat(polledValues).containsExactlyInAnyOrderElementsOf(expectedValues); + } + + @Test + void pollWithTimestamps() { + Map<Integer, Long> targetTimestamps = new HashMap<>(); + for (int i = 0; i < PARTITIONS; i++) { + int randRecordIdx = ThreadLocalRandom.current().nextInt(SENT_RECORDS.size()); + targetTimestamps.put(i, SENT_RECORDS.get(randRecordIdx).getTimestamp()); + } + + var emitter = new RecordEmitter( + this::createConsumer, + new OffsetsSeek(TOPIC, new ConsumerPosition(SeekType.TIMESTAMP, targetTimestamps))); + + var polledValues = Flux.create(emitter) + .map(this::deserialize) + .limitRequest(Long.MAX_VALUE) + .collect(Collectors.toList()) + .block(); + + var expectedValues = SENT_RECORDS.stream() + .filter(r -> r.getTimestamp() >= targetTimestamps.get(r.getPartition())) + .map(Record::getValue) + .collect(Collectors.toList()); + + assertThat(polledValues).containsExactlyInAnyOrderElementsOf(expectedValues); + } + + private KafkaConsumer<Bytes, Bytes> createConsumer() { + return new KafkaConsumer<>( + Map.of( + ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers(), + ConsumerConfig.GROUP_ID_CONFIG, UUID.randomUUID().toString(), + ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 20, // to check multiple polls + ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class, + ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class + ) + ); + } + + private String 
deserialize(ConsumerRecord<Bytes, Bytes> rec) { + return new StringDeserializer().deserialize(TOPIC, rec.value().get()); + } + + @Value + static class Record { + String value; + int partition; + long offset; + long timestamp; + } +}
train
train
2021-03-23T08:55:28
"2021-03-16T11:50:54Z"
iliax
train
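The patch in the record above hinges on one rule, documented in its javadoc: a partition is worth waiting on only if its start offset differs from its end offset. A minimal standalone sketch of that rule — the method body is lifted from the gold_patch, while the wrapper class is hypothetical:

```java
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.common.TopicPartition;

public class SignificantOffsets {
  // End offsets for the requested partitions, skipping partitions whose
  // beginning offset equals their end offset (i.e. effectively empty ones).
  static Map<TopicPartition, Long> significantOffsets(Consumer<?, ?> consumer,
                                                      String topic,
                                                      Collection<Integer> partitionsToInclude) {
    List<TopicPartition> partitions = consumer.partitionsFor(topic).stream()
        .filter(p -> partitionsToInclude.isEmpty() || partitionsToInclude.contains(p.partition()))
        .map(p -> new TopicPartition(topic, p.partition()))
        .collect(Collectors.toList());
    Map<TopicPartition, Long> beginning = consumer.beginningOffsets(partitions);
    return consumer.endOffsets(partitions).entrySet().stream()
        .filter(e -> !beginning.get(e.getKey()).equals(e.getValue()))
        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
  }
}
```

`WaitingOffsets` in the same patch consumes this map: it stores each significant end offset minus one and lets `RecordEmitter` stop polling once every tracked partition has been read that far, replacing the old empty-poll-counter heuristic.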
provectus/kafka-ui/289_295
provectus/kafka-ui
provectus/kafka-ui/289
provectus/kafka-ui/295
[ "timestamp(timedelta=0.0, similarity=0.8433503638953539)" ]
217b36307615effe6dad8c272e4472d82b74425e
a153ce7b21e7dcb9a6438ff5d4dd90ab027a9e84
[ "@andormarkus thank you for creating this issue. Will try to fix it asap" ]
[]
"2021-03-22T13:20:19Z"
[ "type/bug", "scope/frontend" ]
Frontend: Segment size calculation error
Hi Team, Based on my testing, when a topic's segment size is larger than 1 GB it won't calculate its value in `mb` or `gb`; it just returns it in `bytes`. ![Screenshot 2021-03-22 at 08 10 10](https://user-images.githubusercontent.com/51825189/111952883-15ee9300-8ae6-11eb-8721-0ae0073faa8d.png) Thanks, Andor
[ "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx", "kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx" ]
[ "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx", "kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx" ]
[]
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index 493290c0f2f..bd81c4118d0 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -1182,6 +1182,7 @@ components: type: integer segmentSize: type: integer + format: int64 segmentCount: type: integer underReplicatedPartitions: diff --git a/kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx b/kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx index 9447f724025..ad45e454db7 100644 --- a/kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx +++ b/kafka-ui-react-app/src/components/common/BytesFormatted/BytesFormatted.tsx @@ -11,7 +11,7 @@ const BytesFormatted: React.FC<Props> = ({ value, precision = 0 }) => { const formatedValue = React.useMemo((): string => { try { const bytes = typeof value === 'string' ? parseInt(value, 10) : value; - if (Number.isNaN(bytes)) return `-Bytes`; + if (Number.isNaN(bytes) || (bytes && bytes < 0)) return `-Bytes`; if (!bytes || bytes < 1024) return `${Math.ceil(bytes || 0)}${sizes[0]}`; const pow = Math.floor(Math.log2(bytes) / 10); const multiplier = 10 ** (precision < 0 ? 0 : precision); diff --git a/kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx b/kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx index 85ed10877b3..135673586c6 100644 --- a/kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx +++ b/kafka-ui-react-app/src/components/common/BytesFormatted/__tests__/BytesFormatted.spec.tsx @@ -32,6 +32,9 @@ describe('BytesFormatted', () => { component = shallow(<BytesFormatted value="some string" />); expect(component.text()).toEqual(`-${sizes[0]}`); + component = shallow(<BytesFormatted value={-100000} />); + expect(component.text()).toEqual(`-${sizes[0]}`); + component = shallow(<BytesFormatted value={undefined} />); expect(component.text()).toEqual(`0${sizes[0]}`); });
null
train
train
2021-03-18T19:16:59
"2021-03-22T07:11:18Z"
andormarkus
train
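The actual fix in the record above is one guard — `Number.isNaN(bytes) || (bytes && bytes < 0)` — so negative inputs now render as `-Bytes`, and the companion contract change giving `segmentSize` `format: int64` points at where the negatives came from: sizes past the 32-bit range. The whole formatting rule is small; here it is transcribed into Java for reference (the class and method are illustrative — the shipped code is the React component in the patch):

```java
public class BytesFormatted {
  private static final String[] SIZES = {"Bytes", "KB", "MB", "GB", "TB"};

  // Mirrors the component: null/0 -> "0Bytes", negative -> "-Bytes",
  // values under 1024 stay in bytes, otherwise unit index = floor(log2(bytes) / 10).
  static String format(Long value, int precision) {
    if (value == null || value == 0) return "0" + SIZES[0];
    if (value < 0) return "-" + SIZES[0];
    if (value < 1024) return value + SIZES[0];
    // integer-exact floor(log2(value)), avoiding floating-point log
    int pow = Math.min((63 - Long.numberOfLeadingZeros(value)) / 10, SIZES.length - 1);
    double multiplier = Math.pow(10, Math.max(precision, 0));
    double scaled = Math.round(value / Math.pow(1024, pow) * multiplier) / multiplier;
    return scaled + SIZES[pow]; // prints "1.0GB" where the TS shows "1GB" -- cosmetic only
  }
}
```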
provectus/kafka-ui/297_306
provectus/kafka-ui
provectus/kafka-ui/297
provectus/kafka-ui/306
[ "timestamp(timedelta=55806.0, similarity=0.8611471619108559)" ]
a153ce7b21e7dcb9a6438ff5d4dd90ab027a9e84
7672f5e4cdd0441b0f2d3aacede4fcef1125a3b8
[]
[]
"2021-03-24T09:57:36Z"
[ "scope/backend" ]
Fix sonar code smell warnings
[ "docker/message.json", "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/public/index.html", "kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts", "kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts", "kafka-ui-react-app/src/redux/store/configureStore/dev.ts", "kafka-ui-react-app/src/redux/store/configureStore/prod.ts", "kafka-ui-react-app/src/theme/index.scss" ]
[ "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/public/index.html", "kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts", "kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts", "kafka-ui-react-app/src/redux/store/configureStore/dev.ts", "kafka-ui-react-app/src/redux/store/configureStore/prod.ts", "kafka-ui-react-app/src/theme/index.scss" ]
[]
diff --git a/docker/message.json b/docker/message.json deleted file mode 100644 index 490cbe1527d..00000000000 --- a/docker/message.json +++ /dev/null @@ -1,100 +0,0 @@ -{"message":"hello1","destination":"kafka2"} -{"message":"hello2","destination":"kafka2"} -{"message":"hello3","destination":"kafka2"} -{"message":"hello4","destination":"kafka2"} -{"message":"hello5","destination":"kafka2"} -{"message":"hello6","destination":"kafka2"} -{"message":"hello7","destination":"kafka2"} -{"message":"hello8","destination":"kafka2"} -{"message":"hello9","destination":"kafka2"} -{"message":"hello10","destination":"kafka2"} -{"message":"hello11","destination":"kafka2"} -{"message":"hello12","destination":"kafka2"} -{"message":"hello13","destination":"kafka2"} -{"message":"hello14","destination":"kafka2"} -{"message":"hello15","destination":"kafka2"} -{"message":"hello16","destination":"kafka2"} -{"message":"hello17","destination":"kafka2"} -{"message":"hello18","destination":"kafka2"} -{"message":"hello19","destination":"kafka2"} -{"message":"hello20","destination":"kafka2"} -{"message":"hello21","destination":"kafka2"} -{"message":"hello22","destination":"kafka2"} -{"message":"hello23","destination":"kafka2"} -{"message":"hello24","destination":"kafka2"} -{"message":"hello25","destination":"kafka2"} -{"message":"hello26","destination":"kafka2"} -{"message":"hello27","destination":"kafka2"} -{"message":"hello28","destination":"kafka2"} -{"message":"hello29","destination":"kafka2"} -{"message":"hello30","destination":"kafka2"} -{"message":"hello31","destination":"kafka2"} -{"message":"hello32","destination":"kafka2"} -{"message":"hello33","destination":"kafka2"} -{"message":"hello34","destination":"kafka2"} -{"message":"hello35","destination":"kafka2"} -{"message":"hello36","destination":"kafka2"} -{"message":"hello37","destination":"kafka2"} -{"message":"hello38","destination":"kafka2"} -{"message":"hello39","destination":"kafka2"} -{"message":"hello40","destination":"kafka2"} -{"message":"hello41","destination":"kafka2"} -{"message":"hello42","destination":"kafka2"} -{"message":"hello43","destination":"kafka2"} -{"message":"hello44","destination":"kafka2"} -{"message":"hello45","destination":"kafka2"} -{"message":"hello46","destination":"kafka2"} -{"message":"hello47","destination":"kafka2"} -{"message":"hello48","destination":"kafka2"} -{"message":"hello49","destination":"kafka2"} -{"message":"hello50","destination":"kafka2"} -{"message":"hello51","destination":"kafka2"} -{"message":"hello52","destination":"kafka2"} -{"message":"hello53","destination":"kafka2"} -{"message":"hello54","destination":"kafka2"} -{"message":"hello55","destination":"kafka2"} -{"message":"hello56","destination":"kafka2"} -{"message":"hello57","destination":"kafka2"} -{"message":"hello58","destination":"kafka2"} -{"message":"hello59","destination":"kafka2"} -{"message":"hello60","destination":"kafka2"} -{"message":"hello61","destination":"kafka2"} -{"message":"hello62","destination":"kafka2"} -{"message":"hello63","destination":"kafka2"} -{"message":"hello64","destination":"kafka2"} -{"message":"hello65","destination":"kafka2"} -{"message":"hello66","destination":"kafka2"} -{"message":"hello67","destination":"kafka2"} -{"message":"hello68","destination":"kafka2"} -{"message":"hello69","destination":"kafka2"} -{"message":"hello70","destination":"kafka2"} -{"message":"hello71","destination":"kafka2"} -{"message":"hello72","destination":"kafka2"} -{"message":"hello73","destination":"kafka2"} 
-{"message":"hello74","destination":"kafka2"} -{"message":"hello75","destination":"kafka2"} -{"message":"hello76","destination":"kafka2"} -{"message":"hello77","destination":"kafka2"} -{"message":"hello78","destination":"kafka2"} -{"message":"hello79","destination":"kafka2"} -{"message":"hello80","destination":"kafka2"} -{"message":"hello81","destination":"kafka2"} -{"message":"hello82","destination":"kafka2"} -{"message":"hello83","destination":"kafka2"} -{"message":"hello84","destination":"kafka2"} -{"message":"hello85","destination":"kafka2"} -{"message":"hello86","destination":"kafka2"} -{"message":"hello87","destination":"kafka2"} -{"message":"hello88","destination":"kafka2"} -{"message":"hello89","destination":"kafka2"} -{"message":"hello90","destination":"kafka2"} -{"message":"hello91","destination":"kafka2"} -{"message":"hello92","destination":"kafka2"} -{"message":"hello93","destination":"kafka2"} -{"message":"hello94","destination":"kafka2"} -{"message":"hello95","destination":"kafka2"} -{"message":"hello96","destination":"kafka2"} -{"message":"hello97","destination":"kafka2"} -{"message":"hello98","destination":"kafka2"} -{"message":"hello99","destination":"kafka2"} -{"message":"hello100","destination":"kafka2"} \ No newline at end of file diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index c9475fb562b..b47ee3dc5a8 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -3047,6 +3047,11 @@ } } }, + "@fortawesome/fontawesome-free": { + "version": "5.15.3", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-5.15.3.tgz", + "integrity": "sha512-rFnSUN/QOtnOAgqFRooTA3H57JLDm0QEG/jPdk+tLQNL/eWd+Aok8g3qCI+Q1xuDPWpGW/i9JySpJVsq8Q0s9w==" + }, "@hapi/address": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json index bb523c6811b..f33d04cdd8d 100644 --- a/kafka-ui-react-app/package.json +++ b/kafka-ui-react-app/package.json @@ -3,6 +3,7 @@ "version": "0.1.0", "private": true, "dependencies": { + "@fortawesome/fontawesome-free": "^5.15.3", "@hookform/error-message": "0.0.5", "@rooks/use-outside-click-ref": "^4.10.0", "bulma": "^0.9.2", @@ -33,7 +34,6 @@ "lint-staged": { "*.{js,ts,jsx,tsx}": [ "eslint -c .eslintrc.json --fix", - "git add", "npm test -- --bail --findRelatedTests --watchAll=false" ] }, diff --git a/kafka-ui-react-app/public/index.html b/kafka-ui-react-app/public/index.html index 5d52ab46268..5faabaa4a7f 100644 --- a/kafka-ui-react-app/public/index.html +++ b/kafka-ui-react-app/public/index.html @@ -6,7 +6,6 @@ <meta name="viewport" content="width=device-width, initial-scale=1" /> <link rel="apple-touch-icon" href="%PUBLIC_URL%/logo192.png" /> <link rel="manifest" href="%PUBLIC_URL%/manifest.json" /> - <script defer src="https://use.fontawesome.com/releases/v5.12.0/js/all.js"></script> <title>Kafka UI</title> </head> <body> diff --git a/kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts b/kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts index 2f495287b8a..11072289e63 100644 --- a/kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts +++ b/kafka-ui-react-app/src/redux/reducers/schemas/reducer.ts @@ -39,8 +39,8 @@ const addToSchemaList = ( const newState: SchemasState = { ...state, }; - newState.allNames.push(payload.subject as string); - newState.byName[payload.subject as string] = { ...payload }; + 
newState.allNames.push(payload.subject); + newState.byName[payload.subject] = { ...payload }; return newState; }; diff --git a/kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts b/kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts index 65df69145cc..5da7fd3f0b5 100644 --- a/kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts +++ b/kafka-ui-react-app/src/redux/reducers/schemas/selectors.ts @@ -1,6 +1,7 @@ import { createSelector } from 'reselect'; import { RootState, SchemasState } from 'redux/interfaces'; import { createFetchingSelector } from 'redux/reducers/loader/selectors'; +import { sortBy } from 'lodash'; const schemasState = ({ schemas }: RootState): SchemasState => schemas; @@ -55,6 +56,5 @@ export const getSchema = createSelector( export const getSortedSchemaVersions = createSelector( schemasState, - ({ currentSchemaVersions }) => - currentSchemaVersions.sort((a, b) => a.id - b.id) + ({ currentSchemaVersions }) => sortBy(currentSchemaVersions, ['id']) ); diff --git a/kafka-ui-react-app/src/redux/store/configureStore/dev.ts b/kafka-ui-react-app/src/redux/store/configureStore/dev.ts index 4a68a5a5137..40e8ea244b0 100644 --- a/kafka-ui-react-app/src/redux/store/configureStore/dev.ts +++ b/kafka-ui-react-app/src/redux/store/configureStore/dev.ts @@ -12,13 +12,8 @@ declare global { export default () => { const middlewares = [thunk as ThunkMiddleware<RootState, Action>]; - const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; - const enhancer = composeEnhancers(applyMiddleware(...middlewares)); - - const store = createStore(rootReducer, undefined, enhancer); - - return store; + return createStore(rootReducer, undefined, enhancer); }; diff --git a/kafka-ui-react-app/src/redux/store/configureStore/prod.ts b/kafka-ui-react-app/src/redux/store/configureStore/prod.ts index 2316c82211d..63d9e28bfaa 100644 --- a/kafka-ui-react-app/src/redux/store/configureStore/prod.ts +++ b/kafka-ui-react-app/src/redux/store/configureStore/prod.ts @@ -4,10 +4,6 @@ import rootReducer from 'redux/reducers'; export default () => { const middlewares = [thunk]; - const enhancer = applyMiddleware(...middlewares); - - const store = createStore(rootReducer, undefined, enhancer); - - return store; + return createStore(rootReducer, undefined, enhancer); }; diff --git a/kafka-ui-react-app/src/theme/index.scss b/kafka-ui-react-app/src/theme/index.scss index 0cd8e4978be..bdf0979a948 100644 --- a/kafka-ui-react-app/src/theme/index.scss +++ b/kafka-ui-react-app/src/theme/index.scss @@ -1,3 +1,4 @@ +@import '@fortawesome/fontawesome-free/css/all.min.css'; @import 'bulma'; @import '~bulma-switch'; @import 'src/theme/bulma_overrides';
null
train
train
2021-03-24T10:08:54
"2021-03-23T11:50:08Z"
iliax
train
provectus/kafka-ui/297_307
provectus/kafka-ui
provectus/kafka-ui/297
provectus/kafka-ui/307
[ "timestamp(timedelta=68256.0, similarity=0.8430106627754953)" ]
7672f5e4cdd0441b0f2d3aacede4fcef1125a3b8
a256709580b34683ad6861adf5ff610ba55025ea
[]
[]
"2021-03-24T11:53:34Z"
[ "scope/backend" ]
Fix sonar code smell warnings
[ "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamOptions.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/customParamsOptions.tsx", "kafka-ui-react-app/src/lib/constants.ts", "kafka-ui-react-app/src/redux/interfaces/topic.ts" ]
[ "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamSelect.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/__snapshots__/CustomParamSelect.spec.tsx.snap", "kafka-ui-react-app/src/lib/constants.ts", "kafka-ui-react-app/src/redux/interfaces/topic.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamOptions.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamOptions.tsx deleted file mode 100644 index ae3aca7fb90..00000000000 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamOptions.tsx +++ /dev/null @@ -1,27 +0,0 @@ -import React from 'react'; -import { TopicConfigOption } from 'redux/interfaces'; -import { omitBy } from 'lodash'; -import CUSTOM_PARAMS_OPTIONS from './customParamsOptions'; - -interface Props { - existingFields: string[]; -} - -const CustomParamOptions: React.FC<Props> = ({ existingFields }) => { - const fields = omitBy(Object.values(CUSTOM_PARAMS_OPTIONS), (field) => - existingFields.includes(field.name) - ); - - return ( - <> - <option value="">Select</option> - {Object.values(fields).map((opt: TopicConfigOption) => ( - <option key={opt.name} value={opt.name}> - {opt.name} - </option> - ))} - </> - ); -}; - -export default React.memo(CustomParamOptions); diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx index 143984d6b7a..2be0fd1783e 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamSelect.tsx @@ -2,10 +2,10 @@ import React from 'react'; import { useFormContext } from 'react-hook-form'; import { TopicConfigValue } from 'redux/interfaces'; import { ErrorMessage } from '@hookform/error-message'; -import CustomParamOptions from './CustomParamOptions'; +import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; import { INDEX_PREFIX } from './CustomParams'; -interface Props { +export interface CustomParamSelectProps { isDisabled: boolean; index: string; name: string; @@ -13,7 +13,7 @@ interface Props { onNameChange: (inputName: string, name: string) => void; } -const CustomParamSelect: React.FC<Props> = ({ +const CustomParamSelect: React.FC<CustomParamSelectProps> = ({ isDisabled, index, name, @@ -59,7 +59,16 @@ const CustomParamSelect: React.FC<Props> = ({ disabled={isDisabled} defaultValue={name} > - <CustomParamOptions existingFields={existingFields} /> + <option value="">Select</option> + {Object.keys(TOPIC_CUSTOM_PARAMS).map((opt) => ( + <option + key={opt} + value={opt} + disabled={existingFields.includes(opt)} + > + {opt} + </option> + ))} </select> <p className="help is-danger"> <ErrorMessage errors={errors} name={optInputName} /> diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx index af98f870bb1..64e03a218e5 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamValue.tsx @@ -2,7 +2,7 @@ import React from 'react'; import { useFormContext } from 'react-hook-form'; import { TopicConfig } from 'generated-sources'; import { ErrorMessage } from '@hookform/error-message'; -import CUSTOM_PARAMS_OPTIONS from './customParamsOptions'; +import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; interface Props { isDisabled: boolean; @@ -24,10 +24,7 @@ const CustomParamValue: React.FC<Props> = ({ React.useEffect(() => { if (selectedParamName && !defaultValue) { - setValue( - valInputName, - 
CUSTOM_PARAMS_OPTIONS[selectedParamName].defaultValue - ); + setValue(valInputName, TOPIC_CUSTOM_PARAMS[selectedParamName]); } }, [selectedParamName, setValue, valInputName]); diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamSelect.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamSelect.spec.tsx new file mode 100644 index 00000000000..ce2ddbe476e --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamSelect.spec.tsx @@ -0,0 +1,49 @@ +import React from 'react'; +import { mount } from 'enzyme'; +import { useForm, FormProvider, useFormContext } from 'react-hook-form'; +import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; +import CustomParamSelect, { + CustomParamSelectProps, +} from '../CustomParamSelect'; + +const existingFields = [ + 'leader.replication.throttled.replicas', + 'segment.index.bytes', + 'message.timestamp.difference.max.ms', +]; + +const Wrapper: React.FC<Partial<CustomParamSelectProps>> = (props = {}) => { + const methods = useForm(); + return ( + <FormProvider {...methods}> + <CustomParamSelect + index="1" + name="my_custom_param" + existingFields={[]} + isDisabled + onNameChange={jest.fn()} + {...props} + /> + </FormProvider> + ); +}; + +describe('CustomParamSelect', () => { + it('renders correct options', () => { + const fieldsCount = Object.keys(TOPIC_CUSTOM_PARAMS).length; + const wrapper = mount(<Wrapper existingFields={existingFields} />); + const options = wrapper.find('option'); + const disabledOptions = options.filterWhere((o) => !!o.prop('disabled')); + + expect(options.length).toEqual(fieldsCount + 1); + expect(disabledOptions.length).toEqual(existingFields.length); + }); + + it('matches snapshot', () => { + expect( + mount(<Wrapper existingFields={existingFields} />).find( + 'Memo(CustomParamSelect)' + ) + ).toMatchSnapshot(); + }); +}); diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/__snapshots__/CustomParamSelect.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/__snapshots__/CustomParamSelect.spec.tsx.snap new file mode 100644 index 00000000000..dab5c8e1107 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/__snapshots__/CustomParamSelect.spec.tsx.snap @@ -0,0 +1,201 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`CustomParamSelect matches snapshot 1`] = ` +<Memo(CustomParamSelect) + existingFields={ + Array [ + "leader.replication.throttled.replicas", + "segment.index.bytes", + "message.timestamp.difference.max.ms", + ] + } + index="1" + isDisabled={true} + name="my_custom_param" + onNameChange={[MockFunction]} +> + <label + className="label" + > + Custom Parameter + </label> + <div + className="select is-block" + > + <select + defaultValue="my_custom_param" + disabled={true} + name="1[name]" + onChange={[Function]} + > + <option + value="" + > + Select + </option> + <option + disabled={false} + key="compression.type" + value="compression.type" + > + compression.type + </option> + <option + disabled={true} + key="leader.replication.throttled.replicas" + value="leader.replication.throttled.replicas" + > + leader.replication.throttled.replicas + </option> + <option + disabled={false} + key="message.downconversion.enable" + value="message.downconversion.enable" + > + message.downconversion.enable + </option> + <option + disabled={false} + key="segment.jitter.ms" + 
value="segment.jitter.ms" + > + segment.jitter.ms + </option> + <option + disabled={false} + key="flush.ms" + value="flush.ms" + > + flush.ms + </option> + <option + disabled={false} + key="follower.replication.throttled.replicas" + value="follower.replication.throttled.replicas" + > + follower.replication.throttled.replicas + </option> + <option + disabled={false} + key="segment.bytes" + value="segment.bytes" + > + segment.bytes + </option> + <option + disabled={false} + key="flush.messages" + value="flush.messages" + > + flush.messages + </option> + <option + disabled={false} + key="message.format.version" + value="message.format.version" + > + message.format.version + </option> + <option + disabled={false} + key="file.delete.delay.ms" + value="file.delete.delay.ms" + > + file.delete.delay.ms + </option> + <option + disabled={false} + key="max.compaction.lag.ms" + value="max.compaction.lag.ms" + > + max.compaction.lag.ms + </option> + <option + disabled={false} + key="min.compaction.lag.ms" + value="min.compaction.lag.ms" + > + min.compaction.lag.ms + </option> + <option + disabled={false} + key="message.timestamp.type" + value="message.timestamp.type" + > + message.timestamp.type + </option> + <option + disabled={false} + key="preallocate" + value="preallocate" + > + preallocate + </option> + <option + disabled={false} + key="min.cleanable.dirty.ratio" + value="min.cleanable.dirty.ratio" + > + min.cleanable.dirty.ratio + </option> + <option + disabled={false} + key="index.interval.bytes" + value="index.interval.bytes" + > + index.interval.bytes + </option> + <option + disabled={false} + key="unclean.leader.election.enable" + value="unclean.leader.election.enable" + > + unclean.leader.election.enable + </option> + <option + disabled={false} + key="retention.bytes" + value="retention.bytes" + > + retention.bytes + </option> + <option + disabled={false} + key="delete.retention.ms" + value="delete.retention.ms" + > + delete.retention.ms + </option> + <option + disabled={false} + key="segment.ms" + value="segment.ms" + > + segment.ms + </option> + <option + disabled={true} + key="message.timestamp.difference.max.ms" + value="message.timestamp.difference.max.ms" + > + message.timestamp.difference.max.ms + </option> + <option + disabled={true} + key="segment.index.bytes" + value="segment.index.bytes" + > + segment.index.bytes + </option> + </select> + <p + className="help is-danger" + > + <Component + errors={Object {}} + name="1[name]" + /> + </p> + </div> +</Memo(CustomParamSelect)> +`; diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/customParamsOptions.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/customParamsOptions.tsx deleted file mode 100644 index 4613baeecb5..00000000000 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/customParamsOptions.tsx +++ /dev/null @@ -1,98 +0,0 @@ -import { TopicConfigOption } from 'redux/interfaces'; - -interface TopicConfigOptions { - [optionName: string]: TopicConfigOption; -} - -const CUSTOM_PARAMS_OPTIONS: TopicConfigOptions = { - 'compression.type': { - name: 'compression.type', - defaultValue: 'producer', - }, - 'leader.replication.throttled.replicas': { - name: 'leader.replication.throttled.replicas', - defaultValue: '', - }, - 'message.downconversion.enable': { - name: 'message.downconversion.enable', - defaultValue: 'true', - }, - 'segment.jitter.ms': { - name: 'segment.jitter.ms', - defaultValue: '0', - }, - 'flush.ms': { - name: 'flush.ms', - defaultValue: 
'9223372036854775807', - }, - 'follower.replication.throttled.replicas': { - name: 'follower.replication.throttled.replicas', - defaultValue: '', - }, - 'segment.bytes': { - name: 'segment.bytes', - defaultValue: '1073741824', - }, - 'flush.messages': { - name: 'flush.messages', - defaultValue: '9223372036854775807', - }, - 'message.format.version': { - name: 'message.format.version', - defaultValue: '2.3-IV1', - }, - 'file.delete.delay.ms': { - name: 'file.delete.delay.ms', - defaultValue: '60000', - }, - 'max.compaction.lag.ms': { - name: 'max.compaction.lag.ms', - defaultValue: '9223372036854775807', - }, - 'min.compaction.lag.ms': { - name: 'min.compaction.lag.ms', - defaultValue: '0', - }, - 'message.timestamp.type': { - name: 'message.timestamp.type', - defaultValue: 'CreateTime', - }, - preallocate: { - name: 'preallocate', - defaultValue: 'false', - }, - 'min.cleanable.dirty.ratio': { - name: 'min.cleanable.dirty.ratio', - defaultValue: '0.5', - }, - 'index.interval.bytes': { - name: 'index.interval.bytes', - defaultValue: '4096', - }, - 'unclean.leader.election.enable': { - name: 'unclean.leader.election.enable', - defaultValue: 'true', - }, - 'retention.bytes': { - name: 'retention.bytes', - defaultValue: '-1', - }, - 'delete.retention.ms': { - name: 'delete.retention.ms', - defaultValue: '86400000', - }, - 'segment.ms': { - name: 'segment.ms', - defaultValue: '604800000', - }, - 'message.timestamp.difference.max.ms': { - name: 'message.timestamp.difference.max.ms', - defaultValue: '9223372036854775807', - }, - 'segment.index.bytes': { - name: 'segment.index.bytes', - defaultValue: '10485760', - }, -}; - -export default CUSTOM_PARAMS_OPTIONS; diff --git a/kafka-ui-react-app/src/lib/constants.ts b/kafka-ui-react-app/src/lib/constants.ts index 53bf47144e8..76de060f45f 100644 --- a/kafka-ui-react-app/src/lib/constants.ts +++ b/kafka-ui-react-app/src/lib/constants.ts @@ -11,6 +11,31 @@ export const BASE_PARAMS: ConfigurationParameters = { export const TOPIC_NAME_VALIDATION_PATTERN = RegExp(/^[.,A-Za-z0-9_-]+$/); export const SCHEMA_NAME_VALIDATION_PATTERN = RegExp(/^[.,A-Za-z0-9_-]+$/); +export const TOPIC_CUSTOM_PARAMS: Record<string, string> = { + 'compression.type': 'producer', + 'leader.replication.throttled.replicas': '', + 'message.downconversion.enable': 'true', + 'segment.jitter.ms': '0', + 'flush.ms': '9223372036854775807', + 'follower.replication.throttled.replicas': '', + 'segment.bytes': '1073741824', + 'flush.messages': '9223372036854775807', + 'message.format.version': '2.3-IV1', + 'file.delete.delay.ms': '60000', + 'max.compaction.lag.ms': '9223372036854775807', + 'min.compaction.lag.ms': '0', + 'message.timestamp.type': 'CreateTime', + preallocate: 'false', + 'min.cleanable.dirty.ratio': '0.5', + 'index.interval.bytes': '4096', + 'unclean.leader.election.enable': 'true', + 'retention.bytes': '-1', + 'delete.retention.ms': '86400000', + 'segment.ms': '604800000', + 'message.timestamp.difference.max.ms': '9223372036854775807', + 'segment.index.bytes': '10485760', +}; + export const MILLISECONDS_IN_WEEK = 604_800_000; export const MILLISECONDS_IN_DAY = 86_400_000; export const MILLISECONDS_IN_SECOND = 1_000; diff --git a/kafka-ui-react-app/src/redux/interfaces/topic.ts b/kafka-ui-react-app/src/redux/interfaces/topic.ts index d7c2ee99fcb..16968241f6a 100644 --- a/kafka-ui-react-app/src/redux/interfaces/topic.ts +++ b/kafka-ui-react-app/src/redux/interfaces/topic.ts @@ -22,11 +22,6 @@ export interface TopicConfigParams { [paramName: string]: TopicConfig; } -export 
interface TopicConfigOption { - name: TopicConfig['name']; - defaultValue: TopicConfig['defaultValue']; -} - export interface TopicConfigValue { name: TopicConfig['name']; value: TopicConfig['value'];
null
val
train
2021-03-24T11:24:10
"2021-03-23T11:50:08Z"
iliax
train
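The substantive change in the record above is data-shape cleanup: the verbose `TopicConfigOption` objects become one flat name-to-default map (`TOPIC_CUSTOM_PARAMS`), and already-used options are rendered with a `disabled` flag instead of being filtered out via lodash `omitBy`. The same shape expressed in Java, with a few defaults copied from the patch (the class itself is illustrative only):

```java
import java.util.List;
import java.util.Map;

public class TopicCustomParams {
  // Config name -> default value, as in lib/constants.ts after the patch.
  static final Map<String, String> DEFAULTS = Map.of(
      "compression.type", "producer",
      "segment.bytes", "1073741824",
      "retention.bytes", "-1",
      "min.cleanable.dirty.ratio", "0.5",
      "delete.retention.ms", "86400000");

  // Keep already-used names visible but mark them disabled, mirroring
  // the <option disabled={existingFields.includes(opt)}> change.
  static void listOptions(List<String> existingFields) {
    DEFAULTS.keySet().stream().sorted().forEach(name ->
        System.out.println(name + (existingFields.contains(name) ? " (disabled)" : "")));
  }
}
```

Keeping disabled entries visible is also what makes the record's new snapshot test trivial: it simply counts total options against disabled ones.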
provectus/kafka-ui/422_495
provectus/kafka-ui
provectus/kafka-ui/422
provectus/kafka-ui/495
[ "timestamp(timedelta=1509.0, similarity=0.982228428969711)" ]
acdcf2eab687de9289c7080bac04d9cde9cab5f8
30ceb98491702d982baaf9e88bd354ce4dc184ee
[]
[]
"2021-05-25T09:46:53Z"
[ "good first issue", "scope/frontend" ]
Bump redux from 4.0.5 to 4.1.0
[ "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap", "kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap" ]
[ "kafka-ui-react-app/package-lock.json", "kafka-ui-react-app/package.json", "kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap", "kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap" ]
[]
diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index f1bb970b6fd..ef74fb76cfc 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -32,7 +32,7 @@ "react-redux": "^7.2.2", "react-router": "^5.2.0", "react-router-dom": "^5.2.0", - "redux": "^4.0.5", + "redux": "^4.1.0", "redux-thunk": "^2.3.0", "reselect": "^4.0.0", "typesafe-actions": "^5.1.0", @@ -2059,6 +2059,7 @@ "jest-resolve": "^26.6.2", "jest-util": "^26.6.2", "jest-worker": "^26.6.2", + "node-notifier": "^8.0.0", "slash": "^3.0.0", "source-map": "^0.6.0", "string-length": "^4.0.1", @@ -7752,6 +7753,9 @@ "resolved": "https://registry.npmjs.org/easy-table/-/easy-table-1.1.0.tgz", "integrity": "sha1-hvmrTBAvA3G3KXuSplHVgkvIy3M=", "dev": true, + "dependencies": { + "wcwidth": ">=1.0.1" + }, "optionalDependencies": { "wcwidth": ">=1.0.1" } @@ -12532,6 +12536,7 @@ "@types/node": "*", "anymatch": "^3.0.3", "fb-watchman": "^2.0.0", + "fsevents": "^2.1.2", "graceful-fs": "^4.2.4", "jest-regex-util": "^26.0.0", "jest-serializer": "^26.6.2", @@ -18787,6 +18792,7 @@ "eslint-webpack-plugin": "^2.5.2", "file-loader": "6.1.1", "fs-extra": "^9.0.1", + "fsevents": "^2.1.3", "html-webpack-plugin": "4.5.0", "identity-obj-proxy": "3.0.0", "jest": "26.6.0", @@ -19145,12 +19151,11 @@ } }, "node_modules/redux": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/redux/-/redux-4.0.5.tgz", - "integrity": "sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.0.tgz", + "integrity": "sha512-uI2dQN43zqLWCt6B/BMGRMY6db7TTY4qeHHfGeKb3EOhmOKjU3KdWvNLJyqaHRksv/ErdNH7cFZWg9jXtewy4g==", "dependencies": { - "loose-envify": "^1.4.0", - "symbol-observable": "^1.2.0" + "@babel/runtime": "^7.9.2" } }, "node_modules/redux-mock-store": { @@ -21582,14 +21587,6 @@ "node": ">=4" } }, - "node_modules/symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -22920,8 +22917,10 @@ "integrity": "sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==", "dev": true, "dependencies": { + "chokidar": "^3.4.1", "graceful-fs": "^4.1.2", - "neo-async": "^2.5.0" + "neo-async": "^2.5.0", + "watchpack-chokidar2": "^2.0.1" }, "optionalDependencies": { "chokidar": "^3.4.1", @@ -23387,6 +23386,7 @@ "anymatch": "^2.0.0", "async-each": "^1.0.1", "braces": "^2.3.2", + "fsevents": "^1.2.7", "glob-parent": "^3.1.0", "inherits": "^2.0.3", "is-binary-path": "^1.0.0", @@ -40726,12 +40726,11 @@ } }, "redux": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/redux/-/redux-4.0.5.tgz", - "integrity": "sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/redux/-/redux-4.1.0.tgz", + "integrity": "sha512-uI2dQN43zqLWCt6B/BMGRMY6db7TTY4qeHHfGeKb3EOhmOKjU3KdWvNLJyqaHRksv/ErdNH7cFZWg9jXtewy4g==", "requires": { - "loose-envify": "^1.4.0", - "symbol-observable": "^1.2.0" + "@babel/runtime": "^7.9.2" } }, "redux-mock-store": { @@ -42840,11 +42839,6 @@ } } }, - 
"symbol-observable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/symbol-observable/-/symbol-observable-1.2.0.tgz", - "integrity": "sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==" - }, "symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", diff --git a/kafka-ui-react-app/package.json b/kafka-ui-react-app/package.json index d3d42565586..857893af15c 100644 --- a/kafka-ui-react-app/package.json +++ b/kafka-ui-react-app/package.json @@ -27,7 +27,7 @@ "react-redux": "^7.2.2", "react-router": "^5.2.0", "react-router-dom": "^5.2.0", - "redux": "^4.0.5", + "redux": "^4.1.0", "redux-thunk": "^2.3.0", "reselect": "^4.0.0", "typesafe-actions": "^5.1.0", diff --git a/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap b/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap index 49164c48f82..02692276d8c 100644 --- a/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap @@ -4,11 +4,11 @@ exports[`Connectors ListItem matches snapshot 1`] = ` <Provider store={ Object { + "@@observable": [Function], "dispatch": [Function], "getState": [Function], "replaceReducer": [Function], "subscribe": [Function], - Symbol(observable): [Function], } } > diff --git a/kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap b/kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap index 1dfab96895f..7d792e0c5a5 100644 --- a/kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/__tests__/__snapshots__/App.spec.tsx.snap @@ -4,11 +4,11 @@ exports[`App view matches snapshot 1`] = ` <Provider store={ Object { + "@@observable": [Function], "dispatch": [Function], "getState": [Function], "replaceReducer": [Function], "subscribe": [Function], - Symbol(observable): [Function], } } >
null
train
train
2021-05-24T21:40:17
"2021-05-03T21:46:37Z"
workshur
train
provectus/kafka-ui/447_558
provectus/kafka-ui
provectus/kafka-ui/447
provectus/kafka-ui/558
[ "timestamp(timedelta=40.0, similarity=0.8442054842264117)" ]
13463fe95f8c6c375b09d47e7da41a87b8a47134
536d7281ddd243f56e507f7adc4bcb46e7ff1196
[]
[ "you should use generated api from openapi" ]
"2021-06-15T12:41:40Z"
[ "scope/QA" ]
[e2e-check] [test-data-preparation] Creating new topic via API
e2e checks should be able to use entities (such as topics) created in previous steps. But to create a certain test state we shouldn't always go through the UI; otherwise the checks become slow and brittle. We need a step for creating a topic via the `kafka-ui` API
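For context, a minimal sketch of what such a preparation step can look like with the Java client generated from the kafka-ui OpenAPI contract (it mirrors the `ApiHelper` introduced in the gold patch below; the base URL and the cluster/topic names are placeholders for a local setup):

```java
import com.provectus.kafka.ui.api.ApiClient;
import com.provectus.kafka.ui.api.api.TopicsApi;
import com.provectus.kafka.ui.api.model.TopicCreation;

public class TopicTestDataSetup {

    public static void main(String[] args) {
        // Assumption: kafka-ui is reachable locally on port 8080
        ApiClient client = new ApiClient();
        client.setBasePath("http://localhost:8080/");
        TopicsApi topicsApi = new TopicsApi(client);

        // Describe the topic that the e2e check expects to find
        TopicCreation topic = new TopicCreation();
        topic.setName("new-topic");
        topic.setPartitions(1);
        topic.setReplicationFactor(1);

        // The generated webclient API is reactive, so block() waits for completion
        topicsApi.createTopic("secondLocal", topic).block();
    }
}
```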
[ "kafka-ui-contract/pom.xml", "kafka-ui-e2e-checks/pom.xml" ]
[ "kafka-ui-contract/pom.xml", "kafka-ui-e2e-checks/pom.xml" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/ApiHelper.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/Helpers.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java" ]
diff --git a/kafka-ui-contract/pom.xml b/kafka-ui-contract/pom.xml index 8c3ff23786c..b6700196f44 100644 --- a/kafka-ui-contract/pom.xml +++ b/kafka-ui-contract/pom.xml @@ -48,6 +48,29 @@ <artifactId>openapi-generator-maven-plugin</artifactId> <version>${openapi-generator-maven-plugin.version}</version> <executions> + <execution> + <id>generate-kafka-ui-client</id> + <goals> + <goal>generate</goal> + </goals> + <configuration> + <inputSpec>${project.basedir}/src/main/resources/swagger/kafka-ui-api.yaml + </inputSpec> + <output>${project.build.directory}/generated-sources/kafka-ui-client</output> + <generatorName>java</generatorName> + <generateApiTests>false</generateApiTests> + <generateModelTests>false</generateModelTests> + <configOptions> + <modelPackage>com.provectus.kafka.ui.api.model</modelPackage> + <apiPackage>com.provectus.kafka.ui.api.api</apiPackage> + <sourceFolder>kafka-ui-client</sourceFolder> + <asyncNative>true</asyncNative> + <library>webclient</library> + <useBeanValidation>true</useBeanValidation> + <dateLibrary>java8</dateLibrary> + </configOptions> + </configuration> + </execution> <execution> <id>generate-backend-api</id> <goals> diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml index 74e93453cb9..e934fb6e289 100644 --- a/kafka-ui-e2e-checks/pom.xml +++ b/kafka-ui-e2e-checks/pom.xml @@ -11,9 +11,11 @@ <artifactId>kafka-ui-e2e-checks</artifactId> <properties> + <kafka-ui-contract>0.1.1-SNAPSHOT</kafka-ui-contract> <junit.version>5.7.0</junit.version> <aspectj.version>1.9.6</aspectj.version> <allure.version>2.13.7</allure.version> + <json-smart.version>1.1.1</json-smart.version> <testcontainers.version>1.15.2</testcontainers.version> <selenide.version>5.16.2</selenide.version> <assertj.version>3.17.1</assertj.version> @@ -32,6 +34,11 @@ </properties> <dependencies> + <dependency> + <groupId>net.minidev</groupId> + <artifactId>json-smart</artifactId> + <version>${json-smart.version}</version> + </dependency> <dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka_2.13</artifactId> @@ -141,6 +148,12 @@ <artifactId>screen-diff-plugin</artifactId> <version>${allure.screendiff-plugin.version}</version> </dependency> + <dependency> + <groupId>com.provectus</groupId> + <artifactId>kafka-ui-contract</artifactId> + <version>${kafka-ui-contract}</version> + <scope>test</scope> + </dependency> </dependencies> <build> <plugins>
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java index ebf90807880..a5ac78f5660 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/base/BaseTest.java @@ -2,6 +2,7 @@ import com.codeborne.selenide.Configuration; import com.codeborne.selenide.logevents.SelenideLogger; +import com.provectus.kafka.ui.helpers.Helpers; import com.provectus.kafka.ui.pages.Pages; import com.provectus.kafka.ui.screenshots.Screenshooter; import com.provectus.kafka.ui.steps.Steps; @@ -27,6 +28,7 @@ public class BaseTest { protected Steps steps = Steps.INSTANCE; protected Pages pages = Pages.INSTANCE; + protected Helpers helpers = Helpers.INSTANCE; private Screenshooter screenshooter = new Screenshooter(); diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/ApiHelper.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/ApiHelper.java new file mode 100644 index 00000000000..0008cd64e4e --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/ApiHelper.java @@ -0,0 +1,47 @@ +package com.provectus.kafka.ui.helpers; + +import lombok.SneakyThrows; + +import com.provectus.kafka.ui.api.*; +import com.provectus.kafka.ui.api.model.*; +import com.provectus.kafka.ui.api.api.TopicsApi; + +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +public class ApiHelper { + int partitions = 1; + int replicationFactor = 1; + String newTopic = "new-topic"; + String baseURL = "http://localhost:8080/"; + + + + + @SneakyThrows + private TopicsApi topicApi(){ + ApiClient defaultClient = new ApiClient(); + defaultClient.setBasePath(baseURL); + TopicsApi topicsApi = new TopicsApi(defaultClient); + return topicsApi; + } + + + + + @SneakyThrows + public void createTopic(String clusterName, String topicName) { + TopicCreation topic = new TopicCreation(); + topic.setName(topicName); + topic.setPartitions(partitions); + topic.setReplicationFactor(replicationFactor); + topicApi().createTopic(clusterName,topic).block(); + } + + @SneakyThrows + public void deleteTopic(String clusterName, String topicName) { + topicApi().deleteTopic(clusterName,topicName).block(); + } + +} diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/Helpers.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/Helpers.java new file mode 100644 index 00000000000..cda55a0cb25 --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/helpers/Helpers.java @@ -0,0 +1,11 @@ +package com.provectus.kafka.ui.helpers; + + + +public class Helpers { + public static final Helpers INSTANCE = new Helpers(); + + private Helpers(){} + + public ApiHelper apiHelper = new ApiHelper(); +} diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java index a6a97299f03..b32d9b992f1 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java @@ -3,8 +3,12 @@ import com.provectus.kafka.ui.base.BaseTest; import com.provectus.kafka.ui.pages.MainPage; import com.provectus.kafka.ui.steps.kafka.KafkaSteps; +import com.provectus.kafka.ui.helpers.ApiHelper; import lombok.SneakyThrows; -import 
org.junit.jupiter.api.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; public class TopicTests extends BaseTest { @@ -13,18 +17,18 @@ public class TopicTests extends BaseTest { @AfterEach @SneakyThrows - void afterEach(){ - steps.kafka.deleteTopic(KafkaSteps.Cluster.SECOND_LOCAL,NEW_TOPIC); + void afterEach(){ + helpers.apiHelper.deleteTopic("secondLocal","new-topic"); } @SneakyThrows @DisplayName("should create a topic") @Test void createTopic(){ - steps.kafka.createTopic(KafkaSteps.Cluster.SECOND_LOCAL,NEW_TOPIC); + helpers.apiHelper.createTopic("secondLocal","new-topic"); pages.open() .mainPage.shouldBeOnPage() - .goToSideMenu(KafkaSteps.Cluster.SECOND_LOCAL.getName(), MainPage.SideMenuOptions.TOPICS) + .goToSideMenu("secondLocal", MainPage.SideMenuOptions.TOPICS) .shouldBeTopic(NEW_TOPIC); } }
val
train
2021-07-09T14:55:43
"2021-05-11T13:31:18Z"
mikementor
train
provectus/kafka-ui/449_472
provectus/kafka-ui
provectus/kafka-ui/449
provectus/kafka-ui/472
[ "timestamp(timedelta=79967.0, similarity=0.8602940856927024)" ]
7801c292af24049f7d53e32815eb06c707e86dfb
407375c6788aa7b7812ba25291ac17afbcf99b99
[ "https://github.com/marketplace/actions/issue-checklist-checker" ]
[]
"2021-05-18T08:03:02Z"
[]
add github/templates for pull-requests and issues
Also, look into bots that will check that the issue is described properly
[]
[ ".github/ISSUE_TEMPLATE/bug_report.md", ".github/ISSUE_TEMPLATE/feature_request.md", ".github/PULL_REQUEST_TEMPLATE.md", ".github/workflows/pr-checks.yaml" ]
[]
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000000..0f821e5b689 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,31 @@ +--- +name: "\U0001F41E Bug report" +about: Create a report to help us improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +(A clear and concise description of what the bug is.) + + +**Set up** +(How do you run the app?) + + +**Steps to Reproduce** +Steps to reproduce the behavior: + +1. + +**Expected behavior** +(A clear and concise description of what you expected to happen) + +**Screenshots** +(If applicable, add screenshots to help explain your problem) + + +**Additional context** +(Add any other context about the problem here) \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000000..38947541fd3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,42 @@ +--- +name: "\U0001F680 Feature request" +about: Propose a new feature +title: '' +labels: enhancement +assignees: '' + +--- + +### Is your proposal related to a problem? + +<!-- + Provide a clear and concise description of what the problem is. + For example, "I'm always frustrated when..." +--> + +(Write your answer here.) + +### Describe the solution you'd like + +<!-- + Provide a clear and concise description of what you want to happen. +--> + +(Describe your proposed solution here.) + +### Describe alternatives you've considered + +<!-- + Let us know about other solutions you've tried or researched. +--> + +(Write your answer here.) + +### Additional context + +<!-- + Is there anything else you can add about the proposal? + You might want to link to related issues here, if you haven't already. +--> + +(Write your answer here.) \ No newline at end of file diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000000..d42d66aaa57 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,27 @@ +<!-- ignore-task-list-start --> +- [ ] **Breaking change?** (if so, please describe the impact and migration path for existing applications:) +<!-- ignore-task-list-end --> +**What changes did you make?** (Give an overview) + +**Is there anything you'd like reviewers to focus on?** + + +**How Has This Been Tested?** (put an "X" next to an item) +<!-- ignore-task-list-start --> +- [ ] No need to +- [ ] Manually(please, describe, when necessary) +- [ ] Unit checks +- [ ] Integration checks +- [ ] Covered by existing automation +<!-- ignore-task-list-end --> + +**Checklist** (put an "X" next to an item, otherwise PR will fail) +- [ ] I have performed a self-review of my own code +- [ ] I have commented my code, particularly in hard-to-understand areas +- [ ] I have made corresponding changes to the documentation (e.g. **ENVIRONMENT VARIABLES**) +- [ ] My changes generate no new warnings(e.g. 
Sonar is happy) +- [ ] I have added tests that prove my fix is effective or that my feature works +- [ ] New and existing unit tests pass locally with my changes +- [ ] Any dependent changes have been merged + +Check out [Contributing](https://github.com/provectus/kafka-ui/blob/master/CONTRIBUTING.md) and [Code of Conduct](https://github.com/provectus/kafka-ui/blob/master/CODE-OF-CONDUCT.md) \ No newline at end of file diff --git a/.github/workflows/pr-checks.yaml b/.github/workflows/pr-checks.yaml new file mode 100644 index 00000000000..723966eb77d --- /dev/null +++ b/.github/workflows/pr-checks.yaml @@ -0,0 +1,15 @@ +name: 'PR Checklist checked' +on: + pull_request: + types: [opened, edited, synchronized, reopened] + +jobs: + task-check: + runs-on: ubuntu-latest + steps: + - uses: kentaro-m/[email protected] + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + - uses: derkinderfietsen/pr-description-enforcer@v1 + with: + repo-token: '${{ secrets.GITHUB_TOKEN }}' \ No newline at end of file
null
train
train
2021-05-18T09:05:25
"2021-05-12T09:11:53Z"
mikementor
train
provectus/kafka-ui/493_679
provectus/kafka-ui
provectus/kafka-ui/493
provectus/kafka-ui/679
[ "timestamp(timedelta=0.0, similarity=0.9221294910731154)" ]
326786e4ff5a9ff47dd4902dd981ac9d2d6bcad9
20cbcd33e25c4187d6c35b636b453090a24aa8d5
[]
[ "url?", "could we support both object and string scenarios?", "could we check only user & pass fields? why do we need additional boolean for this?", "could we set it on webclient level?", "Here we can make requests for different clusters and in each cluster can be it's own Schema Registry with it's own settings. So we can't use pre configured web client", "Could you please move these properties to constants or replace them with existing ones?", "I added method which returns configured WebClient", "Resolved", "I meant `basic.auth.user.info` and `basic.auth.credentials.source`", "Done.\r\nI used constants stored in `io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig`." ]
"2021-07-14T13:05:09Z"
[ "type/enhancement", "scope/backend" ]
Add authentication for Schema Registry
It's not possible to add authentication for Schema Registry, so I get this error: Failed to get Schema for topic appuntamento-claimgen io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException: Unexpected character ('<' (code 60)): expected a valid value (JSON String, Number, Array, Object or token 'null', 'true' or 'false') at [Source: (sun.net.www.protocol.http.HttpURLConnection$HttpInputStream); line: 1, column: 2]; error code: 50005 at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:292) ~[kafka-schema-registry-client-5.5.1.jar!/:?] It would be appreciated if it were possible to add env variables to configure auth for Schema Registry.
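For reference, the Confluent client already supports basic auth through two well-known settings, which is what the fix below wires in. A minimal sketch, assuming the registry URL and credentials are placeholders:

```java
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE;
import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.USER_INFO_CONFIG;

import io.confluent.kafka.schemaregistry.SchemaProvider;
import io.confluent.kafka.schemaregistry.avro.AvroSchemaProvider;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;
import io.confluent.kafka.schemaregistry.client.SchemaRegistryClient;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RegistryClientFactory {

    static SchemaRegistryClient create(String url, String username, String password) {
        // Credentials are passed as "user:password" with the USER_INFO source
        Map<String, String> configs = new HashMap<>();
        configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
        configs.put(USER_INFO_CONFIG, username + ":" + password);

        List<SchemaProvider> providers = List.of(new AvroSchemaProvider());
        return new CachedSchemaRegistryClient(
            Collections.singletonList(url),
            100, // identity map capacity
            providers,
            configs
        );
    }

    public static void main(String[] args) {
        // Placeholder values; constructing the client does not hit the network
        SchemaRegistryClient client = create("http://localhost:8085", "username", "password");
        System.out.println(client != null);
    }
}
```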
[ "README.md", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java" ]
[ "README.md", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java" ]
[]
diff --git a/README.md b/README.md index 6feb09572ae..927b67b2800 100644 --- a/README.md +++ b/README.md @@ -132,6 +132,9 @@ kafka: bootstrapServers: localhost:29091 zookeeper: localhost:2183 schemaRegistry: http://localhost:8085 + schemaRegistryAuth: + username: username + password: password # schemaNameTemplate: "%s-value" jmxPort: 9997 - @@ -141,6 +144,8 @@ kafka: * `bootstrapServers`: where to connect * `zookeeper`: zookeeper service address * `schemaRegistry`: schemaRegistry's address +* `schemaRegistryAuth.username`: schemaRegistry's basic authentication username +* `schemaRegistryAuth.password`: schemaRegistry's basic authentication password * `schemaNameTemplate`: how keys are saved to schemaRegistry * `jmxPort`: open jmxPosrts of a broker * `readOnly`: enable read only mode @@ -160,6 +165,8 @@ For example, if you want to use an environment variable to set the `name` parame |`KAFKA_CLUSTERS_0_ZOOKEEPER` | Zookeper service address |`KAFKA_CLUSTERS_0_PROPERTIES_SECURITY_PROTOCOL` |Security protocol to connect to the brokers. For SSL connection use "SSL", for plaintext connection don't set this environment variable |`KAFKA_CLUSTERS_0_SCHEMAREGISTRY` |SchemaRegistry's address +|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_USERNAME` |SchemaRegistry's basic authentication username +|`KAFKA_CLUSTERS_0_SCHEMAREGISTRYAUTH_PASSWORD` |SchemaRegistry's basic authentication password |`KAFKA_CLUSTERS_0_SCHEMANAMETEMPLATE` |How keys are saved to schemaRegistry |`KAFKA_CLUSTERS_0_JMXPORT` |Open jmxPosrts of a broker |`KAFKA_CLUSTERS_0_READONLY` |Enable read only mode. Default: false diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java index 82954dff363..f847ca2a80d 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/ClustersProperties.java @@ -20,6 +20,7 @@ public static class Cluster { String bootstrapServers; String zookeeper; String schemaRegistry; + SchemaRegistryAuth schemaRegistryAuth; String schemaNameTemplate = "%s-value"; String keySchemaNameTemplate = "%s-key"; String protobufFile; @@ -35,4 +36,10 @@ public static class ConnectCluster { String name; String address; } + + @Data + public static class SchemaRegistryAuth { + String username; + String password; + } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java index 766f3f24b32..321a6c08713 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/mapper/ClusterMapper.java @@ -15,6 +15,7 @@ import com.provectus.kafka.ui.model.InternalClusterMetrics; import com.provectus.kafka.ui.model.InternalPartition; import com.provectus.kafka.ui.model.InternalReplica; +import com.provectus.kafka.ui.model.InternalSchemaRegistry; import com.provectus.kafka.ui.model.InternalTopic; import com.provectus.kafka.ui.model.InternalTopicConfig; import com.provectus.kafka.ui.model.KafkaCluster; @@ -49,6 +50,7 @@ public interface ClusterMapper { @Mapping(target = "protobufFile", source = "protobufFile", qualifiedByName = "resolvePath") @Mapping(target = "properties", source = "properties", qualifiedByName = "setProperties") + @Mapping(target = "schemaRegistry", source = ".", qualifiedByName = "setSchemaRegistry") KafkaCluster 
toKafkaCluster(ClustersProperties.Cluster clusterProperties); @Mapping(target = "diskUsage", source = "internalBrokerDiskUsage", @@ -64,6 +66,24 @@ public interface ClusterMapper { Partition toPartition(InternalPartition topic); + default InternalSchemaRegistry setSchemaRegistry(ClustersProperties.Cluster clusterProperties) { + if (clusterProperties == null) { + return null; + } + + InternalSchemaRegistry.InternalSchemaRegistryBuilder internalSchemaRegistry = + InternalSchemaRegistry.builder(); + + internalSchemaRegistry.url(clusterProperties.getSchemaRegistry()); + + if (clusterProperties.getSchemaRegistryAuth() != null) { + internalSchemaRegistry.username(clusterProperties.getSchemaRegistryAuth().getUsername()); + internalSchemaRegistry.password(clusterProperties.getSchemaRegistryAuth().getPassword()); + } + + return internalSchemaRegistry.build(); + } + TopicDetails toTopicDetails(InternalTopic topic); default TopicDetails toTopicDetails(InternalTopic topic, InternalClusterMetrics metrics) { diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java new file mode 100644 index 00000000000..378f2706f58 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalSchemaRegistry.java @@ -0,0 +1,12 @@ +package com.provectus.kafka.ui.model; + +import lombok.Builder; +import lombok.Data; + +@Data +@Builder(toBuilder = true) +public class InternalSchemaRegistry { + private final String username; + private final String password; + private final String url; +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java index 805f0937e96..68b16073761 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/KafkaCluster.java @@ -15,7 +15,7 @@ public class KafkaCluster { private final Integer jmxPort; private final String bootstrapServers; private final String zookeeper; - private final String schemaRegistry; + private final InternalSchemaRegistry schemaRegistry; private final List<KafkaConnectCluster> kafkaConnect; private final String schemaNameTemplate; private final String keySchemaNameTemplate; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java index 08aef7455ff..03f1431b82d 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java @@ -1,6 +1,11 @@ package com.provectus.kafka.ui.serde.schemaregistry; + +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.BASIC_AUTH_CREDENTIALS_SOURCE; +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.USER_INFO_CONFIG; + import com.fasterxml.jackson.databind.ObjectMapper; +import com.provectus.kafka.ui.exception.ValidationException; import com.provectus.kafka.ui.model.KafkaCluster; import com.provectus.kafka.ui.model.MessageSchema; import com.provectus.kafka.ui.model.TopicMessageSchema; @@ -22,6 +27,7 @@ import java.net.URI; import java.nio.ByteBuffer; import java.util.Collections; +import java.util.HashMap; import java.util.List; 
import java.util.Map; import java.util.Objects; @@ -63,14 +69,29 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe { private static SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster) { Objects.requireNonNull(cluster.getSchemaRegistry()); + Objects.requireNonNull(cluster.getSchemaRegistry().getUrl()); List<SchemaProvider> schemaProviders = List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider()); - //TODO add auth + + Map<String, String> configs = new HashMap<>(); + String username = cluster.getSchemaRegistry().getUsername(); + String password = cluster.getSchemaRegistry().getPassword(); + + if (username != null && password != null) { + configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO"); + configs.put(USER_INFO_CONFIG, username + ":" + password); + } else if (username != null) { + throw new ValidationException( + "You specified username but do not specified password"); + } else if (password != null) { + throw new ValidationException( + "You specified password but do not specified username"); + } return new CachedSchemaRegistryClient( - Collections.singletonList(cluster.getSchemaRegistry()), + Collections.singletonList(cluster.getSchemaRegistry().getUrl()), CLIENT_IDENTITY_MAP_CAPACITY, schemaProviders, - Collections.emptyMap() + configs ); } @@ -181,7 +202,8 @@ public TopicMessageSchema getTopicSchema(String topic) { private String convertSchema(SchemaMetadata schema) { String jsonSchema; - URI basePath = new URI(cluster.getSchemaRegistry()).resolve(Integer.toString(schema.getId())); + URI basePath = new URI(cluster.getSchemaRegistry().getUrl()) + .resolve(Integer.toString(schema.getId())); final ParsedSchema schemaById = Objects.requireNonNull(schemaRegistryClient) .getSchemaById(schema.getId()); diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java index 41374120b81..611d6eedf42 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/SchemaRegistryService.java @@ -7,9 +7,11 @@ import com.provectus.kafka.ui.exception.DuplicateEntityException; import com.provectus.kafka.ui.exception.SchemaNotFoundException; import com.provectus.kafka.ui.exception.UnprocessableEntityException; +import com.provectus.kafka.ui.exception.ValidationException; import com.provectus.kafka.ui.mapper.ClusterMapper; import com.provectus.kafka.ui.model.CompatibilityCheckResponse; import com.provectus.kafka.ui.model.CompatibilityLevel; +import com.provectus.kafka.ui.model.InternalSchemaRegistry; import com.provectus.kafka.ui.model.KafkaCluster; import com.provectus.kafka.ui.model.NewSchemaSubject; import com.provectus.kafka.ui.model.SchemaSubject; @@ -26,6 +28,8 @@ import lombok.RequiredArgsConstructor; import lombok.extern.log4j.Log4j2; import org.jetbrains.annotations.NotNull; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpMethod; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; import org.springframework.stereotype.Service; @@ -61,8 +65,10 @@ public Flux<SchemaSubject> getAllLatestVersionSchemas(String clusterName) { public Mono<String[]> getAllSubjectNames(String clusterName) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.get() - .uri(cluster.getSchemaRegistry() + URL_SUBJECTS) + .map(cluster -> 
configuredWebClient( + cluster, + HttpMethod.GET, + URL_SUBJECTS) .retrieve() .bodyToMono(String[].class) .doOnError(log::error) @@ -77,8 +83,10 @@ public Flux<SchemaSubject> getAllVersionsBySubject(String clusterName, String su private Flux<Integer> getSubjectVersions(String clusterName, String schemaName) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.get() - .uri(cluster.getSchemaRegistry() + URL_SUBJECT_VERSIONS, schemaName) + .map(cluster -> configuredWebClient( + cluster, + HttpMethod.GET, + URL_SUBJECT_VERSIONS, schemaName) .retrieve() .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)) @@ -99,8 +107,10 @@ public Mono<SchemaSubject> getLatestSchemaVersionBySubject(String clusterName, private Mono<SchemaSubject> getSchemaSubject(String clusterName, String schemaName, String version) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.get() - .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version) + .map(cluster -> configuredWebClient( + cluster, + HttpMethod.GET, + URL_SUBJECT_BY_VERSION, schemaName, version) .retrieve() .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version)) @@ -140,8 +150,10 @@ public Mono<ResponseEntity<Void>> deleteLatestSchemaSubject(String clusterName, private Mono<ResponseEntity<Void>> deleteSchemaSubject(String clusterName, String schemaName, String version) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.delete() - .uri(cluster.getSchemaRegistry() + URL_SUBJECT_BY_VERSION, schemaName, version) + .map(cluster -> configuredWebClient( + cluster, + HttpMethod.DELETE, + URL_SUBJECT_BY_VERSION, schemaName, version) .retrieve() .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA_VERSION, schemaName, version)) @@ -152,8 +164,10 @@ private Mono<ResponseEntity<Void>> deleteSchemaSubject(String clusterName, Strin public Mono<ResponseEntity<Void>> deleteSchemaSubjectEntirely(String clusterName, String schemaName) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.delete() - .uri(cluster.getSchemaRegistry() + URL_SUBJECT, schemaName) + .map(cluster -> configuredWebClient( + cluster, + HttpMethod.DELETE, + URL_SUBJECT, schemaName) .retrieve() .onStatus(NOT_FOUND::equals, throwIfNotFoundStatus(formatted(NO_SUCH_SCHEMA, schemaName)) @@ -178,8 +192,8 @@ public Mono<SchemaSubject> registerNewSchema(String clusterName, return clustersStorage.getClusterByName(clusterName) .map(KafkaCluster::getSchemaRegistry) .map( - schemaRegistryUrl -> checkSchemaOnDuplicate(subject, newSchema, schemaRegistryUrl) - .flatMap(s -> submitNewSchema(subject, newSchema, schemaRegistryUrl)) + schemaRegistry -> checkSchemaOnDuplicate(subject, newSchema, schemaRegistry) + .flatMap(s -> submitNewSchema(subject, newSchema, schemaRegistry)) .flatMap(resp -> getLatestSchemaVersionBySubject(clusterName, subject)) ) .orElse(Mono.error(ClusterNotFoundException::new)); @@ -189,9 +203,11 @@ public Mono<SchemaSubject> registerNewSchema(String clusterName, @NotNull private Mono<SubjectIdResponse> submitNewSchema(String subject, Mono<InternalNewSchema> newSchemaSubject, - String schemaRegistryUrl) { - return webClient.post() - .uri(schemaRegistryUrl + URL_SUBJECT_VERSIONS, subject) + InternalSchemaRegistry schemaRegistry) { + return configuredWebClient( + schemaRegistry, + HttpMethod.POST, + URL_SUBJECT_VERSIONS, subject) 
.contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class)) .retrieve() @@ -204,9 +220,11 @@ private Mono<SubjectIdResponse> submitNewSchema(String subject, @NotNull private Mono<SchemaSubject> checkSchemaOnDuplicate(String subject, Mono<InternalNewSchema> newSchemaSubject, - String schemaRegistryUrl) { - return webClient.post() - .uri(schemaRegistryUrl + URL_SUBJECT, subject) + InternalSchemaRegistry schemaRegistry) { + return configuredWebClient( + schemaRegistry, + HttpMethod.POST, + URL_SUBJECT, subject) .contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(newSchemaSubject, InternalNewSchema.class)) .retrieve() @@ -236,8 +254,10 @@ public Mono<Void> updateSchemaCompatibility(String clusterName, String schemaNam return clustersStorage.getClusterByName(clusterName) .map(cluster -> { String configEndpoint = Objects.isNull(schemaName) ? "/config" : "/config/{schemaName}"; - return webClient.put() - .uri(cluster.getSchemaRegistry() + configEndpoint, schemaName) + return configuredWebClient( + cluster, + HttpMethod.PUT, + configEndpoint, schemaName) .contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(compatibilityLevel, CompatibilityLevel.class)) .retrieve() @@ -257,8 +277,10 @@ public Mono<CompatibilityLevel> getSchemaCompatibilityLevel(String clusterName, return clustersStorage.getClusterByName(clusterName) .map(cluster -> { String configEndpoint = Objects.isNull(schemaName) ? "/config" : "/config/{schemaName}"; - return webClient.get() - .uri(cluster.getSchemaRegistry() + configEndpoint, schemaName) + return configuredWebClient( + cluster, + HttpMethod.GET, + configEndpoint, schemaName) .retrieve() .bodyToMono(InternalCompatibilityLevel.class) .map(mapper::toCompatibilityLevel) @@ -279,9 +301,10 @@ private Mono<CompatibilityLevel> getSchemaCompatibilityInfoOrGlobal(String clust public Mono<CompatibilityCheckResponse> checksSchemaCompatibility( String clusterName, String schemaName, Mono<NewSchemaSubject> newSchemaSubject) { return clustersStorage.getClusterByName(clusterName) - .map(cluster -> webClient.post() - .uri(cluster.getSchemaRegistry() - + "/compatibility/subjects/{schemaName}/versions/latest", schemaName) + .map(cluster -> configuredWebClient( + cluster, + HttpMethod.POST, + "/compatibility/subjects/{schemaName}/versions/latest", schemaName) .contentType(MediaType.APPLICATION_JSON) .body(BodyInserters.fromPublisher(newSchemaSubject, NewSchemaSubject.class)) .retrieve() @@ -296,4 +319,32 @@ public Mono<CompatibilityCheckResponse> checksSchemaCompatibility( public String formatted(String str, Object... args) { return new Formatter().format(str, args).toString(); } + + private void setBasicAuthIfEnabled(InternalSchemaRegistry schemaRegistry, HttpHeaders headers) { + if (schemaRegistry.getUsername() != null && schemaRegistry.getPassword() != null) { + headers.setBasicAuth( + schemaRegistry.getUsername(), + schemaRegistry.getPassword() + ); + } else if (schemaRegistry.getUsername() != null) { + throw new ValidationException( + "You specified username but do not specified password"); + } else if (schemaRegistry.getPassword() != null) { + throw new ValidationException( + "You specified password but do not specified username"); + } + } + + private WebClient.RequestBodySpec configuredWebClient(KafkaCluster cluster, HttpMethod method, + String uri, Object... 
params) { + return configuredWebClient(cluster.getSchemaRegistry(), method, uri, params); + } + + private WebClient.RequestBodySpec configuredWebClient(InternalSchemaRegistry schemaRegistry, + HttpMethod method, String uri, + Object... params) { + return webClient.method(method) + .uri(schemaRegistry.getUrl() + uri, params) + .headers(headers -> setBasicAuthIfEnabled(schemaRegistry, headers)); + } }
null
train
train
2021-07-21T14:55:30
"2021-05-24T14:51:53Z"
antonettiandrea
train
provectus/kafka-ui/498_748
provectus/kafka-ui
provectus/kafka-ui/498
provectus/kafka-ui/748
[ "timestamp(timedelta=68485.0, similarity=0.868153380463537)" ]
2ab1601a7f0aa1d7eee9b70fd0657f48d539665a
96d07a7d6f87de52144b4e21753bf4eafa42149a
[]
[]
"2021-07-29T14:17:58Z"
[ "type/enhancement", "scope/QA" ]
[e2e] separate e2e-checks module building and running from the main application
### Is your proposal related to a problem?
Problem: e2e-checks require a ready Docker image of `kafka-ui`. Being one of the submodules, it runs tests on `mvn verify`; it also triggers on `./mvnw clean install -Pprod` and fails the install.
### Describe the solution you'd like
- remove it as a submodule.
- create a separate GitHub workflow for running the checks
[ ".github/workflows/backend.yml" ]
[ ".github/workflows/backend.yml", ".github/workflows/e2e-checks.yaml" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java" ]
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index ae5757ce6f3..5df7b74bc08 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -4,7 +4,6 @@ on: types: ['opened', 'edited', 'reopened', 'synchronize'] paths: - 'kafka-ui-api/**' - - 'kafka-ui-e2e-checks/**' jobs: build: runs-on: ubuntu-latest @@ -17,14 +16,6 @@ jobs: restore-keys: | ${{ runner.os }}-maven- - uses: actions/checkout@v2 - - name: Set the values - id: step_one - run: | - cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env" - - name: pull docker - id: step_four - run: | - docker pull selenoid/vnc:chrome_86.0 - name: compose app id: step_five run: | @@ -44,29 +35,3 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar - - name: Generate allure report - uses: simple-elf/allure-report-action@master - if: always() - id: allure-report - with: - allure_results: ./kafka-ui-e2e-checks/allure-results - gh_pages: allure-results - allure_report: allure-report - subfolder: allure-results - - name: Deploy allure report to Github Pages - if: always() - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: allure-history - publish_branch: gh-pages - destination_dir: ./allure - - name: Post the link to allure report - if: always() - uses: Sibz/github-status-action@v1 - with: - authToken: ${{secrets.GITHUB_TOKEN}} - context: 'Test report' - state: 'success' - sha: ${{ github.event.pull_request.head.sha || github.sha }} - target_url: https://${{ github.repository_owner }}.github.io/kafka-ui/allure/allure-results/${{ github.run_number }} \ No newline at end of file diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml new file mode 100644 index 00000000000..6622c0908a9 --- /dev/null +++ b/.github/workflows/e2e-checks.yaml @@ -0,0 +1,68 @@ +name: e2e-checks +on: + pull_request: + types: ['opened', 'edited', 'reopened', 'synchronize'] + paths: + - 'kafka-ui-api/**' + - 'kafka-ui-contract/**' + - 'kafka-ui-react-app/**' + - 'kafka-ui-e2e-checks/**' +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Cache local Maven repository + uses: actions/cache@v2 + with: + path: ~/.m2/repository + key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} + restore-keys: | + ${{ runner.os }}-maven- + - uses: actions/checkout@v2 + - name: Set the values + id: step_one + run: | + cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env" + - name: pull docker + id: step_four + run: | + docker pull selenoid/vnc:chrome_86.0 + - name: compose app + id: step_five + run: | + docker-compose -f ./docker/kafka-ui.yaml up -d + - name: Set up JDK 1.13 + uses: actions/setup-java@v1 + with: + java-version: 1.13 + - name: Build and analyze + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar + - name: Generate allure report + uses: simple-elf/allure-report-action@master + if: always() + id: allure-report + with: + allure_results: ./kafka-ui-e2e-checks/allure-results + gh_pages: allure-results + allure_report: allure-report + subfolder: allure-results + - name: Deploy allure report to Github Pages + if: always() + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + 
publish_dir: allure-history + publish_branch: gh-pages + destination_dir: ./allure + - name: Post the link to allure report + if: always() + uses: Sibz/github-status-action@v1 + with: + authToken: ${{secrets.GITHUB_TOKEN}} + context: 'Test report' + state: 'success' + sha: ${{ github.event.pull_request.head.sha || github.sha }} + target_url: https://${{ github.repository_owner }}.github.io/kafka-ui/allure/allure-results/${{ github.run_number }} \ No newline at end of file
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java index f4d18c79775..5632ce72c22 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java @@ -25,7 +25,7 @@ public static void waitForSelectedValue(SelenideElement element, String selected refresh(); i++; sleep(2000); - } while (!selectedValue.equals(element.getSelectedValue()) && i != 20); + } while (!selectedValue.equals(element.getSelectedValue()) && i != 30); Assertions.assertEquals(selectedValue, element.getSelectedValue()) ; } }
val
train
2021-07-29T13:52:02
"2021-05-26T07:54:57Z"
mikementor
train
provectus/kafka-ui/608_611
provectus/kafka-ui
provectus/kafka-ui/608
provectus/kafka-ui/611
[ "timestamp(timedelta=13052.0, similarity=0.9642211375135101)" ]
e3b2ea1052a3a2b8f92da046fe1c379485cfbea5
d8fecf61cf339fbf579aa74b77b069de8a47374e
[ "I can help!" ]
[ "Pls add setTopicReplicationFactor action. We also need to add td to show replicationFactor from topic object" ]
"2021-07-03T15:44:36Z"
[ "type/enhancement", "good first issue", "scope/frontend" ]
Display replication factor field in topics list
### Is your proposal related to a problem?
No
### Describe the solution you'd like
On the topics list table, add a replication factor column; the backend is already exposing this info.
### Describe alternatives you've considered
No alternatives
[ "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/Topics/List/List.tsx" ]
[ "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/Topics/List/List.tsx" ]
[]
diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index 04ad5bda55a..caa7e21053a 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -1374,6 +1374,7 @@ components: enum: - NAME - OUT_OF_SYNC_REPLICAS + - REPLICATION_FACTOR - TOTAL_PARTITIONS Topic: diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index 99784bb869f..8ca2eee32cf 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -135,6 +135,12 @@ const List: React.FC<Props> = ({ orderBy={orderBy} setOrderBy={setTopicsOrderBy} /> + <SortableColumnHeader + value={TopicColumnsToSort.REPLICATION_FACTOR} + title="Replication Factor" + orderBy={orderBy} + setOrderBy={setTopicsOrderBy} + /> <th>Type</th> <th> </th> </tr>
null
train
train
2021-07-02T14:33:52
"2021-07-02T11:54:37Z"
germanosin
train
provectus/kafka-ui/608_697
provectus/kafka-ui
provectus/kafka-ui/608
provectus/kafka-ui/697
[ "timestamp(timedelta=29.0, similarity=0.9545519612869706)" ]
443ed8bc8ca5e7b54f8c29396f8e3efed63041cb
326786e4ff5a9ff47dd4902dd981ac9d2d6bcad9
[ "I can help!" ]
[]
"2021-07-21T09:11:18Z"
[ "type/enhancement", "good first issue", "scope/frontend" ]
Display replication factor field in topics list
### Is your proposal related to a problem?
No
### Describe the solution you'd like
On the topics list table, add a replication factor column; the backend is already exposing this info.
### Describe alternatives you've considered
No alternatives
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap" ]
[ "kafka-ui-react-app/src/components/Topics/List/List.tsx", "kafka-ui-react-app/src/components/Topics/List/ListItem.tsx", "kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/List/List.tsx b/kafka-ui-react-app/src/components/Topics/List/List.tsx index 8bc231a98d9..34b7bc8a955 100644 --- a/kafka-ui-react-app/src/components/Topics/List/List.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/List.tsx @@ -135,6 +135,7 @@ const List: React.FC<Props> = ({ orderBy={orderBy} setOrderBy={setTopicsOrderBy} /> + <th>Replication Factor</th> <th>Number of messages</th> <th>Size</th> <th>Type</th> diff --git a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx index 6a086771a91..ea2e99953fd 100644 --- a/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx +++ b/kafka-ui-react-app/src/components/Topics/List/ListItem.tsx @@ -20,7 +20,7 @@ export interface ListItemProps { } const ListItem: React.FC<ListItemProps> = ({ - topic: { name, internal, partitions, segmentSize }, + topic: { name, internal, partitions, segmentSize, replicationFactor }, deleteTopic, clusterName, clearTopicMessages, @@ -75,6 +75,7 @@ const ListItem: React.FC<ListItemProps> = ({ </td> <td>{partitions?.length}</td> <td>{outOfSyncReplicas}</td> + <td>{replicationFactor}</td> <td>{numberOfMessages}</td> <td> <BytesFormatted value={segmentSize} /> diff --git a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap index 17a86b1f2fe..b3012f81387 100644 --- a/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/List/__tests__/__snapshots__/List.spec.tsx.snap @@ -205,6 +205,9 @@ exports[`List when it does not have readonly flag matches the snapshot 1`] = ` </span> </th> </ListHeaderCell> + <th> + Replication Factor + </th> <th> Number of messages </th>
null
val
train
2021-07-20T17:00:46
"2021-07-02T11:54:37Z"
germanosin
train
provectus/kafka-ui/610_673
provectus/kafka-ui
provectus/kafka-ui/610
provectus/kafka-ui/673
[ "timestamp(timedelta=0.0, similarity=0.9807460411361394)" ]
587e08ab01032bdcc76058a1e217c0d31aeca648
9b6952c480831615a5368f0dd5603937eb5e049f
[]
[]
"2021-07-13T15:54:15Z"
[ "type/enhancement", "good first issue", "scope/frontend" ]
Display kafka cluster version
### Describe the solution you'd like
Add the Kafka cluster version on the dashboard and brokers pages. On the brokers page we could add it in the box with the uptime.
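For context, the backend side of such a change amounts to carrying the version string on the cluster metrics object and exposing it in the API; a minimal self-contained sketch of that builder step (the `ClusterMetrics` class here is a simplified stand-in for the project's Lombok-built `InternalClusterMetrics`, and the version value is a placeholder):

```java
public class ClusterMetrics {

    private final int zooKeeperStatus;
    private final String version;

    private ClusterMetrics(int zooKeeperStatus, String version) {
        this.zooKeeperStatus = zooKeeperStatus;
        this.version = version;
    }

    public String getVersion() {
        return version;
    }

    // Hand-written builder standing in for Lombok's @Builder on the real class
    public static class Builder {
        private int zooKeeperStatus;
        private String version;

        public Builder zooKeeperStatus(int status) { this.zooKeeperStatus = status; return this; }
        public Builder version(String version) { this.version = version; return this; }
        public ClusterMetrics build() { return new ClusterMetrics(zooKeeperStatus, version); }
    }

    public static void main(String[] args) {
        // In the real service the version comes from the Kafka admin client
        ClusterMetrics metrics = new Builder()
            .zooKeeperStatus(1)
            .version("2.8.0") // placeholder value
            .build();
        System.out.println(metrics.getVersion());
    }
}
```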
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/Brokers/Brokers.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts", "kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx", "kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap", "kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx", "kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap", "kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml", "kafka-ui-react-app/src/components/Brokers/Brokers.tsx", "kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts", "kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx", "kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap", "kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx", "kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap", "kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java index 28717dbcd3a..66bf2ccaa75 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/model/InternalClusterMetrics.java @@ -27,4 +27,5 @@ public class InternalClusterMetrics { private final Map<Integer, InternalBrokerMetrics> internalBrokerMetrics; private final List<Metric> metrics; private final int zooKeeperStatus; + private final String version; } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java index d61f807c155..cc978d7a376 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java @@ -159,6 +159,7 @@ private KafkaCluster buildFromData(KafkaCluster currentCluster, .onlinePartitionCount(topicsMetrics.getOnlinePartitionCount()) .offlinePartitionCount(topicsMetrics.getOfflinePartitionCount()) .zooKeeperStatus(ClusterUtil.convertToIntServerStatus(zookeeperStatus)) + .version(version) .build(); return currentCluster.toBuilder() diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index 488566c166d..2bb431ac6ee 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -1417,6 +1417,8 @@ components: type: array items: $ref: '#/components/schemas/BrokerDiskUsage' + version: + type: string BrokerDiskUsage: type: object diff --git a/kafka-ui-react-app/src/components/Brokers/Brokers.tsx b/kafka-ui-react-app/src/components/Brokers/Brokers.tsx index 982cda4568f..9a14fe1cf28 100644 --- a/kafka-ui-react-app/src/components/Brokers/Brokers.tsx +++ b/kafka-ui-react-app/src/components/Brokers/Brokers.tsx @@ -27,6 +27,7 @@ const Brokers: React.FC<Props> = ({ diskUsage, fetchClusterStats, fetchBrokers, + version, }) => { const { clusterName } = useParams<{ clusterName: ClusterName }>(); @@ -56,6 +57,9 @@ const Brokers: React.FC<Props> = ({ {zkOnline ? 
'Online' : 'Offline'} </span> </Indicator> + <Indicator className="is-one-third" label="Version"> + {version} + </Indicator> </MetricsWrapper> <MetricsWrapper title="Partitions"> <Indicator label="Online"> diff --git a/kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts b/kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts index 9dad90a0abf..d61a049f768 100644 --- a/kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts +++ b/kafka-ui-react-app/src/components/Brokers/BrokersContainer.ts @@ -12,6 +12,7 @@ import { getOutOfSyncReplicasCount, getUnderReplicatedPartitionCount, getDiskUsage, + getVersion, } from 'redux/reducers/brokers/selectors'; import Brokers from 'components/Brokers/Brokers'; @@ -26,6 +27,7 @@ const mapStateToProps = (state: RootState) => ({ outOfSyncReplicasCount: getOutOfSyncReplicasCount(state), underReplicatedPartitionCount: getUnderReplicatedPartitionCount(state), diskUsage: getDiskUsage(state), + version: getVersion(state), }); const mapDispatchToProps = { diff --git a/kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx b/kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx index 7aab0fc3e57..2ec1b16f0b0 100644 --- a/kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx +++ b/kafka-ui-react-app/src/components/Brokers/__test__/Brokers.spec.tsx @@ -26,6 +26,7 @@ describe('Brokers Component', () => { inSyncReplicasCount={0} outOfSyncReplicasCount={0} underReplicatedPartitionCount={0} + version="1" fetchClusterStats={jest.fn()} fetchBrokers={jest.fn()} diskUsage={undefined} @@ -61,6 +62,7 @@ describe('Brokers Component', () => { inSyncReplicasCount={64} outOfSyncReplicasCount={0} underReplicatedPartitionCount={0} + version="1" fetchClusterStats={jest.fn()} fetchBrokers={jest.fn()} diskUsage={[ diff --git a/kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap b/kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap index 24cb30bd533..be83a9ba00f 100644 --- a/kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Brokers/__test__/__snapshots__/Brokers.spec.tsx.snap @@ -69,6 +69,7 @@ exports[`Brokers Component Brokers Empty matches Brokers Empty snapshot 1`] = ` onlinePartitionCount={0} outOfSyncReplicasCount={0} underReplicatedPartitionCount={0} + version="1" zooKeeperStatus={0} > <div @@ -179,6 +180,29 @@ exports[`Brokers Component Brokers Empty matches Brokers Empty snapshot 1`] = ` </div> </div> </Indicator> + <Indicator + className="is-one-third" + label="Version" + > + <div + className="level-item is-one-third" + > + <div + title="Version" + > + <p + className="heading" + > + Version + </p> + <p + className="title has-text-centered" + > + 1 + </p> + </div> + </div> + </Indicator> </div> </div> </MetricsWrapper> @@ -400,6 +424,7 @@ exports[`Brokers Component Brokers matches snapshot 1`] = ` onlinePartitionCount={64} outOfSyncReplicasCount={0} underReplicatedPartitionCount={0} + version="1" zooKeeperStatus={1} > <div @@ -510,6 +535,29 @@ exports[`Brokers Component Brokers matches snapshot 1`] = ` </div> </div> </Indicator> + <Indicator + className="is-one-third" + label="Version" + > + <div + className="level-item is-one-third" + > + <div + title="Version" + > + <p + className="heading" + > + Version + </p> + <p + className="title has-text-centered" + > + 1 + </p> + </div> + </div> + </Indicator> </div> </div> </MetricsWrapper> diff --git 
a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx index 43d076ae15a..4231e277617 100644 --- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx +++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/ClusterWidget.tsx @@ -18,6 +18,7 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({ bytesOutPerSec, onlinePartitionCount, readOnly, + version, }, }) => ( <div className="column is-full-modile is-6"> @@ -38,6 +39,10 @@ const ClusterWidget: React.FC<ClusterWidgetProps> = ({ <table className="table is-fullwidth"> <tbody> + <tr> + <th>Version</th> + <td>{version}</td> + </tr> <tr> <th>Brokers</th> <td> diff --git a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap index 8ab8a406d3e..8d6b8173f84 100644 --- a/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Dashboard/ClustersWidget/__test__/__snapshots__/ClusterWidget.spec.tsx.snap @@ -21,6 +21,12 @@ exports[`ClusterWidget when cluster is offline matches snapshot 1`] = ` className="table is-fullwidth" > <tbody> + <tr> + <th> + Version + </th> + <td /> + </tr> <tr> <th> Brokers @@ -100,6 +106,12 @@ exports[`ClusterWidget when cluster is online matches snapshot 1`] = ` className="table is-fullwidth" > <tbody> + <tr> + <th> + Version + </th> + <td /> + </tr> <tr> <th> Brokers diff --git a/kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts b/kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts index 3c6ef947dd4..9b841b464b3 100644 --- a/kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts +++ b/kafka-ui-react-app/src/redux/reducers/brokers/selectors.ts @@ -48,3 +48,8 @@ export const getDiskUsage = createSelector( brokersState, ({ diskUsage }) => diskUsage ); + +export const getVersion = createSelector( + brokersState, + ({ version }) => version +);
null
test
train
2021-07-13T17:53:05
"2021-07-02T12:29:00Z"
germanosin
train
provectus/kafka-ui/622_762
provectus/kafka-ui
provectus/kafka-ui/622
provectus/kafka-ui/762
[ "timestamp(timedelta=590.0, similarity=0.8772672254426148)" ]
40678809661cb2659d4c93b2fbcd0cd940b4006a
4fc5c4002986f224864060611ece5c2bfee4e8d5
[]
[]
"2021-08-03T08:48:43Z"
[ "scope/QA" ]
[e2e] Update workflow to run e2e checks on the current build
Currently, e2e-checks use the latest Docker build (from `./docker/kafka-ui.yaml`). We need to run the tests against the version that is currently in the commit instead.
[ ".github/workflows/e2e-checks.yaml" ]
[ ".github/workflows/e2e-checks.yaml" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java" ]
diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml index 6622c0908a9..f6b5caa7ed7 100644 --- a/.github/workflows/e2e-checks.yaml +++ b/.github/workflows/e2e-checks.yaml @@ -20,26 +20,30 @@ jobs: ${{ runner.os }}-maven- - uses: actions/checkout@v2 - name: Set the values - id: step_one + id: set_env_values run: | cat "./kafka-ui-e2e-checks/.env.ci" >> "./kafka-ui-e2e-checks/.env" - name: pull docker - id: step_four + id: pull_selenoid run: | docker pull selenoid/vnc:chrome_86.0 - - name: compose app - id: step_five - run: | - docker-compose -f ./docker/kafka-ui.yaml up -d - name: Set up JDK 1.13 uses: actions/setup-java@v1 with: java-version: 1.13 - - name: Build and analyze + - name: Build with Maven + id: build_app + run: | + mvn clean package -DskipTests ${{ github.event.inputs.extraMavenOptions }} + - name: compose app + id: compose_app + run: | + docker-compose -f ./docker/kafka-ui.yaml up -d + - name: e2e run env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar + run: mvn -pl '!kafka-ui-api' test -Pprod - name: Generate allure report uses: simple-elf/allure-report-action@master if: always()
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java index 5632ce72c22..0fa3d810e60 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java @@ -25,7 +25,7 @@ public static void waitForSelectedValue(SelenideElement element, String selected refresh(); i++; sleep(2000); - } while (!selectedValue.equals(element.getSelectedValue()) && i != 30); + } while (!selectedValue.equals(element.getSelectedValue()) && i != 40); Assertions.assertEquals(selectedValue, element.getSelectedValue()) ; } } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java index cfca9d529b7..99a809a01ea 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java @@ -57,6 +57,7 @@ void updateTopic() { pages.openTopicsList(SECOND_LOCAL) .isOnPage() .openTopic(TOPIC_TO_UPDATE); + Selenide.refresh(); pages.openTopicView(SECOND_LOCAL, TOPIC_TO_UPDATE) .openEditSettings() .changeCleanupPolicy(COMPACT_POLICY_VALUE) @@ -85,6 +86,7 @@ void deleteTopic() { .openTopic(TOPIC_TO_DELETE); pages.openTopicView(SECOND_LOCAL, TOPIC_TO_DELETE).clickDeleteTopicButton(); pages.openTopicsList(SECOND_LOCAL).isNotVisible(TOPIC_TO_DELETE); + } }
test
train
2021-08-05T08:05:21
"2021-07-05T10:39:57Z"
mikementor
train
provectus/kafka-ui/626_696
provectus/kafka-ui
provectus/kafka-ui/626
provectus/kafka-ui/696
[ "timestamp(timedelta=62041.0, similarity=0.9078285945612603)" ]
29a3158df68662f9ed7795edbc775446831a977d
4c13555461cb5225ed976cf5d5e864d0f041840a
[]
[ "less words - good\r\nbut im not sure, should we be more explicit, what page we assert there\r\n\r\nAlso, some almost controversial idea - can we try to omit `should`?\r\n\r\nIt adds opinionated flavour, like lesser brother of `must`. More propositional approach - `onPage` / `isOnPage` conveys the same amount of information\r\n\r\n I might be wrong.", "Specificity helps, it's something around ~`CustomConditions` / at least, `WaitUtils`? Im terrible at names, I only know, that you need to remember contents of utils, as name doesn't help with what utils help\r\n", "If you want to do it more elegantly, you can try to play with custom conditions, though I hadn't tried it myself \r\nIn case you want, there're probably helpful links\r\nhttps://github.com/selenide/selenide/wiki/Custom-conditions\r\nhttps://gitter.im/codeborne/selenide?at=5cb46f4da0790b29c9bdf313", "make fields `private` please\r\nYou can move `waitForSelectedValue(topicViewPage.cleanupPolicy, COMPACT_POLICY_VALUE);` into TopicViewPage, so you won't have to expose your internals", "Also discussable, as all my other suggestions,\r\nWe should probably omit `Page` part of the naming, while enforcing usage only through `pages` entrypoint. Then there won't be any ambiguity, but stream-lined mentally-free usage. Having Page in the name can sometimes lean into using class by itself, which can result in sloppy design..", "if you'll decide to keep it without custom conditions, use class name before using method `SomeUtils.refreshUntil`\r\nso it'll become clearer for next readers. Some can know some selenide basics like `refresh` / `$`, but won't be familiar with custom statics", "I guess `refresh` will also work)\r\n", "that's a misleading method\r\nYou actually use `goTo` to go to the direct path(any path/ not topic view),and then just returns topicview for all other `openEditSettings`\r\n\r\nSuggest to use\r\n`goTo(path).onTopicListPage()` <--- add assertion which will return needed page\r\n", "Because you got here ambiguous `openTopicViewPage`\r\nfeels like you loading page twice - on reload and on `goTo` inside `openTopicViewPage`\r\nif you need reload try(with the comment above)\r\n```\r\npages.reloadPage()\r\n .onTopicViewPage()\r\n .openEditSettings()\r\n```\r\n", "move wait into `topicViewPage` class, don't expose internals", "also, try to do it the way it won't require knowledge of the class fields", "small suggestion here. `DELETE_TOPIC` sounds like action,maybe `TOPIC_TO_DELETE` or something like that?", "private?", "This method is stored in Utils because it is a common method, it will be used not only for topicView.", "I'll do it during my next tasks", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "Done!", "`refreshUntil` until what? In implementation - until Condition.visible,\r\nsuggest to either move condition in arguments ```refreshUntil(By by, Condition condition) {```\r\nor explicitly say `refreshUntilVisible( By by`" ]
"2021-07-20T16:51:46Z"
[ "scope/QA" ]
[e2e] add e2e checks on update/delete topic
[ "kafka-ui-e2e-checks/pom.xml", "pom.xml" ]
[ "kafka-ui-e2e-checks/pom.xml", "pom.xml" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicView.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicsList.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java" ]
diff --git a/kafka-ui-e2e-checks/pom.xml b/kafka-ui-e2e-checks/pom.xml index 19fb78f2e34..f74ce1a8d62 100644 --- a/kafka-ui-e2e-checks/pom.xml +++ b/kafka-ui-e2e-checks/pom.xml @@ -99,7 +99,7 @@ <dependency> <groupId>org.projectlombok</groupId> <artifactId>lombok</artifactId> - <version>${org.projectlombok.version}</version> + <version>${org.projectlombok.e2e-checks.version}</version> </dependency> <dependency> <groupId>org.aspectj</groupId> diff --git a/pom.xml b/pom.xml index c91d9e392cb..0c74e664aae 100644 --- a/pom.xml +++ b/pom.xml @@ -18,6 +18,7 @@ <jackson-databind-nullable.version>0.2.1</jackson-databind-nullable.version> <org.mapstruct.version>1.3.1.Final</org.mapstruct.version> <org.projectlombok.version>1.18.10</org.projectlombok.version> + <org.projectlombok.e2e-checks.version>1.18.20</org.projectlombok.e2e-checks.version> <git.revision>latest</git.revision> <zkclient.version>0.11</zkclient.version> <kafka-clients.version>2.4.1</kafka-clients.version>
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java index da7139e434c..1719b0229ca 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java @@ -3,7 +3,6 @@ import com.provectus.kafka.ui.base.BaseTest; import io.qameta.allure.Issue; import lombok.SneakyThrows; -import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; @@ -14,7 +13,7 @@ public class SmokeTests extends BaseTest { @Issue("380") void mainPageLoads() { pages.open() - .mainPage.shouldBeOnPage(); + .isOnPage(); compareScreenshots("main"); } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java new file mode 100644 index 00000000000..f4d18c79775 --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/extensions/WaitUtils.java @@ -0,0 +1,31 @@ +package com.provectus.kafka.ui.extensions; + +import com.codeborne.selenide.Condition; +import com.codeborne.selenide.SelenideElement; +import org.junit.jupiter.api.Assertions; +import org.openqa.selenium.By; + +import static com.codeborne.selenide.Selenide.*; +import static com.codeborne.selenide.Selenide.$; + +public class WaitUtils { + public static void refreshUntil(By by, Condition condition) { + int i = 0; + do { + refresh(); + i++; + sleep(2000); + } while ($$(by).size() < 1 && i != 20); + $(by).shouldBe(condition); + } + + public static void waitForSelectedValue(SelenideElement element, String selectedValue) { + int i = 0; + do { + refresh(); + i++; + sleep(2000); + } while (!selectedValue.equals(element.getSelectedValue()) && i != 20); + Assertions.assertEquals(selectedValue, element.getSelectedValue()) ; + } +} diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java index f0749f5fbb5..cb4f922688a 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/MainPage.java @@ -1,42 +1,38 @@ package com.provectus.kafka.ui.pages; import com.codeborne.selenide.Condition; +import com.codeborne.selenide.Selenide; +import com.provectus.kafka.ui.base.TestConfiguration; +import com.provectus.kafka.ui.extensions.WaitUtils; import io.qameta.allure.Step; import lombok.SneakyThrows; +import lombok.experimental.ExtensionMethod; import org.openqa.selenium.By; import static com.codeborne.selenide.Selenide.*; +@ExtensionMethod({WaitUtils.class}) public class MainPage { - private static final long TIMEOUT = 25000; + private static final String path = ""; @Step - public MainPage shouldBeOnPage() { + public MainPage goTo(){ + Selenide.open(TestConfiguration.BASE_URL+path); + return this; + } + @Step + public MainPage isOnPage() { $(By.xpath("//*[contains(text(),'Loading')]")).shouldBe(Condition.disappear); $(By.xpath("//h5[text()='Clusters']")).shouldBe(Condition.visible); return this; } - - private void refreshUntil(By by){ - int i =0; - do - { - refresh(); - i++; - sleep(2000); - } while(getElements(by).size()<1 && i!=20); - $(by).shouldBe(Condition.visible); - } - @SneakyThrows - public void shouldBeTopic(String topicName) { - 
refreshUntil(By.xpath("//div[contains(@class,'section')]//table//a[text()='%s']".formatted(topicName))); + public void topicIsVisible(String topicName) { + By.xpath("//div[contains(@class,'section')]//table//a[text()='%s']".formatted(topicName)).refreshUntil(Condition.visible); } - - public enum SideMenuOptions { BROKERS("Brokers"), TOPICS("Topics"), diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java index 1f21e576707..9b48e097147 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/Pages.java @@ -1,19 +1,27 @@ package com.provectus.kafka.ui.pages; -import com.codeborne.selenide.Selenide; -import com.provectus.kafka.ui.base.TestConfiguration; - public class Pages { public static Pages INSTANCE = new Pages(); public MainPage mainPage = new MainPage(); + public TopicsList topicsList = new TopicsList(); + public TopicView topicView = new TopicView(); + + public MainPage open() { + return openMainPage(); + } - private Pages goTo(String path) { - Selenide.open(TestConfiguration.BASE_URL+path); - return this; + public MainPage openMainPage() { + return mainPage.goTo(); } - public Pages open() { - return goTo(""); + + public TopicsList openTopicsList(String clusterName) { + return topicsList.goTo(clusterName); } + + public TopicView openTopicView(String clusterName, String topicName) { + return topicView.goTo(clusterName, topicName); + } + } diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicView.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicView.java new file mode 100644 index 00000000000..386e4626d51 --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicView.java @@ -0,0 +1,92 @@ +package com.provectus.kafka.ui.pages; + +import com.codeborne.selenide.Selenide; +import com.codeborne.selenide.SelenideElement; +import com.provectus.kafka.ui.base.TestConfiguration; +import com.provectus.kafka.ui.extensions.WaitUtils; +import io.qameta.allure.Step; +import lombok.SneakyThrows; +import lombok.experimental.ExtensionMethod; +import org.junit.jupiter.api.Assertions; +import org.openqa.selenium.By; + +import static com.codeborne.selenide.Selenide.*; + +@ExtensionMethod({WaitUtils.class}) +public class TopicView { + private static final String path = "ui/clusters/%s/topics/%s"; + private final SelenideElement cleanupPolicy = $(By.name("cleanupPolicy")); + private final SelenideElement timeToRetain = $(By.id("timeToRetain")); + private final SelenideElement maxSizeOnDisk = $(By.name("retentionBytes")); + private final SelenideElement maxMessageBytes = $(By.name("maxMessageBytes")); + + @Step + public TopicView goTo(String cluster,String topic){ + Selenide.open(TestConfiguration.BASE_URL+path.formatted(cluster,topic)); + return this; + } + @SneakyThrows + public TopicView openEditSettings() { + $(By.xpath("//a[@class=\"button\" and text()='Edit settings']")).click(); + return this; + } + + @SneakyThrows + public void clickDeleteTopicButton() { + $(By.xpath("//*[text()='Delete Topic']")).click(); + $(By.xpath("//*[text()='Confirm']")).click(); + } + + @SneakyThrows + public TopicView changeCleanupPolicy(String cleanupPolicyValue) { + cleanupPolicy.click(); + $(By.xpath("//select/option[@value = '%s']".formatted(cleanupPolicyValue))).click(); + return this; + } + + @SneakyThrows + public TopicView 
changeTimeToRetainValue(String timeToRetainValue) { + timeToRetain.clear(); + timeToRetain.sendKeys(String.valueOf(timeToRetainValue)); + return this; + } + + @SneakyThrows + public TopicView changeMaxSizeOnDisk(String maxSizeOnDiskValue) { + maxSizeOnDisk.click(); + $(By.xpath("//select/option[text() = '%s']".formatted(maxSizeOnDiskValue))).click(); + return this; + } + + @SneakyThrows + public TopicView changeMaxMessageBytes(String maxMessageBytesValue) { + maxMessageBytes.clear(); + maxMessageBytes.sendKeys(String.valueOf(maxMessageBytesValue)); + return this; + } + + @SneakyThrows + public void submitSettingChanges() { + $(By.xpath("//input[@type='submit']")).click(); + } + + public TopicView cleanupPolicyIs(String value) { + cleanupPolicy.waitForSelectedValue(value); + return this; + } + + public TopicView timeToRetainIs(String time) { + Assertions.assertEquals(time, timeToRetain.getValue()); + return this; + } + + public TopicView maxSizeOnDiskIs(String size) { + Assertions.assertEquals(size, maxSizeOnDisk.getSelectedText()); + return this; + } + + public TopicView maxMessageBytesIs(String bytes) { + Assertions.assertEquals(bytes, maxMessageBytes.getValue()); + return this; + } +} \ No newline at end of file diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicsList.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicsList.java new file mode 100644 index 00000000000..341bfea9fd9 --- /dev/null +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/pages/TopicsList.java @@ -0,0 +1,47 @@ +package com.provectus.kafka.ui.pages; + +import com.codeborne.selenide.Condition; +import com.codeborne.selenide.Selenide; +import com.provectus.kafka.ui.base.TestConfiguration; +import com.provectus.kafka.ui.extensions.WaitUtils; +import io.qameta.allure.Step; +import lombok.SneakyThrows; +import lombok.experimental.ExtensionMethod; +import org.openqa.selenium.By; + +import static com.codeborne.selenide.Selenide.$; + +@ExtensionMethod(WaitUtils.class) +public class TopicsList { + private static final String path = "ui/clusters/%s/topics"; + + @Step + public TopicsList goTo(String cluster) { + Selenide.open(TestConfiguration.BASE_URL+path.formatted(cluster)); + return this; + } + + @Step + public TopicsList isOnPage() { + $(By.xpath("//*[contains(text(),'Loading')]")).shouldBe(Condition.disappear); + $(By.xpath("//span[text()='All Topics']")).shouldBe(Condition.visible); + return this; + } + + @SneakyThrows + public TopicsList openTopic(String topicName) { + By.xpath("//div[contains(@class,'section')]//table//a[text()='%s']" + .formatted(topicName)).refreshUntil(Condition.visible); + $(By.xpath("//div[contains(@class,'section')]//table//a[text()='%s']".formatted(topicName))) + .click(); + return this; + } + + @SneakyThrows + public TopicsList isNotVisible(String topicName) { + By.xpath("//div[contains(@class,'section')]//table").refreshUntil(Condition.visible); + $(By.xpath("//a[text()='%s']".formatted(topicName))).shouldNotBe(Condition.visible); + return this; + } + +} diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java index b32d9b992f1..cfca9d529b7 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/topics/TopicTests.java @@ -1,34 +1,90 @@ package com.provectus.kafka.ui.topics; +import com.codeborne.selenide.Selenide; 
import com.provectus.kafka.ui.base.BaseTest; +import com.provectus.kafka.ui.helpers.Helpers; import com.provectus.kafka.ui.pages.MainPage; -import com.provectus.kafka.ui.steps.kafka.KafkaSteps; -import com.provectus.kafka.ui.helpers.ApiHelper; import lombok.SneakyThrows; -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.*; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class TopicTests extends BaseTest { +public class TopicTests extends BaseTest { public static final String NEW_TOPIC = "new-topic"; + public static final String TOPIC_TO_UPDATE = "topic-to-update"; + public static final String TOPIC_TO_DELETE = "topic-to-delete"; + public static final String SECOND_LOCAL = "secondLocal"; + public static final String COMPACT_POLICY_VALUE = "compact"; + public static final String UPDATED_TIME_TO_RETAIN_VALUE = "604800001"; + public static final String UPDATED_MAX_SIZE_ON_DISK = "20 GB"; + public static final String UPDATED_MAX_MESSAGE_BYTES = "1000020"; + + @BeforeAll + @SneakyThrows + public static void beforeAll() { + Helpers.INSTANCE.apiHelper.createTopic(SECOND_LOCAL, TOPIC_TO_UPDATE); + Helpers.INSTANCE.apiHelper.createTopic(SECOND_LOCAL, TOPIC_TO_DELETE); + } - @AfterEach + @AfterAll @SneakyThrows - void afterEach(){ - helpers.apiHelper.deleteTopic("secondLocal","new-topic"); + public static void afterAll() { + Helpers.INSTANCE.apiHelper.deleteTopic(SECOND_LOCAL, TOPIC_TO_UPDATE); + Helpers.INSTANCE.apiHelper.deleteTopic(SECOND_LOCAL, TOPIC_TO_DELETE); } @SneakyThrows @DisplayName("should create a topic") @Test - void createTopic(){ - helpers.apiHelper.createTopic("secondLocal","new-topic"); - pages.open() - .mainPage.shouldBeOnPage() - .goToSideMenu("secondLocal", MainPage.SideMenuOptions.TOPICS) - .shouldBeTopic(NEW_TOPIC); + void createTopic() { + try { + helpers.apiHelper.createTopic(SECOND_LOCAL, NEW_TOPIC); + pages.open() + .isOnPage() + .goToSideMenu(SECOND_LOCAL, MainPage.SideMenuOptions.TOPICS) + .topicIsVisible(NEW_TOPIC); + } finally { + helpers.apiHelper.deleteTopic(SECOND_LOCAL, NEW_TOPIC); + } } + + @SneakyThrows + @DisplayName("should update a topic") + @Test + void updateTopic() { + pages.openTopicsList(SECOND_LOCAL) + .isOnPage() + .openTopic(TOPIC_TO_UPDATE); + pages.openTopicView(SECOND_LOCAL, TOPIC_TO_UPDATE) + .openEditSettings() + .changeCleanupPolicy(COMPACT_POLICY_VALUE) + .changeTimeToRetainValue(UPDATED_TIME_TO_RETAIN_VALUE) + .changeMaxSizeOnDisk(UPDATED_MAX_SIZE_ON_DISK) + .changeMaxMessageBytes(UPDATED_MAX_MESSAGE_BYTES) + .submitSettingChanges(); + Selenide.refresh(); + pages.openTopicView(SECOND_LOCAL, TOPIC_TO_UPDATE) + .openEditSettings() + // Assertions + .cleanupPolicyIs(COMPACT_POLICY_VALUE) + .timeToRetainIs(UPDATED_TIME_TO_RETAIN_VALUE) + .maxSizeOnDiskIs(UPDATED_MAX_SIZE_ON_DISK) + .maxMessageBytesIs(UPDATED_MAX_MESSAGE_BYTES); + } + + @SneakyThrows + @DisplayName("should delete topic") + @Test + @Disabled + void deleteTopic() { + + pages.openTopicsList(SECOND_LOCAL) + .isOnPage() + .openTopic(TOPIC_TO_DELETE); + pages.openTopicView(SECOND_LOCAL, TOPIC_TO_DELETE).clickDeleteTopicButton(); + pages.openTopicsList(SECOND_LOCAL).isNotVisible(TOPIC_TO_DELETE); + } + }
val
train
2021-07-27T12:03:21
"2021-07-05T10:41:36Z"
mikementor
train
provectus/kafka-ui/635_650
provectus/kafka-ui
provectus/kafka-ui/635
provectus/kafka-ui/650
[ "timestamp(timedelta=62580.0, similarity=0.842880490788546)" ]
13463fe95f8c6c375b09d47e7da41a87b8a47134
7eaa7336239dce3b6a521e1d884dd01eeea7a428
[ "I made a PR for this issue.\r\nI only added it in the global consumer list as an item to the table.\r\n\r\nNot sure about whether I should also add it into the consumers details (which would be very repetitive) ?" ]
[]
"2021-07-09T16:33:00Z"
[ "type/enhancement", "good first issue", "scope/frontend" ]
Display consumer group state
### Describe the solution you'd like
The backend provides the consumer group state in the consumer group list response. Let's display it in the table.
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[ "kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx", "kafka-ui-react-app/src/redux/actions/thunks/topics.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx index dfb3834b479..4f58dabf201 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/Details.tsx @@ -16,7 +16,7 @@ import ListItem from './ListItem'; export interface Props extends ConsumerGroup, ConsumerGroupDetails { clusterName: ClusterName; - consumers?: ConsumerGroupTopicPartition[]; + partitions?: ConsumerGroupTopicPartition[]; isFetched: boolean; isDeleted: boolean; fetchConsumerGroupDetails: ( @@ -29,7 +29,7 @@ export interface Props extends ConsumerGroup, ConsumerGroupDetails { const Details: React.FC<Props> = ({ clusterName, groupId, - consumers, + partitions, isFetched, isDeleted, fetchConsumerGroupDetails, @@ -38,7 +38,7 @@ const Details: React.FC<Props> = ({ React.useEffect(() => { fetchConsumerGroupDetails(clusterName, groupId); }, [fetchConsumerGroupDetails, clusterName, groupId]); - const items = consumers || []; + const items = partitions || []; const [isConfirmationModelVisible, setIsConfirmationModelVisible] = React.useState<boolean>(false); const history = useHistory(); diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx index ab359c16284..1d20c799e26 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/Details/__tests__/Details.spec.tsx @@ -20,7 +20,7 @@ describe('Details component', () => { isDeleted={false} fetchConsumerGroupDetails={jest.fn()} deleteConsumerGroup={jest.fn()} - consumers={[ + partitions={[ { consumerId: 'consumer-messages-consumer-1-122fbf98-643b-491d-8aec-c0641d2513d0', diff --git a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts index 6cff812f9c7..be6ad93f614 100644 --- a/kafka-ui-react-app/src/redux/actions/thunks/topics.ts +++ b/kafka-ui-react-app/src/redux/actions/thunks/topics.ts @@ -332,9 +332,7 @@ export const fetchTopicConsumerGroups = ...state.byName, [topicName]: { ...state.byName[topicName], - consumerGroups: { - ...consumerGroups, - }, + consumerGroups, }, }, };
null
train
train
2021-07-09T14:55:43
"2021-07-05T15:41:30Z"
germanosin
train
provectus/kafka-ui/671_821
provectus/kafka-ui
provectus/kafka-ui/671
provectus/kafka-ui/821
[ "timestamp(timedelta=128.0, similarity=0.8523080857900269)" ]
76af95ed787fb39b5e051c5ec7d7f2b9a20c9835
d737953a8eb60c75ee87097337fb7e1a115a00f9
[ "@anjeyy thank you for creating this issue. This is definitely a bug. We'll fix it in 0.2 version", "I am having the same error on topic creation:\r\n\r\n```\r\n18:22:49.274 [kafka-admin-client-thread | adminclient-1] ERROR org.springframework.boot.autoconfigure.web.reactive.error.AbstractErrorWebExceptionHandler - [f527d091] 500 Server Error for HTTP POST \"/api/clusters/stage/topics\"\r\norg.apache.kafka.common.errors.UnknownTopicOrPartitionException:\r\n\tSuppressed: reactor.core.publisher.FluxOnAssembly$OnAssemblyException:\r\nError has been observed at the following site(s):\r\n\t|_ checkpoint β‡’ Handler com.provectus.kafka.ui.controller.TopicsController#createTopic(String, Mono, ServerWebExchange) [DispatcherHandler]\r\n\t|_ checkpoint β‡’ com.provectus.kafka.ui.config.CustomWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ com.provectus.kafka.ui.config.ReadOnlyModeFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.authorization.AuthorizationWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.authorization.ExceptionTranslationWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.authentication.logout.LogoutWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.savedrequest.ServerRequestCacheWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.context.SecurityContextServerWebExchangeWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.context.ReactorContextWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.header.HttpHeaderWriterWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.config.web.server.ServerHttpSecurity$ServerWebExchangeReactorContextWebFilter [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ org.springframework.security.web.server.WebFilterChainProxy [DefaultWebFilterChain]\r\n\t|_ checkpoint β‡’ HTTP POST \"/api/clusters/stage/topics\" [ExceptionHandlingWebHandler]\r\n```\r\n\r\nThe topic seems to get created anyways.", "The problem was in requesting configs, right after topic creation. At this point of time cluster is not ready to response it. Fixed in master." ]
[]
"2021-08-25T15:03:00Z"
[ "type/bug", "scope/backend", "scope/frontend" ]
Error creating new topic
**Describe the bug**
Running with `docker-compose`, the local cluster is up after a few seconds. So I was about to create a new _topic_ via the UI's `Add a topic`.
- Next I clicked `Add Custom Parameter`
- Clicked `Custom Parameter` to have a look, but I did not select anything
- Clicked the big red `-` button on the right to remove the _parameter row_
- Clicked the button at the bottom to finish creating a new topic
- Error came in with _code_ `500` and _message_ `message.downconversion.enabled` has to be either true or false

Just out of curiosity, I tried another creation with the same pattern; this time another error was shown
```shell
Schema docker-compose 500 Unknown topic config name:
```
**Set up**
Running via `docker-compose`, configuration listed below.
```yml
version: "2"
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:latest
    container_name: kafka-zookeeper
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    ports:
      - 22181:2181

  kafka:
    image: confluentinc/cp-kafka:latest
    container_name: kafka-core
    depends_on:
      - zookeeper
    ports:
      - 29092:29092
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1

  kafka-ui:
    image: provectuslabs/kafka-ui
    container_name: kafka-ui
    ports:
      - "8080:8080"
    restart: always
    environment:
      - KAFKA_CLUSTERS_0_NAME=local
      - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092
      - KAFKA_CLUSTERS_0_ZOOKEEPER=localhost:2181
```
**Steps to Reproduce**
Steps to reproduce the behavior: please see the top section **Describe the bug**.

**Expected behavior**
I was expecting to create a new topic for the local cluster.

**Note:** After leaving out the `Add Custom Parameter` section, creating topics works fine.
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-api/src/main/resources/application-local.yml", "kafka-ui-api/src/main/resources/application-sdp.yml" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-api/src/main/resources/application-local.yml", "kafka-ui-api/src/main/resources/application-sdp.yml" ]
[]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java index bbb743daac1..6e46b607bcb 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/CorsGlobalConfiguration.java @@ -1,10 +1,15 @@ package com.provectus.kafka.ui.config; +import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Profile; +import org.springframework.core.io.ClassPathResource; import org.springframework.web.reactive.config.CorsRegistry; import org.springframework.web.reactive.config.EnableWebFlux; import org.springframework.web.reactive.config.WebFluxConfigurer; +import org.springframework.web.reactive.function.server.RouterFunction; +import org.springframework.web.reactive.function.server.RouterFunctions; +import org.springframework.web.reactive.function.server.ServerResponse; @Configuration @EnableWebFlux @@ -19,4 +24,22 @@ public void addCorsMappings(CorsRegistry registry) { .allowedHeaders("*") .allowCredentials(true); } + + @Bean + public RouterFunction<ServerResponse> cssFilesRouter() { + return RouterFunctions + .resources("/static/css/**", new ClassPathResource("static/static/css/")); + } + + @Bean + public RouterFunction<ServerResponse> jsFilesRouter() { + return RouterFunctions + .resources("/static/js/**", new ClassPathResource("static/static/js/")); + } + + @Bean + public RouterFunction<ServerResponse> mediaFilesRouter() { + return RouterFunctions + .resources("/static/media/**", new ClassPathResource("static/static/media/")); + } } \ No newline at end of file diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java index 026c4bb8967..256aa907cb0 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java @@ -247,7 +247,7 @@ public Mono<Void> deleteTopic(String clusterName, String topicName) { .orElseThrow(TopicNotFoundException::new); if (cluster.getFeatures().contains(Feature.TOPIC_DELETION)) { return kafkaService.deleteTopic(cluster, topic.getName()) - .doOnNext(t -> updateCluster(topicName, clusterName, cluster)); + .doOnSuccess(t -> updateCluster(topicName, clusterName, cluster)); } else { return Mono.error(new ValidationException("Topic deletion restricted")); } @@ -315,9 +315,11 @@ public Mono<Void> deleteConsumerGroupById(String clusterName, return clustersStorage.getClusterByName(clusterName) .map(cluster -> adminClientService.getOrCreateAdminClient(cluster) .map(ExtendedAdminClient::getAdminClient) - .map(adminClient -> adminClient.deleteConsumerGroups(List.of(groupId))) - .map(DeleteConsumerGroupsResult::all) - .flatMap(ClusterUtil::toMono) + .flatMap(adminClient -> + ClusterUtil.toMono( + adminClient.deleteConsumerGroups(List.of(groupId)).all() + ) + ) .onErrorResume(this::reThrowCustomException) ) .orElse(Mono.empty()); diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java index 18267df0ee4..6c58470e59b 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java +++ 
b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClustersStorage.java @@ -2,7 +2,6 @@ import com.provectus.kafka.ui.config.ClustersProperties; import com.provectus.kafka.ui.mapper.ClusterMapper; -import com.provectus.kafka.ui.model.Feature; import com.provectus.kafka.ui.model.KafkaCluster; import java.util.Collection; import java.util.HashMap; diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java index b03b19296c4..b966ee06794 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java @@ -290,12 +290,7 @@ public Mono<InternalTopic> createTopic(AdminClient adminClient, topicData -> getTopicsData(adminClient, Collections.singleton(topicData.getName())) .next() - ).switchIfEmpty(Mono.error(new RuntimeException("Can't find created topic"))) - .flatMap(t -> - loadTopicsConfig(adminClient, Collections.singletonList(t.getName())) - .map(c -> mergeWithConfigs(Collections.singletonList(t), c)) - .map(m -> m.values().iterator().next()) - ); + ).switchIfEmpty(Mono.error(new RuntimeException("Can't find created topic"))); } public Mono<InternalTopic> createTopic(KafkaCluster cluster, Mono<TopicCreation> topicCreation) { @@ -306,8 +301,9 @@ public Mono<InternalTopic> createTopic(KafkaCluster cluster, Mono<TopicCreation> public Mono<Void> deleteTopic(KafkaCluster cluster, String topicName) { return adminClientService.getOrCreateAdminClient(cluster) .map(ExtendedAdminClient::getAdminClient) - .map(adminClient -> adminClient.deleteTopics(List.of(topicName))) - .then(); + .flatMap(adminClient -> + ClusterUtil.toMono(adminClient.deleteTopics(List.of(topicName)).all()) + ); } @SneakyThrows @@ -667,7 +663,9 @@ public Mono<Void> deleteTopicMessages(KafkaCluster cluster, Map<TopicPartition, .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); return adminClientService.getOrCreateAdminClient(cluster) .map(ExtendedAdminClient::getAdminClient) - .map(ac -> ac.deleteRecords(records)).then(); + .flatMap(ac -> + ClusterUtil.toMono(ac.deleteRecords(records).all()) + ); } public Mono<RecordMetadata> sendMessage(KafkaCluster cluster, String topic, diff --git a/kafka-ui-api/src/main/resources/application-local.yml b/kafka-ui-api/src/main/resources/application-local.yml index 31190a77880..9b1458e1eda 100644 --- a/kafka-ui-api/src/main/resources/application-local.yml +++ b/kafka-ui-api/src/main/resources/application-local.yml @@ -1,9 +1,9 @@ kafka: clusters: - name: local - bootstrapServers: localhost:9093 + bootstrapServers: localhost:9092 zookeeper: localhost:2181 - schemaRegistry: http://localhost:8081 + schemaRegistry: http://localhost:8085 ksqldbServer: http://localhost:8088 kafkaConnect: - name: first diff --git a/kafka-ui-api/src/main/resources/application-sdp.yml b/kafka-ui-api/src/main/resources/application-sdp.yml index 71af1078f9d..2de30ccb7ca 100644 --- a/kafka-ui-api/src/main/resources/application-sdp.yml +++ b/kafka-ui-api/src/main/resources/application-sdp.yml @@ -1,9 +1,11 @@ kafka: clusters: - name: local - bootstrapServers: b-1.kad-msk.uxahxx.c6.kafka.eu-west-1.amazonaws.com:9092 + bootstrapServers: b-1.kad-msk.57w67o.c6.kafka.eu-central-1.amazonaws.com:9094 + properties: + security.protocol: SSL # zookeeper: localhost:2181 - schemaRegistry: http://kad-ecs-application-lb-857515197.eu-west-1.elb.amazonaws.com:9000/api/schema-registry +# schemaRegistry: 
http://kad-ecs-application-lb-857515197.eu-west-1.elb.amazonaws.com:9000/api/schema-registry # - # name: secondLocal # zookeeper: zookeeper1:2181
null
test
train
2021-08-25T14:58:18
"2021-07-13T12:30:14Z"
anjeyy
train
provectus/kafka-ui/708_723
provectus/kafka-ui
provectus/kafka-ui/708
provectus/kafka-ui/723
[ "timestamp(timedelta=94.0, similarity=0.9337098451601132)" ]
699512f7069791581af91a4c3c69004993a862fd
2ab1601a7f0aa1d7eee9b70fd0657f48d539665a
[ "It looks like UI team needs an endpoint to fetch the list of all available logDirs", "@workshur you can get them here: https://github.com/provectus/kafka-ui/issues/707", "Implemented here - #723 " ]
[ "Timur, could you please refactor your method not to pass Mono/Flux objects as a function parameter? For example, just using\r\n`brokerLogDir.map(... -> clusterService.updateBrokerLogDir(...))` instead?\r\nI know there're a lot of places where we've done it in such a way.", "Fixed, please review", "Looks like you'll return 200 even if broker sent error. I think it's better to pass 4xx/5xx status", "Fixed, please review", "Please do not use camel case in paths", "To be precise, it's not always Bad Request, we should provide 5xx errors also, and maybe it's better to throw on Mono level, to catch and provide as other errors ", "Url is copied from your task description :) But ok, I'll fix it. ", "sorry, my bad)", "Fixed", "Fixed" ]
"2021-07-27T14:44:20Z"
[ "type/enhancement", "scope/backend" ]
Add log dirs alter operation
Add endpoint: PATCH /api/clusters/{clusterName}/brokers/{id}/logDirs
```json
[
  {
    "topic": "topicName",
    "partition": 0,
    "logDir": "/var/data/kafka"
  },
  ...
]
```
Use the AdminClient `alterReplicaLogDirs` operation for it.
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml" ]
[ "kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/LogDirNotFoundApiException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/TopicOrPartitionNotFoundException.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java", "kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java", "kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml" ]
[ "kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/LogDirsTest.java" ]
diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java index d1e90a545e8..0b34efdbad1 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/controller/BrokersController.java @@ -3,6 +3,7 @@ import com.provectus.kafka.ui.api.BrokersApi; import com.provectus.kafka.ui.model.Broker; import com.provectus.kafka.ui.model.BrokerConfig; +import com.provectus.kafka.ui.model.BrokerLogdirUpdate; import com.provectus.kafka.ui.model.BrokerMetrics; import com.provectus.kafka.ui.model.BrokersLogdirs; import com.provectus.kafka.ui.service.ClusterService; @@ -51,4 +52,13 @@ public Mono<ResponseEntity<Flux<BrokerConfig>>> getBrokerConfig(String clusterNa .map(ResponseEntity::ok) .onErrorReturn(ResponseEntity.notFound().build()); } + + @Override + public Mono<ResponseEntity<Void>> updateBrokerTopicPartitionLogDir( + String clusterName, Integer id, Mono<BrokerLogdirUpdate> brokerLogdir, + ServerWebExchange exchange) { + return brokerLogdir + .flatMap(bld -> clusterService.updateBrokerLogDir(clusterName, id, bld)) + .map(ResponseEntity::ok); + } } diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java index 7c66fb98df6..f87fccbaaf9 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/ErrorCode.java @@ -20,7 +20,9 @@ public enum ErrorCode { TOPIC_NOT_FOUND(4008, HttpStatus.NOT_FOUND), SCHEMA_NOT_FOUND(4009, HttpStatus.NOT_FOUND), CONNECT_NOT_FOUND(4010, HttpStatus.NOT_FOUND), - KSQLDB_NOT_FOUND(4011, HttpStatus.NOT_FOUND); + KSQLDB_NOT_FOUND(4011, HttpStatus.NOT_FOUND), + DIR_NOT_FOUND(4012, HttpStatus.BAD_REQUEST), + TOPIC_OR_PARTITION_NOT_FOUND(4013, HttpStatus.BAD_REQUEST); static { // codes uniqueness check diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/LogDirNotFoundApiException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/LogDirNotFoundApiException.java new file mode 100644 index 00000000000..ab1666180d6 --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/LogDirNotFoundApiException.java @@ -0,0 +1,13 @@ +package com.provectus.kafka.ui.exception; + +public class LogDirNotFoundApiException extends CustomBaseException { + + public LogDirNotFoundApiException() { + super("The user-specified log directory is not found in the broker config."); + } + + @Override + public ErrorCode getErrorCode() { + return ErrorCode.DIR_NOT_FOUND; + } +} diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/TopicOrPartitionNotFoundException.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/TopicOrPartitionNotFoundException.java new file mode 100644 index 00000000000..0f67b4ff69e --- /dev/null +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/exception/TopicOrPartitionNotFoundException.java @@ -0,0 +1,13 @@ +package com.provectus.kafka.ui.exception; + +public class TopicOrPartitionNotFoundException extends CustomBaseException { + + public TopicOrPartitionNotFoundException() { + super("This server does not host this topic-partition."); + } + + @Override + public ErrorCode getErrorCode() { + return ErrorCode.TOPIC_OR_PARTITION_NOT_FOUND; + } +} diff --git 
a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java index 4bfbea56fa0..b75c7a62552 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/ClusterService.java @@ -9,6 +9,7 @@ import com.provectus.kafka.ui.mapper.DescribeLogDirsMapper; import com.provectus.kafka.ui.model.Broker; import com.provectus.kafka.ui.model.BrokerConfig; +import com.provectus.kafka.ui.model.BrokerLogdirUpdate; import com.provectus.kafka.ui.model.BrokerMetrics; import com.provectus.kafka.ui.model.BrokersLogdirs; import com.provectus.kafka.ui.model.Cluster; @@ -379,4 +380,10 @@ public Flux<BrokersLogdirs> getAllBrokersLogdirs(String clusterName, List<Intege .map(describeLogDirsMapper::toBrokerLogDirsList) .flatMapMany(Flux::fromIterable); } + + public Mono<Void> updateBrokerLogDir( + String clusterName, Integer id, BrokerLogdirUpdate brokerLogDir) { + return Mono.justOrEmpty(clustersStorage.getClusterByName(clusterName)) + .flatMap(c -> kafkaService.updateBrokerLogDir(c, id, brokerLogDir)); + } } \ No newline at end of file diff --git a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java index bfe0e7a829f..8dc5c3aed37 100644 --- a/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java +++ b/kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/KafkaService.java @@ -1,9 +1,12 @@ package com.provectus.kafka.ui.service; import com.provectus.kafka.ui.exception.IllegalEntityStateException; +import com.provectus.kafka.ui.exception.LogDirNotFoundApiException; import com.provectus.kafka.ui.exception.NotFoundException; import com.provectus.kafka.ui.exception.TopicMetadataException; +import com.provectus.kafka.ui.exception.TopicOrPartitionNotFoundException; import com.provectus.kafka.ui.exception.ValidationException; +import com.provectus.kafka.ui.model.BrokerLogdirUpdate; import com.provectus.kafka.ui.model.CleanupPolicy; import com.provectus.kafka.ui.model.CreateTopicMessage; import com.provectus.kafka.ui.model.ExtendedAdminClient; @@ -44,7 +47,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; import java.util.stream.Stream; import lombok.RequiredArgsConstructor; @@ -58,7 +60,6 @@ import org.apache.kafka.clients.admin.ConfigEntry; import org.apache.kafka.clients.admin.ConsumerGroupListing; import org.apache.kafka.clients.admin.DescribeConfigsOptions; -import org.apache.kafka.clients.admin.DescribeLogDirsResult; import org.apache.kafka.clients.admin.ListTopicsOptions; import org.apache.kafka.clients.admin.NewPartitionReassignment; import org.apache.kafka.clients.admin.NewPartitions; @@ -73,8 +74,11 @@ import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.Node; import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.TopicPartitionReplica; import org.apache.kafka.common.config.ConfigResource; +import org.apache.kafka.common.errors.LogDirNotFoundException; import org.apache.kafka.common.errors.TimeoutException; +import org.apache.kafka.common.errors.UnknownTopicOrPartitionException; import org.apache.kafka.common.requests.DescribeLogDirsResponse; import 
org.apache.kafka.common.serialization.ByteArraySerializer; import org.apache.kafka.common.serialization.BytesDeserializer; @@ -940,6 +944,26 @@ private Map<Integer, Integer> getBrokersMap(KafkaCluster cluster, return result; } + public Mono<Void> updateBrokerLogDir(KafkaCluster cluster, Integer broker, + BrokerLogdirUpdate brokerLogDir) { + return getOrCreateAdminClient(cluster) + .flatMap(ac -> updateBrokerLogDir(ac, brokerLogDir, broker)); + } + private Mono<Void> updateBrokerLogDir(ExtendedAdminClient adminMono, + BrokerLogdirUpdate b, + Integer broker) { + Map<TopicPartitionReplica, String> req = Map.of( + new TopicPartitionReplica(b.getTopic(), b.getPartition(), broker), + b.getLogDir()); + return Mono.just(adminMono) + .map(admin -> admin.getAdminClient().alterReplicaLogDirs(req)) + .flatMap(result -> ClusterUtil.toMono(result.all())) + .onErrorResume(UnknownTopicOrPartitionException.class, + e -> Mono.error(new TopicOrPartitionNotFoundException())) + .onErrorResume(LogDirNotFoundException.class, + e -> Mono.error(new LogDirNotFoundApiException())) + .doOnError(log::error); + } } diff --git a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml index bc849e63509..7fc50d5358e 100644 --- a/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml +++ b/kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml @@ -201,6 +201,31 @@ paths: items: $ref: '#/components/schemas/BrokersLogdirs' + /api/clusters/{clusterName}/brokers/{id}/logdirs: + patch: + tags: + - Brokers + summary: updateBrokerTopicPartitionLogDir + operationId: updateBrokerTopicPartitionLogDir + parameters: + - name: clusterName + in: path + required: true + schema: + type: string + - name: id + in: path + required: true + schema: + type: integer + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/BrokerLogdirUpdate' + responses: + 200: + description: OK /api/clusters/{clusterName}/topics: get: @@ -1702,6 +1727,16 @@ components: required: - id + BrokerLogdirUpdate: + type: object + properties: + topic: + type: string + partition: + type: integer + logDir: + type: string + ConsumerGroupState: type: string enum:
diff --git a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/LogDirsTest.java b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/LogDirsTest.java index 79bcce8d822..5412086a579 100644 --- a/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/LogDirsTest.java +++ b/kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/LogDirsTest.java @@ -3,9 +3,14 @@ import static org.assertj.core.api.Assertions.assertThat; import com.provectus.kafka.ui.AbstractBaseTest; +import com.provectus.kafka.ui.exception.LogDirNotFoundApiException; +import com.provectus.kafka.ui.exception.TopicOrPartitionNotFoundException; +import com.provectus.kafka.ui.model.BrokerLogdirUpdate; import com.provectus.kafka.ui.model.BrokerTopicLogdirs; import com.provectus.kafka.ui.model.BrokersLogdirs; +import com.provectus.kafka.ui.model.ErrorResponse; import java.util.List; +import java.util.Map; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; @@ -82,4 +87,41 @@ public void testWrongBrokers() { assertThat(dirs).isEmpty(); } + + @Test + public void testChangeDirToWrongDir() { + ErrorResponse dirs = webTestClient.patch() + .uri("/api/clusters/{clusterName}/brokers/{id}/logdirs", LOCAL, 1) + .bodyValue(Map.of( + "topic", "__consumer_offsets", + "partition", "0", + "logDir", "/asdf/as" + ) + ) + .exchange() + .expectStatus().isBadRequest() + .expectBody(ErrorResponse.class) + .returnResult() + .getResponseBody(); + + assertThat(dirs.getMessage()) + .isEqualTo(new LogDirNotFoundApiException().getMessage()); + + dirs = webTestClient.patch() + .uri("/api/clusters/{clusterName}/brokers/{id}/logdirs", LOCAL, 1) + .bodyValue(Map.of( + "topic", "asdf", + "partition", "0", + "logDir", "/var/lib/kafka/data" + ) + ) + .exchange() + .expectStatus().isBadRequest() + .expectBody(ErrorResponse.class) + .returnResult() + .getResponseBody(); + + assertThat(dirs.getMessage()) + .isEqualTo(new TopicOrPartitionNotFoundException().getMessage()); + } }
test
train
2021-07-29T12:50:44
"2021-07-23T11:30:20Z"
germanosin
train
provectus/kafka-ui/1009_1581
provectus/kafka-ui
provectus/kafka-ui/1009
provectus/kafka-ui/1581
[ "timestamp(timedelta=0.0, similarity=0.8867268794850615)" ]
79198fe8cd0cf2c9d9bb8e791b6df6448da6a246
d503691600035a78d6a41cb8c281f9f65639c268
[ "I did take a look and find out that SonarCloud seems not supporting this....\r\n\r\nhttps://community.sonarsource.com/t/how-to-use-sonarcloud-with-a-forked-repository-on-github/7363/27", "@IndeedSi Yeah I remember that. That's why I was thinking about some other solution, maaaybe" ]
[]
"2022-02-09T12:26:46Z"
[ "status/accepted", "scope/infrastructure" ]
Add an ability to run tests for PRs made from forks
[ ".github/workflows/backend.yml", ".github/workflows/e2e-checks.yaml", ".github/workflows/frontend.yaml" ]
[ ".github/workflows/backend.yml", ".github/workflows/e2e-checks.yaml", ".github/workflows/frontend.yaml" ]
[]
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 79c4bd2454e..573f1b08ac7 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -1,6 +1,6 @@ name: backend on: - pull_request: + pull_request_target: types: ['opened', 'edited', 'reopened', 'synchronize'] paths: - 'kafka-ui-api/**' @@ -32,6 +32,7 @@ jobs: restore-keys: ${{ runner.os }}-sonar - name: Build and analyze env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar + run: | + mvn versions:set -DnewVersion=$GITHUB_SHA + mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml index 8e1156c7617..9145b32ca6d 100644 --- a/.github/workflows/e2e-checks.yaml +++ b/.github/workflows/e2e-checks.yaml @@ -1,6 +1,6 @@ name: e2e-checks on: - pull_request: + pull_request_target: types: [ 'opened', 'edited', 'reopened', 'synchronize' ] paths: - 'kafka-ui-api/**' @@ -34,6 +34,7 @@ jobs: - name: Build with Maven id: build_app run: | + mvn versions:set -DnewVersion=$GITHUB_SHA mvn clean package -DskipTests ${{ github.event.inputs.extraMavenOptions }} - name: compose app id: compose_app @@ -41,10 +42,9 @@ jobs: run: | docker-compose -f ./documentation/compose/kafka-ui-connectors.yaml up -d - name: e2e run - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - run: mvn -pl '!kafka-ui-api' test -Pprod + run: | + mvn versions:set -DnewVersion=$GITHUB_SHA + mvn -pl '!kafka-ui-api' test -Pprod - name: Generate allure report uses: simple-elf/allure-report-action@master if: always() diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml index ba7f5c6cfd6..631981d1ed9 100644 --- a/.github/workflows/frontend.yaml +++ b/.github/workflows/frontend.yaml @@ -3,7 +3,7 @@ on: push: branches: - master - pull_request: + pull_request_target: types: ['opened', 'edited', 'reopened', 'synchronize'] paths: - 'kafka-ui-contract/**'
null
test
train
2022-02-10T13:07:29
"2021-10-25T10:10:59Z"
Haarolean
train
provectus/kafka-ui/1082_975
provectus/kafka-ui
provectus/kafka-ui/1082
provectus/kafka-ui/975
[ "timestamp(timedelta=23059.0, similarity=0.8451160168128085)" ]
547863fdb206de06b94afd7c41ecdf87d8884764
ee7c16efca407e3de9d33c18a736a46df60e17ee
[ "Hello there rgsurfs! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Current version in use is over two years old.\r\n\r\nhttps://mvnrepository.com/artifact/org.yaml/snakeyaml", "I don't see this one used anywhere either. Would you elaborate where do you see this dependency used?", "#1083", "Current version of transitive dependency of snakeyaml (via spring-boot-starter-webflux) is 1.28" ]
[]
"2021-10-18T07:17:37Z"
[ "status/invalid", "scope/backend", "type/security" ]
bump snakeyaml from 1.25 to 1.29 in kafka-ui-api
Current version in use is over two years old. https://mvnrepository.com/artifact/org.yaml/snakeyaml
[ "kafka-ui-react-app/package-lock.json" ]
[ "kafka-ui-react-app/package-lock.json" ]
[]
diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index a82e88d5ea8..49106b24f48 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -7762,12 +7762,13 @@ } }, "eslint-module-utils": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz", - "integrity": "sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.1.tgz", + "integrity": "sha512-fjoetBXQZq2tSTWZ9yWVl2KuFrTZZH3V+9iD1V1RfpDgxzJR+mPd/KZmMiA8gbPqdBzpNiEHOuT7IYEWxrH0zQ==", "dev": true, "requires": { "debug": "^3.2.7", + "find-up": "^2.1.0", "pkg-dir": "^2.0.0" }, "dependencies": { @@ -7799,28 +7800,50 @@ } }, "eslint-plugin-import": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.24.0.tgz", - "integrity": "sha512-Kc6xqT9hiYi2cgybOc0I2vC9OgAYga5o/rAFinam/yF/t5uBqxQbauNPMC6fgb640T/89P0gFoO27FOilJ/Cqg==", + "version": "2.25.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.25.2.tgz", + "integrity": "sha512-qCwQr9TYfoBHOFcVGKY9C9unq05uOxxdklmBXLVvcwo68y5Hta6/GzCZEMx2zQiu0woKNEER0LE7ZgaOfBU14g==", "dev": true, "requires": { - "array-includes": "^3.1.3", - "array.prototype.flat": "^1.2.4", + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", "debug": "^2.6.9", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.5", - "eslint-module-utils": "^2.6.2", - "find-up": "^2.0.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.0", "has": "^1.0.3", - "is-core-module": "^2.4.0", + "is-core-module": "^2.7.0", + "is-glob": "^4.0.3", "minimatch": "^3.0.4", - "object.values": "^1.1.3", - "pkg-up": "^2.0.0", - "read-pkg-up": "^3.0.0", + "object.values": "^1.1.5", "resolve": "^1.20.0", - "tsconfig-paths": "^3.9.0" + "tsconfig-paths": "^3.11.0" }, "dependencies": { + "array-includes": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.4.tgz", + "integrity": "sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1", + "get-intrinsic": "^1.1.1", + "is-string": "^1.0.7" + } + }, + "array.prototype.flat": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz", + "integrity": "sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0" + } + }, "doctrine": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", @@ -7829,6 +7852,133 @@ "requires": { "esutils": "^2.0.2" } + }, + "es-abstract": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", + "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-symbols": "^1.0.2", + "internal-slot": "^1.0.3", 
+ "is-callable": "^1.2.4", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.1", + "is-string": "^1.0.7", + "is-weakref": "^1.0.1", + "object-inspect": "^1.11.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", + "dev": true, + "requires": { + "debug": "^3.2.7", + "resolve": "^1.20.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "is-callable": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", + "dev": true + }, + "is-core-module": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", + "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "object-inspect": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", + "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", + "dev": true + }, + "object.values": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", + "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "tsconfig-paths": { + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz", + "integrity": 
"sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA==", + "dev": true, + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.0", + "strip-bom": "^3.0.0" + } } } }, @@ -9441,6 +9591,16 @@ "pump": "^3.0.0" } }, + "get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, "get-value": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", @@ -9678,6 +9838,15 @@ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", @@ -10795,6 +10964,12 @@ "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==", "dev": true }, + "is-shared-array-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz", + "integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==", + "dev": true + }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -10840,6 +11015,15 @@ "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", "dev": true }, + "is-weakref": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.1.tgz", + "integrity": "sha512-b2jKc2pQZjaeFYWEf7ScFj+Be1I+PXmlu572Q8coTXZ+LD/QQZ7ShPMst8h16riVgyXTQwUsFEl74mDvc/3MHQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.0" + } + }, "is-windows": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", @@ -12858,30 +13042,6 @@ } } }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - } - } - }, "loader-runner": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz", @@ -14760,12 +14920,6 @@ "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, "pinkie": { "version": "2.0.4", "resolved": 
"https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", @@ -14799,15 +14953,6 @@ "find-up": "^2.1.0" } }, - "pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", - "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } - }, "please-upgrade-node": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz", @@ -16953,38 +17098,6 @@ "type-fest": "^0.6.0" } }, - "read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - }, - "dependencies": { - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - } - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
null
train
train
2021-10-15T15:16:45
"2021-11-13T00:43:35Z"
rgsurfs
train
provectus/kafka-ui/1083_798
provectus/kafka-ui
provectus/kafka-ui/1083
provectus/kafka-ui/798
[ "timestamp(timedelta=8633.0, similarity=0.850395819062005)" ]
6c9f7f7b770d738c65952ac712ab0eeafaacb185
4f49e82a919313afacaf1f75e356a24c6c12cd4a
[ "Hi, I don't see this dependency anywhere in our project at all.", " appreciate the reply. tks.Β  Close my tickets then, maybe I was given bum info.....Β  I haveΒ  75mb jar fileΒ \nkafka-ui-api-0.2.1.jarΒ \n\nWhen I expanded that jar, i get three folders:BOOT-INFMETA-INForg\nThere are 171 jars in here:\\kafka-ui-api-0.2.1\\BOOT-INF\\lib\\\n\nRobert\n\n\n On Saturday, November 13, 2021, 09:05:22 AM EST, Roman Zabaluev ***@***.***> wrote: \n \n \n\n\nHi, I don't see this dependency anywhere in our project at all.\n\nβ€”\nYou are receiving this because you authored the thread.\nReply to this email directly, view it on GitHub, or unsubscribe.\nTriage notifications on the go with GitHub Mobile for iOS or Android.\n ", "This might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern! ", " We are getting our reports from Twistlock scans.Β  copy attached. I'll step back out of the issues for the moment.Β  tks for the replies.\nRobert\n On Sunday, November 14, 2021, 09:08:04 AM EST, Roman Zabaluev ***@***.***> wrote: \n \n \n\n\nThis might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern!\n\nβ€”\nYou are receiving this because you authored the thread.\nReply to this email directly, view it on GitHub, or unsubscribe.\nTriage notifications on the go with GitHub Mobile for iOS or Android.\n ", "If you’ve attached a copy via email it won’t work probably. Can you send it to me via ***REDACTED*** please?\r\n\r\n> On 14 Nov 2021, at 18:49, rgsurfs ***@***.***> wrote:\r\n> \r\n> ο»Ώ\r\n> We are getting our reports from Twistlock scans. copy attached. I'll step back out of the issues for the moment. tks for the replies.\r\n> Robert\r\n> On Sunday, November 14, 2021, 09:08:04 AM EST, Roman Zabaluev ***@***.***> wrote: \r\n> \r\n> \r\n> \r\n> \r\n> This might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern!\r\n> \r\n> β€”\r\n> You are receiving this because you authored the thread.\r\n> Reply to this email directly, view it on GitHub, or unsubscribe.\r\n> Triage notifications on the go with GitHub Mobile for iOS or Android.\r\n> β€”\r\n> You are receiving this because you modified the open/close state.\r\n> Reply to this email directly, view it on GitHub, or unsubscribe.\r\n> Triage notifications on the go with GitHub Mobile for iOS or Android. \r\n", "Current version of nimbus-jose-jwt (via spring-security) is 9.10.1." ]
[]
"2021-08-16T07:17:31Z"
[ "status/invalid", "scope/backend", "type/security" ]
bump nimbus-jose-jwt from 7.8 to 9.16 in kafka-ui-api
Current version in use is almost two years old. https://mvnrepository.com/artifact/com.nimbusds/nimbus-jose-jwt
[ "kafka-ui-react-app/package-lock.json" ]
[ "kafka-ui-react-app/package-lock.json" ]
[]
diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index a82e88d5ea8..169f8df8523 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -7711,9 +7711,9 @@ } }, "eslint-import-resolver-node": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.5.tgz", - "integrity": "sha512-XMoPKjSpXbkeJ7ZZ9icLnJMTY5Mc1kZbCakHquaFsXPpyWOwK0TK6CODO+0ca54UoM9LKOxyUNnoVZRl8TeaAg==", + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", "requires": { "debug": "^3.2.7", "resolve": "^1.20.0"
null
train
train
2021-11-13T16:38:13
"2021-11-13T00:49:21Z"
rgsurfs
train
provectus/kafka-ui/1083_975
provectus/kafka-ui
provectus/kafka-ui/1083
provectus/kafka-ui/975
[ "timestamp(timedelta=23816.0, similarity=0.8451755050228945)" ]
547863fdb206de06b94afd7c41ecdf87d8884764
ee7c16efca407e3de9d33c18a736a46df60e17ee
[ "Hi, I don't see this dependency anywhere in our project at all.", " appreciate the reply. tks.Β  Close my tickets then, maybe I was given bum info.....Β  I haveΒ  75mb jar fileΒ \nkafka-ui-api-0.2.1.jarΒ \n\nWhen I expanded that jar, i get three folders:BOOT-INFMETA-INForg\nThere are 171 jars in here:\\kafka-ui-api-0.2.1\\BOOT-INF\\lib\\\n\nRobert\n\n\n On Saturday, November 13, 2021, 09:05:22 AM EST, Roman Zabaluev ***@***.***> wrote: \n \n \n\n\nHi, I don't see this dependency anywhere in our project at all.\n\nβ€”\nYou are receiving this because you authored the thread.\nReply to this email directly, view it on GitHub, or unsubscribe.\nTriage notifications on the go with GitHub Mobile for iOS or Android.\n ", "This might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern! ", " We are getting our reports from Twistlock scans.Β  copy attached. I'll step back out of the issues for the moment.Β  tks for the replies.\nRobert\n On Sunday, November 14, 2021, 09:08:04 AM EST, Roman Zabaluev ***@***.***> wrote: \n \n \n\n\nThis might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern!\n\nβ€”\nYou are receiving this because you authored the thread.\nReply to this email directly, view it on GitHub, or unsubscribe.\nTriage notifications on the go with GitHub Mobile for iOS or Android.\n ", "If you’ve attached a copy via email it won’t work probably. Can you send it to me via ***REDACTED*** please?\r\n\r\n> On 14 Nov 2021, at 18:49, rgsurfs ***@***.***> wrote:\r\n> \r\n> ο»Ώ\r\n> We are getting our reports from Twistlock scans. copy attached. I'll step back out of the issues for the moment. tks for the replies.\r\n> Robert\r\n> On Sunday, November 14, 2021, 09:08:04 AM EST, Roman Zabaluev ***@***.***> wrote: \r\n> \r\n> \r\n> \r\n> \r\n> This might be a transitive dependency. There's nothing wrong with it, considering we're getting notifications about security vulnerabilities via dependabot and sonar. But thanks for the concern!\r\n> \r\n> β€”\r\n> You are receiving this because you authored the thread.\r\n> Reply to this email directly, view it on GitHub, or unsubscribe.\r\n> Triage notifications on the go with GitHub Mobile for iOS or Android.\r\n> β€”\r\n> You are receiving this because you modified the open/close state.\r\n> Reply to this email directly, view it on GitHub, or unsubscribe.\r\n> Triage notifications on the go with GitHub Mobile for iOS or Android. \r\n", "Current version of nimbus-jose-jwt (via spring-security) is 9.10.1." ]
[]
"2021-10-18T07:17:37Z"
[ "status/invalid", "scope/backend", "type/security" ]
bump nimbus-jose-jwt from 7.8 to 9.16 in kafka-ui-api
Current version in use is almost two years old. https://mvnrepository.com/artifact/com.nimbusds/nimbus-jose-jwt
[ "kafka-ui-react-app/package-lock.json" ]
[ "kafka-ui-react-app/package-lock.json" ]
[]
diff --git a/kafka-ui-react-app/package-lock.json b/kafka-ui-react-app/package-lock.json index a82e88d5ea8..49106b24f48 100644 --- a/kafka-ui-react-app/package-lock.json +++ b/kafka-ui-react-app/package-lock.json @@ -7762,12 +7762,13 @@ } }, "eslint-module-utils": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.6.2.tgz", - "integrity": "sha512-QG8pcgThYOuqxupd06oYTZoNOGaUdTY1PqK+oS6ElF6vs4pBdk/aYxFVQQXzcrAqp9m7cl7lb2ubazX+g16k2Q==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.1.tgz", + "integrity": "sha512-fjoetBXQZq2tSTWZ9yWVl2KuFrTZZH3V+9iD1V1RfpDgxzJR+mPd/KZmMiA8gbPqdBzpNiEHOuT7IYEWxrH0zQ==", "dev": true, "requires": { "debug": "^3.2.7", + "find-up": "^2.1.0", "pkg-dir": "^2.0.0" }, "dependencies": { @@ -7799,28 +7800,50 @@ } }, "eslint-plugin-import": { - "version": "2.24.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.24.0.tgz", - "integrity": "sha512-Kc6xqT9hiYi2cgybOc0I2vC9OgAYga5o/rAFinam/yF/t5uBqxQbauNPMC6fgb640T/89P0gFoO27FOilJ/Cqg==", + "version": "2.25.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.25.2.tgz", + "integrity": "sha512-qCwQr9TYfoBHOFcVGKY9C9unq05uOxxdklmBXLVvcwo68y5Hta6/GzCZEMx2zQiu0woKNEER0LE7ZgaOfBU14g==", "dev": true, "requires": { - "array-includes": "^3.1.3", - "array.prototype.flat": "^1.2.4", + "array-includes": "^3.1.4", + "array.prototype.flat": "^1.2.5", "debug": "^2.6.9", "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.5", - "eslint-module-utils": "^2.6.2", - "find-up": "^2.0.0", + "eslint-import-resolver-node": "^0.3.6", + "eslint-module-utils": "^2.7.0", "has": "^1.0.3", - "is-core-module": "^2.4.0", + "is-core-module": "^2.7.0", + "is-glob": "^4.0.3", "minimatch": "^3.0.4", - "object.values": "^1.1.3", - "pkg-up": "^2.0.0", - "read-pkg-up": "^3.0.0", + "object.values": "^1.1.5", "resolve": "^1.20.0", - "tsconfig-paths": "^3.9.0" + "tsconfig-paths": "^3.11.0" }, "dependencies": { + "array-includes": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.4.tgz", + "integrity": "sha512-ZTNSQkmWumEbiHO2GF4GmWxYVTiQyJy2XOTa15sdQSrvKn7l+180egQMqlrMOUMCyLMD7pmyQe4mMDUT6Behrw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1", + "get-intrinsic": "^1.1.1", + "is-string": "^1.0.7" + } + }, + "array.prototype.flat": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.2.5.tgz", + "integrity": "sha512-KaYU+S+ndVqyUnignHftkwc58o3uVU1jzczILJ1tN2YaIZpFIKBiP/x/j97E5MVPsaCloPbqWLB/8qCTVvT2qg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.0" + } + }, "doctrine": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", @@ -7829,6 +7852,133 @@ "requires": { "esutils": "^2.0.2" } + }, + "es-abstract": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", + "integrity": "sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "get-intrinsic": "^1.1.1", + "get-symbol-description": "^1.0.0", + "has": "^1.0.3", + "has-symbols": "^1.0.2", + "internal-slot": "^1.0.3", 
+ "is-callable": "^1.2.4", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.1", + "is-string": "^1.0.7", + "is-weakref": "^1.0.1", + "object-inspect": "^1.11.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.4", + "string.prototype.trimstart": "^1.0.4", + "unbox-primitive": "^1.0.1" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz", + "integrity": "sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw==", + "dev": true, + "requires": { + "debug": "^3.2.7", + "resolve": "^1.20.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + } + } + }, + "is-callable": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.4.tgz", + "integrity": "sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w==", + "dev": true + }, + "is-core-module": { + "version": "2.8.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz", + "integrity": "sha512-vd15qHsaqrRL7dtH6QNuy0ndJmRDrS9HAM1CAiSifNUFv4x1a0CCVsj18hJ1mShxIG6T2i1sO78MkP56r0nYRw==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-regex": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", + "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "has-tostringtag": "^1.0.0" + } + }, + "is-string": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", + "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", + "dev": true, + "requires": { + "has-tostringtag": "^1.0.0" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "object-inspect": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", + "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", + "dev": true + }, + "object.values": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz", + "integrity": "sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3", + "es-abstract": "^1.19.1" + } + }, + "tsconfig-paths": { + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.11.0.tgz", + "integrity": 
"sha512-7ecdYDnIdmv639mmDwslG6KQg1Z9STTz1j7Gcz0xa+nshh/gKDAHcPxRbWOsA3SPp0tXP2leTcY9Kw+NAkfZzA==", + "dev": true, + "requires": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.0", + "strip-bom": "^3.0.0" + } } } }, @@ -9441,6 +9591,16 @@ "pump": "^3.0.0" } }, + "get-symbol-description": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", + "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "get-intrinsic": "^1.1.1" + } + }, "get-value": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", @@ -9678,6 +9838,15 @@ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true }, + "has-tostringtag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", + "integrity": "sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "dev": true, + "requires": { + "has-symbols": "^1.0.2" + } + }, "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", @@ -10795,6 +10964,12 @@ "integrity": "sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg==", "dev": true }, + "is-shared-array-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz", + "integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==", + "dev": true + }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -10840,6 +11015,15 @@ "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", "dev": true }, + "is-weakref": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.1.tgz", + "integrity": "sha512-b2jKc2pQZjaeFYWEf7ScFj+Be1I+PXmlu572Q8coTXZ+LD/QQZ7ShPMst8h16riVgyXTQwUsFEl74mDvc/3MHQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.0" + } + }, "is-windows": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", @@ -12858,30 +13042,6 @@ } } }, - "load-json-file": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", - "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "parse-json": "^4.0.0", - "pify": "^3.0.0", - "strip-bom": "^3.0.0" - }, - "dependencies": { - "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", - "dev": true, - "requires": { - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" - } - } - } - }, "loader-runner": { "version": "2.4.0", "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz", @@ -14760,12 +14920,6 @@ "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, - "pify": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", - "dev": true - }, "pinkie": { "version": "2.0.4", "resolved": 
"https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", @@ -14799,15 +14953,6 @@ "find-up": "^2.1.0" } }, - "pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", - "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", - "dev": true, - "requires": { - "find-up": "^2.1.0" - } - }, "please-upgrade-node": { "version": "3.2.0", "resolved": "https://registry.npmjs.org/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz", @@ -16953,38 +17098,6 @@ "type-fest": "^0.6.0" } }, - "read-pkg-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-3.0.0.tgz", - "integrity": "sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=", - "dev": true, - "requires": { - "find-up": "^2.0.0", - "read-pkg": "^3.0.0" - }, - "dependencies": { - "path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "requires": { - "pify": "^3.0.0" - } - }, - "read-pkg": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", - "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", - "dev": true, - "requires": { - "load-json-file": "^4.0.0", - "normalize-package-data": "^2.3.2", - "path-type": "^3.0.0" - } - } - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
null
test
train
2021-10-15T15:16:45
"2021-11-13T00:49:21Z"
rgsurfs
train
provectus/kafka-ui/1188_1411
provectus/kafka-ui
provectus/kafka-ui/1188
provectus/kafka-ui/1411
[ "timestamp(timedelta=0.0, similarity=0.8441198095055402)" ]
439d41da0b1d1166c9d76350e680aa55e5498cd0
7e3c66b197894e6f74c87f2e625a4be0dc92bd75
[]
[ "I think it better to use `SchemaType.PROTOBUF` from `import { SchemaType } from 'generated-sources';\r\n` instead of hardcoded value" ]
"2022-01-19T09:15:06Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
Impossible to edit Schema of PROTOBUF type
**Describe the bug** The Edit Schema button redirects to an empty screen. **Set up** http://redesign.internal.kafka-ui.provectus.io/ **Steps to Reproduce** 1. Click the Edit Schema button for an existing proto schema; the redirect leads to an empty screen: http://redesign.internal.kafka-ui.provectus.io/ui/clusters/local/schemas/proto3/edit 2. The Back and Forward buttons in the navigation bar redirect to the application's URLs, but the screen is always empty. Only the Reload button helps. **Expected behavior** An edit screen with Latest schema and New schema, as for the other schema types, e.g. http://redesign.internal.kafka-ui.provectus.io/ui/clusters/local/schemas/json1/edit **Screenshots** ![image](https://user-images.githubusercontent.com/92585878/145196310-da89179b-04a4-488b-bff4-5e57adfc5171.png) **Additional context** Edit schema works correctly for the AVRO and JSON schema types
[ "kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx" ]
[ "kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx b/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx index 63a48580dca..27a109f0238 100644 --- a/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx +++ b/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx @@ -37,10 +37,11 @@ const Edit: React.FC = () => { const schema = useAppSelector((state) => selectSchemaById(state, subject)); - const formatedSchema = React.useMemo( - () => JSON.stringify(JSON.parse(schema?.schema || '{}'), null, '\t'), - [schema] - ); + const formatedSchema = React.useMemo(() => { + return schema?.schemaType === SchemaType.PROTOBUF + ? schema?.schema + : JSON.stringify(JSON.parse(schema?.schema || '{}'), null, '\t'); + }, [schema]); const onSubmit = React.useCallback(async (props: NewSchemaSubjectRaw) => { if (!schema) return;
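The fix above works because `JSON.parse` throws on Protobuf definitions (they are plain text, not JSON), which is what blanked the edit screen for PROTOBUF schemas. A minimal standalone sketch of the same guard, using simplified stand-ins for the project's generated `SchemaType` and schema object types (not the project's exact API):

```typescript
// Stand-ins for the generated types; the real ones come from 'generated-sources'.
enum SchemaType {
  AVRO = 'AVRO',
  JSON = 'JSON',
  PROTOBUF = 'PROTOBUF',
}

interface SchemaSubject {
  schema?: string;
  schemaType?: SchemaType;
}

// Protobuf definitions are plain text, so JSON.parse would throw on them;
// only non-Protobuf schemas get pretty-printed.
function formatSchema(subject?: SchemaSubject): string {
  if (subject?.schemaType === SchemaType.PROTOBUF) {
    return subject.schema ?? '';
  }
  return JSON.stringify(JSON.parse(subject?.schema || '{}'), null, '\t');
}
```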
null
test
train
2022-01-19T00:46:17
"2021-12-08T11:01:56Z"
agolosen
train
provectus/kafka-ui/1212_1389
provectus/kafka-ui
provectus/kafka-ui/1212
provectus/kafka-ui/1389
[ "timestamp(timedelta=0.0, similarity=0.9543798449095806)" ]
611307ef59b7c5853716fcf4badd09c57ad347c5
cdc929add70a9e5e0987a089b6c436079cae32f1
[]
[ "The direct use of `Colors.brand[50]` is a wrong approach. we must use the `theme` object instead", "I'm not sure we really need all these `!important`. Could you double check?", "```jsx\r\n <S.Title\r\n isSortable={isSortable}\r\n isCurrentSort={isCurrentSort}\r\n onClick: handleOnClick\r\n onKeyDown: handleOnKeyDown\r\n role: 'button'\r\n tabIndex: 0\r\n >\r\n```\r\n\r\nI would suggest to move all these conditions inside of handleOnClick & handleOnKeyDown functions", "```suggestion\r\n const isOrderable = !!(orderValue && handleOrderBy);\r\n```", "\r\n```suggestion\r\n const isOrdered = !!orderValue && orderBy === orderValue;\r\n```", "Rewrote to use theme object from props", "Removed `!important`\r\nLooks the same", "Rewrote this part. Still looks not ideal but better imo", ":+1:", "You can also use theme color styles there instead of color hardcode", "the same here", "The role of the <span /> should be role='span', not role='button'.", "Here too.", "There are two variable names in the same TopicColumnsToSort.NAME, I think it's better to rename these variables by\r\norderBy && orderValue => orderByName\r\notherOrderValue => orderByOutOfSyncReplicas", "If it is (const sortIconTitle) then it must be (const title => const testTitle).", "I don't understand Why this variable (SPACE_KEY) is a constant in uppercase, although, for example, it is not (title)?", "The same variable with different names (cell, td).\r\nOne place TableHeaderCell one place td\r\nOr they will be TableHeaderCell and cell\r\nOr they will be TableHeaderTd and td", "The role of the <span /> should be role='span', not role='button'.\r\nI think a variable name would be better (const titleNode => const span).", "I think a variable name would be better (const titleNode => const span).", "The same variable in this file has been declared twice with different names (titleNode and previewNode)\r\nAnd I've already mentioned the best version (const span) :))", "Here too.", "I don't think so as it is a `span` that acts like a `button`\r\nAnd, as I know, `role='span'` is not a correct role type \r\n[List of role types on MDN](https://developer.mozilla.org/en-US/docs/Web/Accessibility/ARIA/Roles#aria_role_types)", "It represents a `space key` press and used in `userEvent.type` as constant and will not be changed anytime soon\r\nFor example, `userEvent.type` can take `{enter}` as a second arg to represent `enter key` press\r\nSo if I needed to put `enter key` to a variable I'll do it like this\r\n```js\r\nconst ENTER_KEY = '{enter}'\r\n```\r\n\r\nI don't mind changing it tho", "I tried to change them like this and I don't like how it looks\r\nIf we, by some reason, need to change `const orderByName = TopicColumnsToSort.NAME` to something like `const orderByTotalPartitions = TopicColumnsToSort.TOTAL_PARTITIONS` we have to change all variables to new ones\r\n\r\nI'm thinking about removing `orderBy`, `orderValue` and `otherOrderValue` variables and to use `TopicColumnsToSort.NAME` etc \r\nIt will be more readable, I think", "![image](https://user-images.githubusercontent.com/12596177/149927070-a54c7393-6aca-42db-9666-478bbe4c1f2e.png)\r\nLooks better IMO", "We can have `Title` and `Preview` in `TableHeaderCell` component which are both `span`'s\r\nI think it will be confusing", "`sortIconTitle` represents `title` attribute value on HTML node (`title='Sort icon'`)\r\n\r\nIs `sortIconTitleValue` will be better?", "Yes it is also a good version. 
", "I don't think so.", "For me the best way is \r\nconst title = 'test title'; => const testTitle = 'test title';\r\nconst sortIconTitle => this is okay.", "I think either all hardcoded variables must be in snake case or in camelCase.", "<span role='button'/>\r\nI suggest this role change to <span role='span'/> or something else\r\nWhat do you think about it ? @workshur , @Hurenka.", "Will be resolved with https://github.com/provectus/kafka-ui/pull/1389#discussion_r786774385", "will be resolved with https://github.com/provectus/kafka-ui/pull/1389#discussion_r786659717", "will be resolved with https://github.com/provectus/kafka-ui/pull/1389#discussion_r786659717", "Please, help us decide which naming to choose @workshur , @Hurenka", "Done", "Done", "Will be resolved with https://github.com/provectus/kafka-ui/pull/1389#discussion_r786774385", "To my mind here we can add a role 'button'. if it was an element with predefined role, like 'h2' then the first role would be used - 'heading', therefore adding the second 'button' would not make sense.\r\nBut for 'span' we can apply the ARIA role to show that this span is clickable.", "I used to write consts representing keyboard events in upper case, like Damir." ]
"2022-01-14T06:55:50Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
Confusing sorting UI in Topics
### Is your proposal related to a problem? When you access the Topics tab, some elements of the UI seem confusing: - The topics are sorted by Topic Name, but it is not highlighted - You need to click any other table field to see that sorting works (purple highlight) - The sorting arrows are way too small, but you need to click them to start sorting - The arrows point up and down, but you can't actually sort anything up or down in a given field (THIS MAY BE A BUG) ### Describe the solution you'd like - The selected sort field is highlighted by default - You can click the field name, not the tiny arrows, to start sorting - Sorting works up and down, as the arrows suggest - You don't have to reload the page to return to the default state (just click on Topic Name instead) ### Describe alternatives you've considered --- ### Additional context Please have a look at up & down sorting. It doesn't work right now in a selected field. Only sorting in-between fields works.
[ "kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap", "kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap", "kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap", "kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts", "kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx", "kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx", "kafka-ui-react-app/src/redux/reducers/topics/reducer.ts", "kafka-ui-react-app/src/theme/theme.ts" ]
[ "kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap", "kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap", "kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap", "kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts", "kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx", "kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx", "kafka-ui-react-app/src/redux/reducers/topics/reducer.ts", "kafka-ui-react-app/src/theme/theme.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap b/kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap index 34f655c6374..7f14f530d18 100644 --- a/kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Connect/Details/Tasks/__tests__/__snapshots__/Tasks.spec.tsx.snap @@ -18,22 +18,7 @@ exports[`Tasks view matches snapshot 1`] = ` background-color: #F1F2F3; } -.c1 { - padding: 4px 0 4px 24px !important; - border-bottom-width: 1px !important; - vertical-align: middle !important; -} - -.c1.is-clickable { - cursor: pointer !important; - pointer-events: all !important; -} - -.c1.has-text-link-dark span { - color: #4F4FFF !important; -} - -.c1 span { +.c2 { font-family: Inter,sans-serif; font-size: 12px; font-style: normal; @@ -46,18 +31,13 @@ exports[`Tasks view matches snapshot 1`] = ` text-align: left; background: #FFFFFF; color: #73848C; + cursor: default; } -.c1 span.preview { - margin-left: 8px; - font-size: 14px; - color: #4F4FFF; - cursor: pointer; -} - -.c1 span.is-clickable { - cursor: pointer !important; - pointer-events: all !important; +.c1 { + padding: 4px 0 4px 24px; + border-bottom-width: 1px; + vertical-align: middle; } <table @@ -67,40 +47,36 @@ exports[`Tasks view matches snapshot 1`] = ` <tr> <th className="c1" - title="ID" > <span - className="title" + className="c2" > ID </span> </th> <th className="c1" - title="Worker" > <span - className="title" + className="c2" > Worker </span> </th> <th className="c1" - title="State" > <span - className="title" + className="c2" > State </span> </th> <th className="c1" - title="Trace" > <span - className="title" + className="c2" > Trace </span> @@ -109,7 +85,7 @@ exports[`Tasks view matches snapshot 1`] = ` className="c1" > <span - className="title" + className="c2" /> </th> </tr> @@ -203,22 +179,7 @@ exports[`Tasks view matches snapshot when no tasks 1`] = ` background-color: #F1F2F3; } -.c1 { - padding: 4px 0 4px 24px !important; - border-bottom-width: 1px !important; - vertical-align: middle !important; -} - -.c1.is-clickable { - cursor: pointer !important; - pointer-events: all !important; -} - -.c1.has-text-link-dark span { - color: #4F4FFF !important; -} - -.c1 span { +.c2 { font-family: Inter,sans-serif; font-size: 12px; font-style: normal; @@ -231,18 +192,13 @@ exports[`Tasks view matches snapshot when no tasks 1`] = ` text-align: left; background: #FFFFFF; color: #73848C; + cursor: default; } -.c1 span.preview { - margin-left: 8px; - font-size: 14px; - color: #4F4FFF; - cursor: pointer; -} - -.c1 span.is-clickable { - cursor: pointer !important; - pointer-events: all !important; +.c1 { + padding: 4px 0 4px 24px; + border-bottom-width: 1px; + vertical-align: middle; } <table @@ -252,40 +208,36 @@ exports[`Tasks view matches snapshot when no tasks 1`] = ` <tr> <th className="c1" - title="ID" > <span - className="title" + className="c2" > ID </span> </th> <th className="c1" - title="Worker" > <span - className="title" + className="c2" > Worker </span> </th> <th className="c1" - title="State" > <span - className="title" + className="c2" > State </span> </th> <th className="c1" - title="Trace" > <span - className="title" + className="c2" > Trace </span> @@ -294,7 +246,7 @@ exports[`Tasks view matches snapshot when no tasks 1`] = ` className="c1" > <span - className="title" + className="c2" /> </th> </tr> diff --git 
a/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap b/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap index 2fbbed56bb5..26d4830207e 100644 --- a/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Connect/List/__tests__/__snapshots__/ListItem.spec.tsx.snap @@ -259,6 +259,8 @@ exports[`Connectors ListItem matches snapshot 1`] = ` "normal": "#FFFFFF", }, "color": Object { + "active": "#4F4FFF", + "hover": "#4F4FFF", "normal": "#73848C", }, "previewColor": Object { diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap index 117c5269190..f9e7653eb82 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/__test__/__snapshots__/Details.spec.tsx.snap @@ -270,6 +270,8 @@ exports[`Details when it has readonly flag does not render the Action button a T "normal": "#FFFFFF", }, "color": Object { + "active": "#4F4FFF", + "hover": "#4F4FFF", "normal": "#73848C", }, "previewColor": Object { diff --git a/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts b/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts index 6d4fbf847cc..6e4d44d65f2 100644 --- a/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts +++ b/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.styled.ts @@ -1,43 +1,52 @@ -import styled from 'styled-components'; -import { Colors } from 'theme/theme'; +import styled, { css } from 'styled-components'; -import { TableHeaderCellProps } from './TableHeaderCell'; +interface TitleProps { + isOrderable?: boolean; + isOrdered?: boolean; +} -export const TableHeaderCell = styled.th<TableHeaderCellProps>` - padding: 4px 0 4px 24px !important; - border-bottom-width: 1px !important; - vertical-align: middle !important; +const isOrderableStyles = css` + cursor: pointer; - &.is-clickable { - cursor: pointer !important; - pointer-events: all !important; + &:hover { + color: ${(props) => props.theme.thStyles.color.hover}; } +`; - &.has-text-link-dark span { - color: ${Colors.brand[50]} !important; - } +export const Title = styled.span<TitleProps>` + font-family: Inter, sans-serif; + font-size: 12px; + font-style: normal; + font-weight: 400; + line-height: 16px; + letter-spacing: 0em; + text-align: left; + background: ${(props) => props.theme.thStyles.backgroundColor.normal}; + color: ${(props) => + props.isOrdered + ? 
props.theme.thStyles.color.active + : props.theme.thStyles.color.normal}; + cursor: default; - span { - font-family: Inter, sans-serif; - font-size: 12px; - font-style: normal; - font-weight: 400; - line-height: 16px; - letter-spacing: 0em; - text-align: left; - background: ${(props) => props.theme.thStyles.backgroundColor.normal}; - color: ${(props) => props.theme.thStyles.color.normal}; + ${(props) => props.isOrderable && isOrderableStyles} +`; - &.preview { - margin-left: 8px; - font-size: 14px; - color: ${(props) => props.theme.thStyles.previewColor.normal}; - cursor: pointer; - } +export const Preview = styled.span` + margin-left: 8px; + font-family: Inter, sans-serif; + font-style: normal; + font-weight: 400; + line-height: 16px; + letter-spacing: 0em; + text-align: left; + background: ${(props) => props.theme.thStyles.backgroundColor.normal}; + font-size: 14px; + color: ${(props) => props.theme.thStyles.previewColor.normal}; + cursor: pointer; +`; - &.is-clickable { - cursor: pointer !important; - pointer-events: all !important; - } - } +export const TableHeaderCell = styled.th` + padding: 4px 0 4px 24px; + border-bottom-width: 1px; + vertical-align: middle; `; diff --git a/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx b/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx index 66a75835ceb..bf90430e006 100644 --- a/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx +++ b/kafka-ui-react-app/src/components/common/table/TableHeaderCell/TableHeaderCell.tsx @@ -1,7 +1,6 @@ import React from 'react'; import { TopicColumnsToSort } from 'generated-sources'; import * as S from 'components/common/table/TableHeaderCell/TableHeaderCell.styled'; -import cx from 'classnames'; export interface TableHeaderCellProps { title?: string; @@ -13,38 +12,57 @@ export interface TableHeaderCellProps { } const TableHeaderCell: React.FC<TableHeaderCellProps> = (props) => { - const { title, previewText, onPreview, orderBy, orderValue, handleOrderBy } = - props; + const { + title, + previewText, + onPreview, + orderBy, + orderValue, + handleOrderBy, + ...restProps + } = props; + const isOrdered = !!orderValue && orderValue === orderBy; + const isOrderable = !!(orderValue && handleOrderBy); + + const handleOnClick = () => { + return orderValue && handleOrderBy && handleOrderBy(orderValue); + }; + const handleOnKeyDown = (event: React.KeyboardEvent) => { + return ( + event.code === 'Space' && + orderValue && + handleOrderBy && + handleOrderBy(orderValue) + ); + }; + const orderableProps = isOrderable && { + isOrderable, + onClick: handleOnClick, + onKeyDown: handleOnKeyDown, + role: 'button', + tabIndex: 0, + }; return ( - <S.TableHeaderCell - className={cx(orderBy && orderBy === orderValue && 'has-text-link-dark')} - {...props} - > - <span className="title">{title}</span> + <S.TableHeaderCell {...restProps}> + <S.Title isOrdered={isOrdered} {...orderableProps}> + {title} + {isOrderable && ( + <span title="Sort icon" className="icon is-small"> + <i className="fas fa-sort" /> + </span> + )} + </S.Title> + {previewText && ( - <span - className="preview" + <S.Preview onClick={onPreview} onKeyDown={onPreview} role="button" tabIndex={0} > {previewText} - </span> - )} - {orderValue && ( - <span - className="icon is-small is-clickable" - onClick={() => - orderValue && handleOrderBy && handleOrderBy(orderValue) - } - onKeyDown={() => handleOrderBy} - role="button" - tabIndex={0} - > - <i className="fas fa-sort" /> - </span> 
+ </S.Preview> )} </S.TableHeaderCell> ); diff --git a/kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx b/kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx index 7b5fd4e2aba..7742f67b703 100644 --- a/kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx +++ b/kafka-ui-react-app/src/components/common/table/__tests__/TableHeaderCell.spec.tsx @@ -1,17 +1,25 @@ import React from 'react'; -import { StaticRouter } from 'react-router'; +import { screen, within } from '@testing-library/react'; +import { render } from 'lib/testHelpers'; import TableHeaderCell, { TableHeaderCellProps, } from 'components/common/table/TableHeaderCell/TableHeaderCell'; -import { mountWithTheme } from 'lib/testHelpers'; import { TopicColumnsToSort } from 'generated-sources'; +import theme from 'theme/theme'; +import userEvent from '@testing-library/user-event'; -const STUB_TITLE = 'stub test title'; -const STUB_PREVIEW_TEXT = 'stub preview text'; +const SPACE_KEY = ' '; + +const testTitle = 'test title'; +const testPreviewText = 'test preview text'; +const handleOrderBy = jest.fn(); +const onPreview = jest.fn(); + +const sortIconTitle = 'Sort icon'; describe('TableHeaderCell', () => { - const setupComponent = (props: TableHeaderCellProps) => ( - <StaticRouter> + const setupComponent = (props: Partial<TableHeaderCellProps> = {}) => + render( <table> <thead> <tr> @@ -19,49 +27,135 @@ describe('TableHeaderCell', () => { </tr> </thead> </table> - </StaticRouter> - ); + ); it('renders without props', () => { - const wrapper = mountWithTheme(setupComponent({})); - expect(wrapper.contains(<TableHeaderCell />)).toBeTruthy(); + setupComponent(); + expect(screen.getByRole('columnheader')).toBeInTheDocument(); }); it('renders with title & preview text', () => { - const wrapper = mountWithTheme( - setupComponent({ - title: STUB_TITLE, - previewText: STUB_PREVIEW_TEXT, - }) - ); + setupComponent({ + title: testTitle, + previewText: testPreviewText, + }); - expect(wrapper.find('span.title').text()).toEqual(STUB_TITLE); - expect(wrapper.find('span.preview').text()).toEqual(STUB_PREVIEW_TEXT); + const columnheader = screen.getByRole('columnheader'); + expect(within(columnheader).getByText(testTitle)).toBeInTheDocument(); + expect(within(columnheader).getByText(testPreviewText)).toBeInTheDocument(); }); - it('renders with orderBy props', () => { - const wrapper = mountWithTheme( - setupComponent({ - title: STUB_TITLE, - orderBy: TopicColumnsToSort.NAME, - orderValue: TopicColumnsToSort.NAME, - }) - ); + it('renders with orderable props', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + orderValue: TopicColumnsToSort.NAME, + handleOrderBy, + }); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByRole('button'); + expect(title).toBeInTheDocument(); + expect(title).toHaveTextContent(testTitle); + expect(within(title).getByTitle(sortIconTitle)).toBeInTheDocument(); + expect(title).toHaveStyle(`color: ${theme.thStyles.color.active};`); + expect(title).toHaveStyle('cursor: pointer;'); + }); - expect(wrapper.find('span.title').text()).toEqual(STUB_TITLE); - expect(wrapper.exists('span.icon.is-small.is-clickable')).toBeTruthy(); - expect(wrapper.exists('i.fas.fa-sort')).toBeTruthy(); + it('renders click on title triggers handler', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + orderValue: TopicColumnsToSort.NAME, + handleOrderBy, + 
}); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByRole('button'); + userEvent.click(title); + expect(handleOrderBy.mock.calls.length).toBe(1); + }); + + it('renders space on title triggers handler', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + orderValue: TopicColumnsToSort.NAME, + handleOrderBy, + }); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByRole('button'); + userEvent.type(title, SPACE_KEY); + // userEvent.type clicks and only then presses space + expect(handleOrderBy.mock.calls.length).toBe(2); + }); + + it('click on preview triggers handler', () => { + setupComponent({ + title: testTitle, + previewText: testPreviewText, + onPreview, + }); + const columnheader = screen.getByRole('columnheader'); + const preview = within(columnheader).getByRole('button'); + userEvent.click(preview); + expect(onPreview.mock.calls.length).toBe(1); + }); + + it('click on preview triggers handler', () => { + setupComponent({ + title: testTitle, + previewText: testPreviewText, + onPreview, + }); + const columnheader = screen.getByRole('columnheader'); + const preview = within(columnheader).getByRole('button'); + userEvent.type(preview, SPACE_KEY); + // userEvent.type clicks and only then presses space + expect(onPreview.mock.calls.length).toBe(2); + }); + + it('renders without sort indication', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + }); + + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByText(testTitle); + expect(within(title).queryByTitle(sortIconTitle)).not.toBeInTheDocument(); + expect(title).toHaveStyle('cursor: default;'); + }); + + it('renders with hightlighted title when orderBy and orderValue are equal', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + orderValue: TopicColumnsToSort.NAME, + }); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByText(testTitle); + expect(title).toHaveStyle(`color: ${theme.thStyles.color.active};`); + }); + + it('renders without hightlighted title when orderBy and orderValue are not equal', () => { + setupComponent({ + title: testTitle, + orderBy: TopicColumnsToSort.NAME, + orderValue: TopicColumnsToSort.OUT_OF_SYNC_REPLICAS, + }); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByText(testTitle); + expect(title).toHaveStyle(`color: ${theme.thStyles.color.normal}`); }); it('renders with default (primary) theme', () => { - const wrapper = mountWithTheme( - setupComponent({ - title: STUB_TITLE, - }) - ); + setupComponent({ + title: testTitle, + }); - const domNode = wrapper.find('span').at(0).getDOMNode(); - const background = getComputedStyle(domNode).getPropertyValue('background'); - expect(background).toBe('rgb(255, 255, 255)'); + const columnheader = screen.getByRole('columnheader'); + const title = within(columnheader).getByText(testTitle); + expect(title).toHaveStyle( + `background: ${theme.thStyles.backgroundColor.normal};` + ); }); }); diff --git a/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts b/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts index 485793cd215..5e4426d77d7 100644 --- a/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts +++ b/kafka-ui-react-app/src/redux/reducers/topics/reducer.ts @@ -2,13 +2,14 @@ import { Action, TopicsState } from 'redux/interfaces'; 
import { getType } from 'typesafe-actions'; import * as actions from 'redux/actions'; import * as _ from 'lodash'; +import { TopicColumnsToSort } from 'generated-sources'; export const initialState: TopicsState = { byName: {}, allNames: [], totalPages: 1, search: '', - orderBy: null, + orderBy: TopicColumnsToSort.NAME, consumerGroups: [], }; diff --git a/kafka-ui-react-app/src/theme/theme.ts b/kafka-ui-react-app/src/theme/theme.ts index 6737c6739ab..ec49a6cfac0 100644 --- a/kafka-ui-react-app/src/theme/theme.ts +++ b/kafka-ui-react-app/src/theme/theme.ts @@ -127,6 +127,8 @@ const theme = { }, color: { normal: Colors.neutral[50], + hover: Colors.brand[50], + active: Colors.brand[50], }, previewColor: { normal: Colors.brand[50],
null
val
train
2022-01-14T00:23:12
"2021-12-09T12:56:40Z"
Khakha-A
train
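The record above ends with tests for a sortable, keyboard-accessible column header (click or Space on the title fires the sort handler; click or Space on the preview text fires the preview handler) plus a reducer default and theme colors. Below is a minimal sketch of the header contract those tests assume; the component name `SortableTitle` and its props are illustrative, not the repository's actual component.

```tsx
// Minimal sketch (hypothetical names) of the sortable-header contract the
// tests above exercise. Space also triggers sorting, which is why
// userEvent.type(title, ' ') registers two handler calls (click + keydown).
import React from 'react';

interface Props<T extends string> {
  title: string;
  orderValue?: T;
  handleOrderBy?: (orderValue: T) => void;
}

function SortableTitle<T extends string>({
  title,
  orderValue,
  handleOrderBy,
}: Props<T>) {
  const onSort = () => {
    if (orderValue && handleOrderBy) handleOrderBy(orderValue);
  };
  return (
    <span
      role="button"
      tabIndex={0}
      onClick={onSort}
      onKeyDown={(e) => e.key === ' ' && onSort()}
    >
      {title}
    </span>
  );
}

export default SortableTitle;
```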
provectus/kafka-ui/1213_1395
provectus/kafka-ui
provectus/kafka-ui/1213
provectus/kafka-ui/1395
[ "timestamp(timedelta=0.0, similarity=0.917865918159041)" ]
d0761e2040bad4b325922764f457edce3f80bca5
77895236135a776cfaeec94aea8ce5a075ab38cf
[ "a similar one for the old design: #996" ]
[]
"2022-01-17T08:49:05Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed" ]
Topic Creation: Inconsistency in Deleted parameters
**Describe the bug** When creating a new topic, you have the ability to add custom parameters. When selecting a custom parameter, you can delete it, but if you want to add it again, it is either greyed out or still accessible -- and it should not be. **Set up** http://redesign.internal.kafka-ui.provectus.io/ **Steps to Reproduce** Steps to reproduce the behavior: 1. Go to Topics - Add a Topic 2. Add custom parameter - Select Preallocate 3. Add custom parameter - Select retention.bytes 4. Add custom parameter - Select segment.bytes 5. Delete preallocate parameter - Cannot be selected when clicking Add custom parameter 6. Delete retention.bytes - Add custom parameter - preallocate not greyed out / available for selection 7. Delete segment.bytes - Add custom parameter - all deleted parameters are available for selection **Expected behavior** When a parameter is deleted, it can be added again immediately, without inconsistencies **Screenshots** (If applicable, add screenshots to help explain your problem) **Additional context** There may be a small lag in how the tool processes the add/delete functionality. It seems to affect specific parameters like preallocate, though.
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx", "kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts" ]
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx", "kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/New/New.tsx b/kafka-ui-react-app/src/components/Topics/New/New.tsx index de4075912c2..7d596e63c1a 100644 --- a/kafka-ui-react-app/src/components/Topics/New/New.tsx +++ b/kafka-ui-react-app/src/components/Topics/New/New.tsx @@ -37,7 +37,7 @@ const New: React.FC = () => { history.push(clusterTopicPath(clusterName, data.name)); } catch (error) { - const response = await getResponse(error); + const response = await getResponse(error as Response); const alert: FailurePayload = { subject: ['schema', data.name].join('-'), title: `Schema ${data.name}`, diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx index c2a023ba0e8..1c3163bf6d1 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParamField.tsx @@ -1,8 +1,7 @@ -import React from 'react'; +import React, { useRef } from 'react'; import { ErrorMessage } from '@hookform/error-message'; import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; import { FieldArrayWithId, useFormContext } from 'react-hook-form'; -import { remove as _remove } from 'lodash'; import { TopicFormData } from 'redux/interfaces'; import { InputLabel } from 'components/common/Input/InputLabel.styled'; import { FormError } from 'components/common/Input/Input.styled'; @@ -14,7 +13,7 @@ import * as C from 'components/Topics/shared/Form/TopicForm.styled'; import * as S from './CustomParams.styled'; -interface Props { +export interface Props { isDisabled: boolean; index: number; existingFields: string[]; @@ -37,19 +36,17 @@ const CustomParamField: React.FC<Props> = ({ watch, } = useFormContext<TopicFormData>(); const nameValue = watch(`customParams.${index}.name`); - let prevName = ''; + const prevName = useRef(nameValue); React.useEffect(() => { - prevName = nameValue; - }, []); - - React.useEffect(() => { - if (nameValue !== prevName) { + if (nameValue !== prevName.current) { let newExistingFields = [...existingFields]; - if (prevName) { - newExistingFields = _remove(newExistingFields, (el) => el === prevName); + if (prevName.current) { + newExistingFields = newExistingFields.filter( + (name) => name !== prevName.current + ); } - prevName = nameValue; + prevName.current = nameValue; newExistingFields.push(nameValue); setExistingFields(newExistingFields); setValue(`customParams.${index}.value`, TOPIC_CUSTOM_PARAMS[nameValue]); @@ -83,7 +80,10 @@ const CustomParamField: React.FC<Props> = ({ ))} </Select> <FormError> - <ErrorMessage errors={errors} name={`customParams.${index}.name`} /> + <ErrorMessage + errors={errors} + name={`customParams.${index}.name` as const} + /> </FormError> </div> </> @@ -101,13 +101,22 @@ const CustomParamField: React.FC<Props> = ({ disabled={isDisabled} /> <FormError> - <ErrorMessage errors={errors} name={`customParams.${index}.value`} /> + <ErrorMessage + errors={errors} + name={`customParams.${index}.value` as const} + /> </FormError> </div> <S.DeleteButtonWrapper> - <IconButtonWrapper onClick={() => remove(index)} aria-hidden> - <CloseIcon /> + <IconButtonWrapper + onClick={() => remove(index)} + onKeyDown={(e: React.KeyboardEvent) => + e.code === 'Space' && remove(index) + } + title={`Delete customParam field ${index}`} + > + <CloseIcon aria-hidden /> </IconButtonWrapper> </S.DeleteButtonWrapper> </C.Column> diff --git 
a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts index 092f6d08573..5c7d1debcd1 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.styled.ts @@ -6,11 +6,10 @@ export const ParamsWrapper = styled.div` `; export const DeleteButtonWrapper = styled.div` - height: 32px; + min-height: 32px; display: flex; flex-direction: column; - justify-content: center; align-items: center; - align-self: flex-end; - flex-grow: 0.25 !important; + justify-self: flex-start; + margin-top: 32px; `; diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx index f1a6be62bde..1eb4dbe4a92 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/CustomParams.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { TopicConfigByName, TopicFormData } from 'redux/interfaces'; -import { useFieldArray, useFormContext } from 'react-hook-form'; +import { useFieldArray, useFormContext, useWatch } from 'react-hook-form'; import { Button } from 'components/common/Button/Button'; import CustomParamField from './CustomParamField'; @@ -8,28 +8,41 @@ import * as S from './CustomParams.styled'; export const INDEX_PREFIX = 'customParams'; -interface Props { +export interface CustomParamsProps { isSubmitting: boolean; config?: TopicConfigByName; } -const CustomParams: React.FC<Props> = ({ isSubmitting }) => { +const CustomParams: React.FC<CustomParamsProps> = ({ isSubmitting }) => { const { control } = useFormContext<TopicFormData>(); const { fields, append, remove } = useFieldArray({ control, name: INDEX_PREFIX, }); + const watchFieldArray = useWatch({ + control, + name: INDEX_PREFIX, + defaultValue: fields, + }); + const controlledFields = fields.map((field, index) => { + return { + ...field, + ...watchFieldArray[index], + }; + }); + const [existingFields, setExistingFields] = React.useState<string[]>([]); + const removeField = (index: number): void => { setExistingFields( - existingFields.filter((field) => field === fields[index].name) + existingFields.filter((field) => field !== controlledFields[index].name) ); remove(index); }; return ( <S.ParamsWrapper> - {fields.map((field, idx) => ( + {controlledFields.map((field, idx) => ( <CustomParamField key={field.id} field={field} diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx new file mode 100644 index 00000000000..7d9ea036a0a --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx @@ -0,0 +1,123 @@ +import React from 'react'; +import { screen, within } from '@testing-library/react'; +import { render } from 'lib/testHelpers'; +import CustomParamsField, { + Props, +} from 'components/Topics/shared/Form/CustomParams/CustomParamField'; +import { FormProvider, useForm } from 'react-hook-form'; +import userEvent from '@testing-library/user-event'; +import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; + +const isDisabled = false; +const index = 0; +const 
existingFields: string[] = []; +const field = { name: 'name', value: 'value', id: 'id' }; +const remove = jest.fn(); +const setExistingFields = jest.fn(); + +const SPACE_KEY = ' '; + +describe('CustomParamsField', () => { + const setupComponent = (props: Props) => { + const Wrapper: React.FC = ({ children }) => { + const methods = useForm(); + return <FormProvider {...methods}>{children}</FormProvider>; + }; + + return render( + <Wrapper> + <CustomParamsField {...props} /> + </Wrapper> + ); + }; + + it('renders with props', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + expect(screen.getByRole('listbox')).toBeInTheDocument(); + expect(screen.getByRole('textbox')).toBeInTheDocument(); + expect(screen.getByRole('button')).toBeInTheDocument(); + }); + + describe('core functionality works', () => { + it('click on button triggers remove', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + userEvent.click(screen.getByRole('button')); + expect(remove.mock.calls.length).toBe(1); + }); + + it('pressing space on button triggers remove', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + userEvent.type(screen.getByRole('button'), SPACE_KEY); + // userEvent.type triggers remove two times as at first it clicks on element and then presses space + expect(remove.mock.calls.length).toBe(2); + }); + + it('can select option', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + const listbox = screen.getByRole('listbox'); + userEvent.selectOptions(listbox, ['compression.type']); + + const option = within(listbox).getByRole('option', { selected: true }); + expect(option).toHaveValue('compression.type'); + }); + + it('selecting option updates textbox value', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + const listbox = screen.getByRole('listbox'); + userEvent.selectOptions(listbox, ['compression.type']); + + const textbox = screen.getByRole('textbox'); + expect(textbox).toHaveValue(TOPIC_CUSTOM_PARAMS['compression.type']); + }); + + it('selecting option updates triggers setExistingFields', () => { + setupComponent({ + field, + isDisabled, + index, + remove, + existingFields, + setExistingFields, + }); + const listbox = screen.getByRole('listbox'); + userEvent.selectOptions(listbox, ['compression.type']); + + expect(setExistingFields.mock.calls.length).toBe(1); + }); + }); +}); diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx new file mode 100644 index 00000000000..d803e65e4b0 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx @@ -0,0 +1,171 @@ +import React from 'react'; +import { screen, within } from '@testing-library/react'; +import { render } from 'lib/testHelpers'; +import CustomParams, { + CustomParamsProps, +} from 'components/Topics/shared/Form/CustomParams/CustomParams'; +import { FormProvider, useForm } from 'react-hook-form'; +import userEvent from '@testing-library/user-event'; +import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; + +describe('CustomParams', () => { + const setupComponent = (props: CustomParamsProps) => { + const Wrapper: React.FC 
= ({ children }) => { + const methods = useForm(); + return <FormProvider {...methods}>{children}</FormProvider>; + }; + + return render( + <Wrapper> + <CustomParams {...props} /> + </Wrapper> + ); + }; + + beforeEach(() => { + setupComponent({ isSubmitting: false }); + }); + + it('renders with props', () => { + const addParamButton = screen.getByRole('button'); + expect(addParamButton).toBeInTheDocument(); + expect(addParamButton).toHaveTextContent('Add Custom Parameter'); + }); + + describe('works with user inputs correctly', () => { + it('button click creates custom param fieldset', () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + + const listbox = screen.getByRole('listbox'); + expect(listbox).toBeInTheDocument(); + + const textbox = screen.getByRole('textbox'); + expect(textbox).toBeInTheDocument(); + }); + + it('can select option', () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + + const listbox = screen.getByRole('listbox'); + + userEvent.selectOptions(listbox, ['compression.type']); + + const option = screen.getByRole('option', { + selected: true, + }); + expect(option).toHaveValue('compression.type'); + expect(option).toBeDisabled(); + + const textbox = screen.getByRole('textbox'); + expect(textbox).toHaveValue(TOPIC_CUSTOM_PARAMS['compression.type']); + }); + + it('when selected option changes disabled options update correctly', () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + + const listbox = screen.getByRole('listbox'); + + userEvent.selectOptions(listbox, ['compression.type']); + + const option = screen.getByRole('option', { + name: 'compression.type', + }); + expect(option).toBeDisabled(); + + userEvent.selectOptions(listbox, ['delete.retention.ms']); + const newOption = screen.getByRole('option', { + name: 'delete.retention.ms', + }); + expect(newOption).toBeDisabled(); + + expect(option).toBeEnabled(); + }); + + it('multiple button clicks create multiple fieldsets', () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + userEvent.click(addParamButton); + userEvent.click(addParamButton); + + const listboxes = screen.getAllByRole('listbox'); + expect(listboxes.length).toBe(3); + + const textboxes = screen.getAllByRole('textbox'); + expect(textboxes.length).toBe(3); + }); + + it("can't select already selected option", () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + userEvent.click(addParamButton); + + const listboxes = screen.getAllByRole('listbox'); + + const firstListbox = listboxes[0]; + userEvent.selectOptions(firstListbox, ['compression.type']); + + const firstListboxOption = within(firstListbox).getByRole('option', { + selected: true, + }); + expect(firstListboxOption).toBeDisabled(); + + const secondListbox = listboxes[1]; + const secondListboxOption = within(secondListbox).getByRole('option', { + name: 'compression.type', + }); + expect(secondListboxOption).toBeDisabled(); + }); + + it('when fieldset with selected custom property type is deleted disabled options update correctly', async () => { + const addParamButton = screen.getByRole('button'); + userEvent.click(addParamButton); + userEvent.click(addParamButton); + userEvent.click(addParamButton); + + const listboxes = screen.getAllByRole('listbox'); + + const firstListbox = listboxes[0]; + userEvent.selectOptions(firstListbox, ['compression.type']); + + const firstListboxOption = 
within(firstListbox).getByRole('option', { + selected: true, + }); + expect(firstListboxOption).toBeDisabled(); + + const secondListbox = listboxes[1]; + userEvent.selectOptions(secondListbox, ['delete.retention.ms']); + const secondListboxOption = within(secondListbox).getByRole('option', { + selected: true, + }); + expect(secondListboxOption).toBeDisabled(); + + const thirdListbox = listboxes[2]; + userEvent.selectOptions(thirdListbox, ['file.delete.delay.ms']); + const thirdListboxOption = within(thirdListbox).getByRole('option', { + selected: true, + }); + expect(thirdListboxOption).toBeDisabled(); + + const deleteSecondFieldsetButton = screen.getByTitle( + 'Delete customParam field 1' + ); + userEvent.click(deleteSecondFieldsetButton); + expect(secondListbox).not.toBeInTheDocument(); + + expect( + within(firstListbox).getByRole('option', { + name: 'delete.retention.ms', + }) + ).toBeEnabled(); + + expect( + within(thirdListbox).getByRole('option', { + name: 'delete.retention.ms', + }) + ).toBeEnabled(); + }); + }); +}); diff --git a/kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts b/kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts index 6cae47935a4..7804071f6f7 100644 --- a/kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts +++ b/kafka-ui-react-app/src/components/common/Icons/IconButtonWrapper.ts @@ -1,6 +1,9 @@ import styled from 'styled-components'; -const IconButtonWrapper = styled.span` +const IconButtonWrapper = styled.span.attrs(() => ({ + role: 'button', + tabIndex: '0', +}))` height: 16px !important; display: inline-block; &:hover {
null
val
train
2022-01-19T15:24:12
"2021-12-09T13:22:20Z"
Khakha-A
train
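The gold patch in the record above fixes the stale-option bug by replacing a per-render local variable (`let prevName = ''`) with a ref, so the previously selected name survives re-renders. A standalone sketch of that pattern follows; the component and prop names are illustrative, not the repository's actual `CustomParamField`.

```tsx
// Sketch of the "track the previous select value with a ref" pattern the
// patch above introduces; names are hypothetical.
import React, { useEffect, useRef } from 'react';

interface Props {
  nameValue: string;
  existingFields: string[];
  setExistingFields: (fields: string[]) => void;
}

const TrackedParamName: React.FC<Props> = ({
  nameValue,
  existingFields,
  setExistingFields,
}) => {
  // A ref keeps the previous name across renders; a plain local variable
  // (the original bug) is re-initialized on every render, so the old name
  // was never removed from the "already used" list.
  const prevName = useRef(nameValue);

  useEffect(() => {
    if (nameValue !== prevName.current) {
      const next = existingFields.filter((n) => n !== prevName.current);
      prevName.current = nameValue;
      setExistingFields([...next, nameValue]);
    }
  }, [nameValue, existingFields, setExistingFields]);

  return <span>{nameValue}</span>;
};

export default TrackedParamName;
```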
provectus/kafka-ui/1221_1429
provectus/kafka-ui
provectus/kafka-ui/1221
provectus/kafka-ui/1429
[ "timestamp(timedelta=0.0, similarity=0.9556253923811532)" ]
68f1d3e7ee6f868cf4909baf559889aa5e65c894
86394034486ee20f50a91bb52bb792695be7ab07
[]
[]
"2022-01-19T16:18:29Z"
[ "type/enhancement", "scope/frontend", "status/accepted", "status/confirmed" ]
Unclear parameters for Consumers' Search. Inconsistent Search line on the Consumers page.
### Is your proposal related to a problem? <!-- Provide a clear and concise description of what the problem is. For example, "I'm always frustrated when..." --> (Write your answer here.) It's confusing which parameter the search is currently using, because the search line on the Consumers page only says 'Search'. Moreover, the Search line is inconsistent here - on other pages the search line contains a clear hint, like Search by [parameter]. ### Describe the solution you'd like <!-- Provide a clear and concise description of what you want to happen. --> (Describe your proposed solution here.) A clear hint in the Search line, for instance Search by Consumer Group ID. ### Describe alternatives you've considered <!-- Let us know about other solutions you've tried or researched. --> (Write your answer here.) ### Additional context <!-- Is there anything else you can add about the proposal? You might want to link to related issues here, if you haven't already. --> (Write your answer here.)
[ "kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx" ]
[ "kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx", "kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx index f5b006a1c6d..9cd37695850 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/List/List.tsx @@ -22,7 +22,7 @@ const List: React.FC = () => { <PageHeading text="Consumers" /> <ControlPanelWrapper hasInput> <Search - placeholder="Search" + placeholder="Search by Consumer Group ID" value={searchText} handleSearch={handleInputChange} /> diff --git a/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx b/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx index 1bcb34daac9..0ac9c0ceedf 100644 --- a/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx +++ b/kafka-ui-react-app/src/components/ConsumerGroups/List/__test__/List.spec.tsx @@ -30,7 +30,10 @@ describe('List', () => { describe('when searched', () => { it('renders only searched consumers', () => { - userEvent.type(screen.getByPlaceholderText('Search'), 'groupId1'); + userEvent.type( + screen.getByPlaceholderText('Search by Consumer Group ID'), + 'groupId1' + ); expect(screen.getByText('groupId1')).toBeInTheDocument(); expect(screen.getByText('groupId2')).toBeInTheDocument(); });
null
train
train
2022-01-20T10:21:13
"2021-12-10T11:10:08Z"
agolosen
train
provectus/kafka-ui/1223_1259
provectus/kafka-ui
provectus/kafka-ui/1223
provectus/kafka-ui/1259
[ "timestamp(timedelta=0.0, similarity=0.908531084211858)" ]
7804a6eb9b34c67af1af9d0ca7d464634ffca65c
65d648419c4c0afc6ebb4a89014f892e4737eedf
[]
[]
"2021-12-16T08:31:27Z"
[ "type/enhancement", "scope/frontend", "status/accepted", "status/confirmed" ]
Adjust font in the selection/sorting area in Topics - Messages
### Is your proposal related to a problem? <!-- Provide a clear and concise description of what the problem is. For example, "I'm always frustrated when..." --> Adjust fonts in the selection/sorting fields in Topics - Specific Topic - Messages ![image](https://user-images.githubusercontent.com/39586798/145566069-1c30c776-3447-438e-a87a-254bc8a681c1.png) ### Describe the solution you'd like <!-- Provide a clear and concise description of what you want to happen. --> Same font for all fields ### Describe alternatives you've considered <!-- Let us know about other solutions you've tried or researched. --> (Write your answer here.) ### Additional context <!-- Is there anything else you can add about the proposal? You might want to link to related issues here, if you haven't already. --> (Write your answer here.)
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap", "kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts index 3b67922fdb1..0243519e8f2 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts @@ -16,16 +16,6 @@ export const FiltersWrapper = styled.div` width: 10%; } } - - & .multi-select { - height: 32px; - & > .dropdown-container { - height: 32px; - & > .dropdown-heading { - height: 32px; - } - } - } `; export const FilterInputs = styled.div` diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx index aa29d078d35..a59061185f4 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx @@ -13,7 +13,7 @@ import * as React from 'react'; import { omitBy } from 'lodash'; import { useHistory, useLocation } from 'react-router'; import DatePicker from 'react-datepicker'; -import MultiSelect from 'react-multi-select-component'; +import MultiSelect from 'components/common/MultiSelect/MultiSelect.styled'; import { Option } from 'react-multi-select-component/dist/lib/interfaces'; import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; import { TopicName, ClusterName } from 'redux/interfaces'; @@ -22,14 +22,7 @@ import Input from 'components/common/Input/Input'; import Select from 'components/common/Select/Select'; import { Button } from 'components/common/Button/Button'; -import { - FilterInputs, - FiltersMetrics, - FiltersWrapper, - Metric, - MetricsIcon, - SeekTypeSelectorWrapper, -} from './Filters.styled'; +import * as S from './Filters.styled'; import { filterOptions, getOffsetFromSeekToParam, @@ -235,9 +228,9 @@ const Filters: React.FC<FiltersProps> = ({ }, [seekDirection]); return ( - <FiltersWrapper> + <S.FiltersWrapper> <div> - <FilterInputs> + <S.FilterInputs> <Input inputSize="M" id="searchText" @@ -248,7 +241,7 @@ const Filters: React.FC<FiltersProps> = ({ onChange={({ target: { value } }) => setQuery(value)} /> {isSeekTypeControlVisible && ( - <SeekTypeSelectorWrapper> + <S.SeekTypeSelectorWrapper> <Select id="selectSeekType" onChange={({ target: { value } }) => @@ -280,7 +273,7 @@ const Filters: React.FC<FiltersProps> = ({ placeholderText="Select timestamp" /> )} - </SeekTypeSelectorWrapper> + </S.SeekTypeSelectorWrapper> )} <MultiSelect options={partitions.map((p) => ({ @@ -315,7 +308,7 @@ const Filters: React.FC<FiltersProps> = ({ Submit </Button> )} - </FilterInputs> + </S.FilterInputs> <Select selectSize="M" onChange={(e) => toggleSeekDirection(e.target.value)} @@ -325,28 +318,28 @@ const Filters: React.FC<FiltersProps> = ({ <option value={SeekDirection.BACKWARD}>Newest first</option> </Select> </div> - <FiltersMetrics> + <S.FiltersMetrics> <p style={{ fontSize: 14 }}>{isFetching && phaseMessage}</p> - <Metric title="Elapsed Time"> - <MetricsIcon> + <S.Metric title="Elapsed Time"> + <S.MetricsIcon> <i className="far fa-clock" /> - </MetricsIcon> + </S.MetricsIcon> <span>{Math.max(elapsedMs || 0, 0)} ms</span> - </Metric> - <Metric title="Bytes Consumed"> - <MetricsIcon> + </S.Metric> + <S.Metric title="Bytes Consumed"> + <S.MetricsIcon> <i className="fas fa-arrow-down" /> - 
</MetricsIcon> + </S.MetricsIcon> <BytesFormatted value={bytesConsumed} /> - </Metric> - <Metric title="Messages Consumed"> - <MetricsIcon> + </S.Metric> + <S.Metric title="Messages Consumed"> + <S.MetricsIcon> <i className="far fa-file-alt" /> - </MetricsIcon> + </S.MetricsIcon> <span>{messagesConsumed} messages</span> - </Metric> - </FiltersMetrics> - </FiltersWrapper> + </S.Metric> + </S.FiltersMetrics> + </S.FiltersWrapper> ); }; diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap index 188c378b7c2..03286ea2e18 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap @@ -1,6 +1,20 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`Filters component matches the snapshot 1`] = ` +.c9 { + min-width: 200px; + height: 32px; + font-size: 14px; +} + +.c9 > .dropdown-container { + height: 32px; +} + +.c9 > .dropdown-container > .dropdown-heading { + height: 32px; +} + .c3 { position: absolute; top: 50%; @@ -206,7 +220,7 @@ exports[`Filters component matches the snapshot 1`] = ` position: relative; } -.c9 { +.c10 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -232,27 +246,27 @@ exports[`Filters component matches the snapshot 1`] = ` height: 32px; } -.c9:hover:enabled { +.c10:hover:enabled { background: #E3E6E8; color: #171A1C; cursor: pointer; } -.c9:active:enabled { +.c10:active:enabled { background: #D5DADD; color: #171A1C; } -.c9:disabled { +.c10:disabled { opacity: 0.5; cursor: not-allowed; } -.c9 a { +.c10 a { color: white; } -.c9 i { +.c10 i { margin-right: 7px; } @@ -284,18 +298,6 @@ exports[`Filters component matches the snapshot 1`] = ` width: 10%; } -.c0 .multi-select { - height: 32px; -} - -.c0 .multi-select > .dropdown-container { - height: 32px; -} - -.c0 .multi-select > .dropdown-container > .dropdown-heading { - height: 32px; -} - .c1 { display: -webkit-box; display: -webkit-flex; @@ -359,7 +361,7 @@ exports[`Filters component matches the snapshot 1`] = ` outline: none; } -.c10 { +.c11 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -377,7 +379,7 @@ exports[`Filters component matches the snapshot 1`] = ` padding-bottom: 16px; } -.c11 { +.c12 { color: #73848C; font-size: 12px; display: -webkit-box; @@ -386,7 +388,7 @@ exports[`Filters component matches the snapshot 1`] = ` display: flex; } -.c12 { +.c13 { color: #171A1C; padding-right: 6px; height: 12px; @@ -449,7 +451,7 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <div - class="rmsc multi-select" + class="rmsc c9" > <div aria-labelledby="Select partitions" @@ -510,7 +512,7 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <button - class="c9" + class="c10" style="font-weight: 500;" type="submit" > @@ -537,17 +539,17 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <div - class="c10" + class="c11" > <p style="font-size: 14px;" /> <div - class="c11" + class="c12" title="Elapsed Time" > <div - class="c12" + class="c13" > <i class="far fa-clock" @@ -559,11 +561,11 @@ exports[`Filters component matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Bytes Consumed" > <div - class="c12" + 
class="c13" > <i class="fas fa-arrow-down" @@ -574,11 +576,11 @@ exports[`Filters component matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Messages Consumed" > <div - class="c12" + class="c13" > <i class="far fa-file-alt" @@ -596,7 +598,21 @@ exports[`Filters component matches the snapshot 1`] = ` exports[`Filters component when fetching matches the snapshot 1`] = ` <body> - .c3 { + .c9 { + min-width: 200px; + height: 32px; + font-size: 14px; +} + +.c9 > .dropdown-container { + height: 32px; +} + +.c9 > .dropdown-container > .dropdown-heading { + height: 32px; +} + +.c3 { position: absolute; top: 50%; line-height: 0; @@ -801,7 +817,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` position: relative; } -.c9 { +.c10 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -827,27 +843,27 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` height: 32px; } -.c9:hover:enabled { +.c10:hover:enabled { background: #E3E6E8; color: #171A1C; cursor: pointer; } -.c9:active:enabled { +.c10:active:enabled { background: #D5DADD; color: #171A1C; } -.c9:disabled { +.c10:disabled { opacity: 0.5; cursor: not-allowed; } -.c9 a { +.c10 a { color: white; } -.c9 i { +.c10 i { margin-right: 7px; } @@ -879,18 +895,6 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` width: 10%; } -.c0 .multi-select { - height: 32px; -} - -.c0 .multi-select > .dropdown-container { - height: 32px; -} - -.c0 .multi-select > .dropdown-container > .dropdown-heading { - height: 32px; -} - .c1 { display: -webkit-box; display: -webkit-flex; @@ -954,7 +958,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` outline: none; } -.c10 { +.c11 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -972,7 +976,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` padding-bottom: 16px; } -.c11 { +.c12 { color: #73848C; font-size: 12px; display: -webkit-box; @@ -981,7 +985,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` display: flex; } -.c12 { +.c13 { color: #171A1C; padding-right: 6px; height: 12px; @@ -1043,7 +1047,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <div - class="rmsc multi-select" + class="rmsc c9" > <div aria-labelledby="Select partitions" @@ -1104,7 +1108,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <button - class="c9" + class="c10" style="font-weight: 500;" type="button" > @@ -1131,17 +1135,17 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <div - class="c10" + class="c11" > <p style="font-size: 14px;" /> <div - class="c11" + class="c12" title="Elapsed Time" > <div - class="c12" + class="c13" > <i class="far fa-clock" @@ -1153,11 +1157,11 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Bytes Consumed" > <div - class="c12" + class="c13" > <i class="fas fa-arrow-down" @@ -1168,11 +1172,11 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Messages Consumed" > <div - class="c12" + class="c13" > <i class="far fa-file-alt" diff --git a/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts b/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts new file mode 100644 index 00000000000..ea596585b0a --- /dev/null 
+++ b/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts @@ -0,0 +1,18 @@ +import styled from 'styled-components'; +import ReactMultiSelect from 'react-multi-select-component'; + +const MultiSelect = styled(ReactMultiSelect)<{ minWidth?: string }>` + min-width: ${({ minWidth }) => minWidth || '200px;'}; + height: 32px; + font-size: 14px; + + & > .dropdown-container { + height: 32px; + + & > .dropdown-heading { + height: 32px; + } + } +`; + +export default MultiSelect;
null
test
train
2021-12-16T10:24:43
"2021-12-10T11:18:36Z"
Khakha-A
train
provectus/kafka-ui/1227_1259
provectus/kafka-ui
provectus/kafka-ui/1227
provectus/kafka-ui/1259
[ "timestamp(timedelta=0.0, similarity=0.902463989099796)" ]
7804a6eb9b34c67af1af9d0ca7d464634ffca65c
65d648419c4c0afc6ebb4a89014f892e4737eedf
[]
[]
"2021-12-16T08:31:27Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed" ]
Dropdown list of partitions shrinks in Search line for Messages
**Describe the bug** (A clear and concise description of what the bug is.) On the topic's Messages tab, the Partitions dropdown list shrinks and jumps to the left when unselecting checkboxes. **Set up** (How do you run the app? Which version of the app are you running? Provide either docker image version or check commit hash at the top left corner. We won't be able to help you without this information.) http://redesign.internal.kafka-ui.provectus.io/ **Steps to Reproduce** Steps to reproduce the behavior: 1. Navigate to Topics 2. Choose some topic 3. Go to the Messages tab 4. In the search line, unselect a checkbox in the Partitions field, e.g. Select All 5. Observe: the dropdown list shrinks and jumps to the left **Expected behavior** (A clear and concise description of what you expected to happen) The dimensions of the dropdown list don't change and the list doesn't jump **Screenshots** (If applicable, add screenshots to help explain your problem) ![image](https://user-images.githubusercontent.com/92585878/145570999-045f8d08-c651-47c1-94c6-afa895c2a185.png) ![image](https://user-images.githubusercontent.com/92585878/145571101-4166c466-7365-4fd9-925b-a20d84412814.png) **Additional context** (Add any other context about the problem here) This worked correctly before the redesign https://www.kafka-ui.provectus.io/ui Maybe the previous order of search boxes was more suitable, because when all partition checkboxes were unselected the Seek Type and Offset/Timestamp search boxes disappeared.
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap", "kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts index 3b67922fdb1..0243519e8f2 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.styled.ts @@ -16,16 +16,6 @@ export const FiltersWrapper = styled.div` width: 10%; } } - - & .multi-select { - height: 32px; - & > .dropdown-container { - height: 32px; - & > .dropdown-heading { - height: 32px; - } - } - } `; export const FilterInputs = styled.div` diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx index aa29d078d35..a59061185f4 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/Filters.tsx @@ -13,7 +13,7 @@ import * as React from 'react'; import { omitBy } from 'lodash'; import { useHistory, useLocation } from 'react-router'; import DatePicker from 'react-datepicker'; -import MultiSelect from 'react-multi-select-component'; +import MultiSelect from 'components/common/MultiSelect/MultiSelect.styled'; import { Option } from 'react-multi-select-component/dist/lib/interfaces'; import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; import { TopicName, ClusterName } from 'redux/interfaces'; @@ -22,14 +22,7 @@ import Input from 'components/common/Input/Input'; import Select from 'components/common/Select/Select'; import { Button } from 'components/common/Button/Button'; -import { - FilterInputs, - FiltersMetrics, - FiltersWrapper, - Metric, - MetricsIcon, - SeekTypeSelectorWrapper, -} from './Filters.styled'; +import * as S from './Filters.styled'; import { filterOptions, getOffsetFromSeekToParam, @@ -235,9 +228,9 @@ const Filters: React.FC<FiltersProps> = ({ }, [seekDirection]); return ( - <FiltersWrapper> + <S.FiltersWrapper> <div> - <FilterInputs> + <S.FilterInputs> <Input inputSize="M" id="searchText" @@ -248,7 +241,7 @@ const Filters: React.FC<FiltersProps> = ({ onChange={({ target: { value } }) => setQuery(value)} /> {isSeekTypeControlVisible && ( - <SeekTypeSelectorWrapper> + <S.SeekTypeSelectorWrapper> <Select id="selectSeekType" onChange={({ target: { value } }) => @@ -280,7 +273,7 @@ const Filters: React.FC<FiltersProps> = ({ placeholderText="Select timestamp" /> )} - </SeekTypeSelectorWrapper> + </S.SeekTypeSelectorWrapper> )} <MultiSelect options={partitions.map((p) => ({ @@ -315,7 +308,7 @@ const Filters: React.FC<FiltersProps> = ({ Submit </Button> )} - </FilterInputs> + </S.FilterInputs> <Select selectSize="M" onChange={(e) => toggleSeekDirection(e.target.value)} @@ -325,28 +318,28 @@ const Filters: React.FC<FiltersProps> = ({ <option value={SeekDirection.BACKWARD}>Newest first</option> </Select> </div> - <FiltersMetrics> + <S.FiltersMetrics> <p style={{ fontSize: 14 }}>{isFetching && phaseMessage}</p> - <Metric title="Elapsed Time"> - <MetricsIcon> + <S.Metric title="Elapsed Time"> + <S.MetricsIcon> <i className="far fa-clock" /> - </MetricsIcon> + </S.MetricsIcon> <span>{Math.max(elapsedMs || 0, 0)} ms</span> - </Metric> - <Metric title="Bytes Consumed"> - <MetricsIcon> + </S.Metric> + <S.Metric title="Bytes Consumed"> + <S.MetricsIcon> <i className="fas fa-arrow-down" /> - 
</MetricsIcon> + </S.MetricsIcon> <BytesFormatted value={bytesConsumed} /> - </Metric> - <Metric title="Messages Consumed"> - <MetricsIcon> + </S.Metric> + <S.Metric title="Messages Consumed"> + <S.MetricsIcon> <i className="far fa-file-alt" /> - </MetricsIcon> + </S.MetricsIcon> <span>{messagesConsumed} messages</span> - </Metric> - </FiltersMetrics> - </FiltersWrapper> + </S.Metric> + </S.FiltersMetrics> + </S.FiltersWrapper> ); }; diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap index 188c378b7c2..03286ea2e18 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap @@ -1,6 +1,20 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP exports[`Filters component matches the snapshot 1`] = ` +.c9 { + min-width: 200px; + height: 32px; + font-size: 14px; +} + +.c9 > .dropdown-container { + height: 32px; +} + +.c9 > .dropdown-container > .dropdown-heading { + height: 32px; +} + .c3 { position: absolute; top: 50%; @@ -206,7 +220,7 @@ exports[`Filters component matches the snapshot 1`] = ` position: relative; } -.c9 { +.c10 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -232,27 +246,27 @@ exports[`Filters component matches the snapshot 1`] = ` height: 32px; } -.c9:hover:enabled { +.c10:hover:enabled { background: #E3E6E8; color: #171A1C; cursor: pointer; } -.c9:active:enabled { +.c10:active:enabled { background: #D5DADD; color: #171A1C; } -.c9:disabled { +.c10:disabled { opacity: 0.5; cursor: not-allowed; } -.c9 a { +.c10 a { color: white; } -.c9 i { +.c10 i { margin-right: 7px; } @@ -284,18 +298,6 @@ exports[`Filters component matches the snapshot 1`] = ` width: 10%; } -.c0 .multi-select { - height: 32px; -} - -.c0 .multi-select > .dropdown-container { - height: 32px; -} - -.c0 .multi-select > .dropdown-container > .dropdown-heading { - height: 32px; -} - .c1 { display: -webkit-box; display: -webkit-flex; @@ -359,7 +361,7 @@ exports[`Filters component matches the snapshot 1`] = ` outline: none; } -.c10 { +.c11 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -377,7 +379,7 @@ exports[`Filters component matches the snapshot 1`] = ` padding-bottom: 16px; } -.c11 { +.c12 { color: #73848C; font-size: 12px; display: -webkit-box; @@ -386,7 +388,7 @@ exports[`Filters component matches the snapshot 1`] = ` display: flex; } -.c12 { +.c13 { color: #171A1C; padding-right: 6px; height: 12px; @@ -449,7 +451,7 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <div - class="rmsc multi-select" + class="rmsc c9" > <div aria-labelledby="Select partitions" @@ -510,7 +512,7 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <button - class="c9" + class="c10" style="font-weight: 500;" type="submit" > @@ -537,17 +539,17 @@ exports[`Filters component matches the snapshot 1`] = ` </div> </div> <div - class="c10" + class="c11" > <p style="font-size: 14px;" /> <div - class="c11" + class="c12" title="Elapsed Time" > <div - class="c12" + class="c13" > <i class="far fa-clock" @@ -559,11 +561,11 @@ exports[`Filters component matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Bytes Consumed" > <div - class="c12" + 
class="c13" > <i class="fas fa-arrow-down" @@ -574,11 +576,11 @@ exports[`Filters component matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Messages Consumed" > <div - class="c12" + class="c13" > <i class="far fa-file-alt" @@ -596,7 +598,21 @@ exports[`Filters component matches the snapshot 1`] = ` exports[`Filters component when fetching matches the snapshot 1`] = ` <body> - .c3 { + .c9 { + min-width: 200px; + height: 32px; + font-size: 14px; +} + +.c9 > .dropdown-container { + height: 32px; +} + +.c9 > .dropdown-container > .dropdown-heading { + height: 32px; +} + +.c3 { position: absolute; top: 50%; line-height: 0; @@ -801,7 +817,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` position: relative; } -.c9 { +.c10 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -827,27 +843,27 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` height: 32px; } -.c9:hover:enabled { +.c10:hover:enabled { background: #E3E6E8; color: #171A1C; cursor: pointer; } -.c9:active:enabled { +.c10:active:enabled { background: #D5DADD; color: #171A1C; } -.c9:disabled { +.c10:disabled { opacity: 0.5; cursor: not-allowed; } -.c9 a { +.c10 a { color: white; } -.c9 i { +.c10 i { margin-right: 7px; } @@ -879,18 +895,6 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` width: 10%; } -.c0 .multi-select { - height: 32px; -} - -.c0 .multi-select > .dropdown-container { - height: 32px; -} - -.c0 .multi-select > .dropdown-container > .dropdown-heading { - height: 32px; -} - .c1 { display: -webkit-box; display: -webkit-flex; @@ -954,7 +958,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` outline: none; } -.c10 { +.c11 { display: -webkit-box; display: -webkit-flex; display: -ms-flexbox; @@ -972,7 +976,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` padding-bottom: 16px; } -.c11 { +.c12 { color: #73848C; font-size: 12px; display: -webkit-box; @@ -981,7 +985,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` display: flex; } -.c12 { +.c13 { color: #171A1C; padding-right: 6px; height: 12px; @@ -1043,7 +1047,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <div - class="rmsc multi-select" + class="rmsc c9" > <div aria-labelledby="Select partitions" @@ -1104,7 +1108,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <button - class="c9" + class="c10" style="font-weight: 500;" type="button" > @@ -1131,17 +1135,17 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </div> </div> <div - class="c10" + class="c11" > <p style="font-size: 14px;" /> <div - class="c11" + class="c12" title="Elapsed Time" > <div - class="c12" + class="c13" > <i class="far fa-clock" @@ -1153,11 +1157,11 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Bytes Consumed" > <div - class="c12" + class="c13" > <i class="fas fa-arrow-down" @@ -1168,11 +1172,11 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` </span> </div> <div - class="c11" + class="c12" title="Messages Consumed" > <div - class="c12" + class="c13" > <i class="far fa-file-alt" diff --git a/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts b/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts new file mode 100644 index 00000000000..ea596585b0a --- /dev/null 
+++ b/kafka-ui-react-app/src/components/common/MultiSelect/MultiSelect.styled.ts @@ -0,0 +1,18 @@ +import styled from 'styled-components'; +import ReactMultiSelect from 'react-multi-select-component'; + +const MultiSelect = styled(ReactMultiSelect)<{ minWidth?: string }>` + min-width: ${({ minWidth }) => minWidth || '200px;'}; + height: 32px; + font-size: 14px; + + & > .dropdown-container { + height: 32px; + + & > .dropdown-heading { + height: 32px; + } + } +`; + +export default MultiSelect;
null
val
train
2021-12-16T10:24:43
"2021-12-10T12:37:37Z"
agolosen
train
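Both of the records above (issues 1223 and 1227) were resolved by the same patch, which replaces the global `.multi-select` CSS override with a styled `ReactMultiSelect` carrying a `minWidth` prop, so the dropdown keeps its width and font when options are deselected. A hypothetical usage sketch of that styled component follows; `PartitionsFilter` and its props are illustrative, not code from the repository.

```tsx
// Illustrative usage of the styled MultiSelect from the patch above.
import React from 'react';
import MultiSelect from 'components/common/MultiSelect/MultiSelect.styled';

type Option = { label: string; value: number };

const PartitionsFilter: React.FC<{ partitions: number[] }> = ({
  partitions,
}) => {
  const [selected, setSelected] = React.useState<Option[]>([]);
  return (
    <MultiSelect
      minWidth="200px" // the new styled prop; 200px is also the default
      options={partitions.map((p) => ({ label: `Partition #${p}`, value: p }))}
      value={selected}
      onChange={setSelected}
      labelledBy="Select partitions"
    />
  );
};

export default PartitionsFilter;
```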
provectus/kafka-ui/1310_1396
provectus/kafka-ui
provectus/kafka-ui/1310
provectus/kafka-ui/1396
[ "timestamp(timedelta=0.0, similarity=0.8918496985975255)" ]
611307ef59b7c5853716fcf4badd09c57ad347c5
060c043094a60cc24d20a5042ed7d5bc179eb386
[]
[ "I don't think we that we need this actions, we can use latest tag", "It is not \"Branch\" , \r\nDo we need to deploy DEMO env from Specific Branch ? ", "Please re-think about using demo env name instead of branch", "Removed build job. Changed infra repo for using dockerhub kafka-ui image with the latest tag.", "Changed the logics to use input var ENV_NAME to be used instead of branch name.", "Changed the logics to use input var ENV_NAME to be used instead of branch name." ]
"2022-01-17T09:37:08Z"
[ "status/accepted", "scope/infrastructure", "type/feature" ]
Implement a workflow to bring up a temp demo external env with separate app instances
Implement a mechanism to bring up a separate public feature-testing environment: 1. Each app should have a separate instance 2. Manual workflow via GitHub Actions' run button (master branch) 3. Add additional connectors which produce a decent amount of messages, e.g. the Twitter connector. 4. VPN is not required, but static single-user credentials are (same as #1144).
[]
[ ".github/workflows/separate_env_public_create.yml", ".github/workflows/separate_env_public_remove.yml" ]
[]
diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml new file mode 100644 index 00000000000..eb59e55f7ad --- /dev/null +++ b/.github/workflows/separate_env_public_create.yml @@ -0,0 +1,31 @@ +name: Separate environment create +on: + workflow_dispatch: + inputs: + ENV_NAME: + description: 'Will be used as subdomain in the public URL.' + required: true + default: 'demo' + +jobs: + separate-env-create: + runs-on: ubuntu-latest + steps: + - name: clone + run: | + git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git + + - name: separate env create + run: | + cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts + bash separate_env_create.sh ${{ github.event.inputs.ENV_NAME }} ${{ secrets.FEATURE_TESTING_UI_PASSWORD }} + git config --global user.email "[email protected]" + git config --global user.name "kafka-ui-infra" + git add -A + git commit -m "separate env added: ${{ github.event.inputs.ENV_NAME }}" && git push || true + + - name: echo separate environment public link + run: | + echo "Please note, separate environment creation takes up to 5-10 minutes." + echo "Separate environment will be available at http://${{ github.event.inputs.ENV_NAME }}.kafka-ui.provectus.io" + echo "Username: admin" diff --git a/.github/workflows/separate_env_public_remove.yml b/.github/workflows/separate_env_public_remove.yml new file mode 100644 index 00000000000..19084801377 --- /dev/null +++ b/.github/workflows/separate_env_public_remove.yml @@ -0,0 +1,24 @@ +name: Separate environment remove +on: + workflow_dispatch: + inputs: + ENV_NAME: + description: 'Will be used to remove previously deployed separate environment.' + required: true + default: 'demo' + +jobs: + separate-env-remove: + runs-on: ubuntu-latest + steps: + - name: clone + run: | + git clone https://kafka-ui-infra:${{ secrets.KAFKA_UI_INFRA_TOKEN }}@gitlab.provectus.com/provectus-internals/kafka-ui-infra.git + - name: separate environment remove + run: | + cd kafka-ui-infra/aws-infrastructure4eks/argocd/scripts + bash separate_env_remove.sh ${{ github.event.inputs.ENV_NAME }} + git config --global user.email "[email protected]" + git config --global user.name "kafka-ui-infra" + git add -A + git commit -m "separate env removed: ${{ github.event.inputs.ENV_NAME }}" && git push || true
null
test
train
2022-01-14T00:23:12
"2021-12-22T09:39:17Z"
Haarolean
train
provectus/kafka-ui/1315_1585
provectus/kafka-ui
provectus/kafka-ui/1315
provectus/kafka-ui/1585
[ "timestamp(timedelta=0.0, similarity=0.885997565637605)" ]
d503691600035a78d6a41cb8c281f9f65639c268
ce345ce56149d5fc1ee88dc1aca350598dffb50a
[ "Hello there swilliams-a3digital! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hi, thanks for reaching out. The thing is, `latest` points to latest *release* version.\r\nWe'll consider additionally labelling each commit with a dockerhub label as well.\r\nBtw, there's a 0.3 release coming out soon (this week)." ]
[]
"2022-02-11T08:03:29Z"
[ "type/enhancement", "status/accepted", "scope/infrastructure" ]
Tag master docker image with a version number tag
The ":latest" tag does not have the latest. It looks like the most recent image is only tagged as ":master". It would be nice if it was tagged with a version number. We like to specify a specific version so we know what we are installing. The version from 3 months ago was tagged as 0.23.1. Looking for something similar with the most recent image.
[ ".github/workflows/master.yaml" ]
[ ".github/workflows/master.yaml" ]
[]
diff --git a/.github/workflows/master.yaml b/.github/workflows/master.yaml index fd94ae63c27..9d610eab3cb 100644 --- a/.github/workflows/master.yaml +++ b/.github/workflows/master.yaml @@ -63,7 +63,9 @@ jobs: context: kafka-ui-api platforms: linux/amd64,linux/arm64 push: true - tags: provectuslabs/kafka-ui:master + tags: | + provectuslabs/kafka-ui:${{ steps.build.outputs.version }} + provectuslabs/kafka-ui:master build-args: | JAR_FILE=kafka-ui-api-${{ steps.build.outputs.version }}.jar cache-from: type=local,src=/tmp/.buildx-cache
null
val
train
2022-02-10T13:24:57
"2021-12-22T23:04:21Z"
swilliams-a3digital
train
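The follow-up patch above bumps `appVersion` in Chart.yaml during release and empties the `tag` in values.yaml. This relies on the common `helm create` scaffold pattern where an empty tag falls back to the chart's appVersion; the template line below is a sketch of that assumed pattern, not a quote from this chart.

```yaml
# templates/deployment.yaml (sketch of the assumed scaffold pattern):
# with values.yaml's tag set to "", the image tag falls back to the
# appVersion that the release workflow now keeps in sync.
containers:
  - name: kafka-ui
    image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
```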
provectus/kafka-ui/1315_1595
provectus/kafka-ui
provectus/kafka-ui/1315
provectus/kafka-ui/1595
[ "timestamp(timedelta=168219.0, similarity=0.8455814725408642)" ]
217f0ead0d871205f7a05062cdf56649b8eecc79
95a9047114b499c61e478c584ea14c66d7d8ef3e
[ "Hello there swilliams-a3digital! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hi, thanks for reaching out. The thing is, `latest` points to latest *release* version.\r\nWe'll consider additionally labelling each commit with a dockerhub label as well.\r\nBtw, there's a 0.3 release coming out soon (this week)." ]
[]
"2022-02-13T09:58:42Z"
[ "type/enhancement", "status/accepted", "scope/infrastructure" ]
Tag master docker image with a version number tag
The ":latest" tag does not have the latest. It looks like the most recent image is only tagged as ":master". It would be nice if it was tagged with a version number. We like to specify a specific version so we know what we are installing. The version from 3 months ago was tagged as 0.23.1. Looking for something similar with the most recent image.
[ ".github/workflows/release.yaml", "charts/kafka-ui/values.yaml" ]
[ ".github/workflows/release.yaml", "charts/kafka-ui/values.yaml" ]
[]
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index a88c7d05d81..12c11a771d2 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -109,6 +109,7 @@ jobs: run: | export version=${{needs.release.outputs.version}} sed -i "s/version:.*/version: ${version}/" charts/kafka-ui/Chart.yaml + sed -i "s/appVersion:.*/appVersion: ${version}/" charts/kafka-ui/Chart.yaml - name: add chart run: | diff --git a/charts/kafka-ui/values.yaml b/charts/kafka-ui/values.yaml index 3fc0754aff5..43cbf869f6b 100644 --- a/charts/kafka-ui/values.yaml +++ b/charts/kafka-ui/values.yaml @@ -5,7 +5,7 @@ image: repository: provectuslabs/kafka-ui pullPolicy: IfNotPresent # Overrides the image tag whose default is the chart appVersion. - tag: "latest" + tag: "" imagePullSecrets: [] nameOverride: ""
null
test
train
2022-02-13T12:08:37
"2021-12-22T23:04:21Z"
swilliams-a3digital
train
provectus/kafka-ui/1320_1321
provectus/kafka-ui
provectus/kafka-ui/1320
provectus/kafka-ui/1321
[ "timestamp(timedelta=0.0, similarity=0.9202583623899578)" ]
32a2e753b07b1741cbc338ca6391d9549082793b
0b112003a5999d8c0390514d9551c4b1f08d00ed
[]
[]
"2021-12-25T09:00:22Z"
[ "type/enhancement", "scope/infrastructure" ]
remove branch kafka-ui application from argocd when labeled pr is closed
RemoveCustomDeployment workflow is being triggered only when PR becomes unlabeled. However, when a pr is closed and is still labeled at the same time, an environment stays in ARGOCD. The goal is to modify branch-remove.yml in order to be triggered on PR closure as well.
[ ".github/workflows/branch-remove.yml" ]
[ ".github/workflows/branch-remove.yml" ]
[]
diff --git a/.github/workflows/branch-remove.yml b/.github/workflows/branch-remove.yml index ad6e63aa71e..3d08e5682dd 100644 --- a/.github/workflows/branch-remove.yml +++ b/.github/workflows/branch-remove.yml @@ -2,7 +2,7 @@ name: RemoveCustomDeployment on: workflow_dispatch: pull_request: - types: ['unlabeled'] + types: ['unlabeled', 'closed'] jobs: remove: if: ${{ github.event.label.name == 'status/feature_testing' || github.event.label.name == 'status/feature_testing_public' }} @@ -37,4 +37,4 @@ jobs: with: issue-number: ${{ github.event.pull_request.number }} body: | - Custom deployment removed \ No newline at end of file + Custom deployment removed
null
train
train
2021-12-24T17:00:27
"2021-12-25T08:47:41Z"
5hin0bi
train
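The one-line patch above extends the `types:` filter of the RemoveCustomDeployment workflow so PR closure also reaches the teardown job. Roughly the same gating, sketched in TypeScript for comparison across records (the type and function names are assumptions; the real label check lives in the workflow's `if:` expression):

```typescript
// Sketch of the trigger surface after the patch: both event types now reach
// the job, which is still guarded by the feature-testing labels.
type TeardownEvent = "unlabeled" | "closed";

const TEARDOWN_LABELS = ["status/feature_testing", "status/feature_testing_public"];

function mayTriggerTeardown(eventType: TeardownEvent, labelName: string): boolean {
  // eventType is already narrowed by the workflow's `types:` filter;
  // the label check mirrors the job-level `if:` condition.
  return TEARDOWN_LABELS.includes(labelName);
}
```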
provectus/kafka-ui/1337_1340
provectus/kafka-ui
provectus/kafka-ui/1337
provectus/kafka-ui/1340
[ "timestamp(timedelta=0.0, similarity=0.9044774356542453)" ]
108242aeb2aaf9ee6ca89879c827c1f551cf1a72
2f5e484a690567091c44ae4c318703a0c5df2f04
[ "Hello there bahaeddinoz! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, thanks for reaching out. We'll take a look.", "Thanks @Haarolean for the so quick updates πŸ™ Just one question why directly didn't we use the \"networking.k8s.io/v1 \" and instead specified if k8s version is highter then 1.22 ? k8s supports it since v1.10. Can be good to use it directly or is there any other reason ?\r\nhttps://kubernetes.io/docs/reference/using-api/deprecation-guide/#apiservice-v122", "@bahaeddinoz first of all, your documentation says it's supported since 1.19, not 1.10 (`Migrate manifests and API clients to use the networking.k8s.io/v1 API version, available since v1.19.`). The main purpose is to save back compatibility." ]
[]
"2021-12-29T12:21:13Z"
[ "type/enhancement", "scope/infrastructure", "scope/k8s" ]
Make helm Ingress compatible with k8s v1.22+
### Is your proposal related to a problem? Can we change the ingress to make compatiable with k8s v1.22+ <!-- Provide a clear and concise description of what the problem is. networking.k8s.io/v1beta1 Ingress is deprecated in v1.19+, unavailable in v1.22+; use networking.k8s.io/v1 Ingress --> (Write your answer here.) ### Describe the solution you'd like Can we change the ingress to make compatiable with k8s v1.22+ <!-- Provide a clear and concise description of what you want to happen. --> (Describe your proposed solution here.) ### Describe alternatives you've considered <!-- Let us know about other solutions you've tried or researched. --> (Write your answer here.) ### Additional context <!-- Is there anything else you can add about the proposal? You might want to link to related issues here, if you haven't already. --> (Write your answer here.)
[ "charts/kafka-ui/templates/ingress.yaml" ]
[ "charts/kafka-ui/templates/ingress.yaml" ]
[]
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml index d6e6450dc57..e602bb7471d 100644 --- a/charts/kafka-ui/templates/ingress.yaml +++ b/charts/kafka-ui/templates/ingress.yaml @@ -3,6 +3,8 @@ {{- $svcPort := .Values.service.port -}} {{- if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} apiVersion: networking.k8s.io/v1beta1 +{{- else if semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 {{- else -}} apiVersion: extensions/v1beta1 {{- end }}
null
train
train
2021-12-29T11:20:34
"2021-12-29T10:09:17Z"
bahaeddinoz
train
provectus/kafka-ui/1337_1345
provectus/kafka-ui
provectus/kafka-ui/1337
provectus/kafka-ui/1345
[ "timestamp(timedelta=73767.0, similarity=0.8676273215490587)" ]
d87c2a52901e7916e109a3a81abb5c54661a641d
269ace82e1bd2e55bde400f1175cb4bdc489287c
[ "Hello there bahaeddinoz! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "Hey, thanks for reaching out. We'll take a look.", "Thanks @Haarolean for the so quick updates πŸ™ Just one question why directly didn't we use the \"networking.k8s.io/v1 \" and instead specified if k8s version is highter then 1.22 ? k8s supports it since v1.10. Can be good to use it directly or is there any other reason ?\r\nhttps://kubernetes.io/docs/reference/using-api/deprecation-guide/#apiservice-v122", "@bahaeddinoz first of all, your documentation says it's supported since 1.19, not 1.10 (`Migrate manifests and API clients to use the networking.k8s.io/v1 API version, available since v1.19.`). The main purpose is to save back compatibility." ]
[]
"2021-12-30T08:57:14Z"
[ "type/enhancement", "scope/infrastructure", "scope/k8s" ]
Make helm Ingress compatible with k8s v1.22+
### Is your proposal related to a problem? Can we change the ingress to make compatiable with k8s v1.22+ <!-- Provide a clear and concise description of what the problem is. networking.k8s.io/v1beta1 Ingress is deprecated in v1.19+, unavailable in v1.22+; use networking.k8s.io/v1 Ingress --> (Write your answer here.) ### Describe the solution you'd like Can we change the ingress to make compatiable with k8s v1.22+ <!-- Provide a clear and concise description of what you want to happen. --> (Describe your proposed solution here.) ### Describe alternatives you've considered <!-- Let us know about other solutions you've tried or researched. --> (Write your answer here.) ### Additional context <!-- Is there anything else you can add about the proposal? You might want to link to related issues here, if you haven't already. --> (Write your answer here.)
[ "charts/kafka-ui/templates/ingress.yaml" ]
[ "charts/kafka-ui/templates/ingress.yaml" ]
[]
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml index e602bb7471d..6c823c0ab05 100644 --- a/charts/kafka-ui/templates/ingress.yaml +++ b/charts/kafka-ui/templates/ingress.yaml @@ -1,10 +1,10 @@ {{- if .Values.ingress.enabled -}} {{- $fullName := include "kafka-ui.fullname" . -}} {{- $svcPort := .Values.service.port -}} -{{- if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} -apiVersion: networking.k8s.io/v1beta1 -{{- else if semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion -}} +{{- if semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion -}} apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 {{- else -}} apiVersion: extensions/v1beta1 {{- end }} @@ -27,6 +27,37 @@ spec: rules: - http: paths: +{{- if semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion -}} + {{- range .Values.ingress.precedingPaths }} + - path: {{ .path }} + pathType: ImplementationSpecific + backend: + service: + name: {{ .serviceName }} + port: + number: {{ .servicePort }} + {{- end }} + - backend: + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} +{{- if .Values.ingress.path }} + path: {{ .Values.ingress.path }} +{{- end }} + {{- range .Values.ingress.succeedingPaths }} + - path: {{ .path }} + pathType: ImplementationSpecific + backend: + service: + name: {{ .serviceName }} + port: + number: {{ .servicePort }} + {{- end }} +{{- if .Values.ingress.host }} + host: {{ .Values.ingress.host }} +{{- end }} +{{- else -}} {{- range .Values.ingress.precedingPaths }} - path: {{ .path }} backend: @@ -48,4 +79,5 @@ spec: {{- if .Values.ingress.host }} host: {{ .Values.ingress.host }} {{- end }} - {{- end }} \ No newline at end of file +{{- end }} +{{- end }} \ No newline at end of file
null
train
train
2021-12-29T17:55:44
"2021-12-29T10:09:17Z"
bahaeddinoz
train
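Read together, the two records for issue 1337 show why guard order matters in the ingress template: PR 1340 appended a `>=1.22` branch that the earlier `>=1.14` check always shadowed, and PR 1345 reordered the guards. A hedged TypeScript re-expression of the corrected selection; `semverGte` here is a simplified stand-in for Helm's `semverCompare`, not a real library call:

```typescript
// Simplified numeric semver comparison; ignores pre-release precedence.
function semverGte(version: string, min: string): boolean {
  const parse = (v: string) => v.replace(/^v/, "").split("-")[0].split(".").map(Number);
  const [a, b] = [parse(version), parse(min)];
  for (let i = 0; i < 3; i += 1) {
    if ((a[i] ?? 0) !== (b[i] ?? 0)) return (a[i] ?? 0) > (b[i] ?? 0);
  }
  return true; // equal versions satisfy ">="
}

function ingressApiVersion(kubeVersion: string): string {
  // Most specific constraint first, mirroring PR 1345's reordering:
  // checking ">=1.14" before ">=1.22" would never reach the v1 API.
  if (semverGte(kubeVersion, "1.22")) return "networking.k8s.io/v1";
  if (semverGte(kubeVersion, "1.14")) return "networking.k8s.io/v1beta1";
  return "extensions/v1beta1";
}

// ingressApiVersion("v1.22.4") => "networking.k8s.io/v1"
```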
provectus/kafka-ui/1388_1411
provectus/kafka-ui
provectus/kafka-ui/1388
provectus/kafka-ui/1411
[ "timestamp(timedelta=3135.0, similarity=0.9302129567820533)" ]
439d41da0b1d1166c9d76350e680aa55e5498cd0
7e3c66b197894e6f74c87f2e625a4be0dc92bd75
[ "No failed backend requests", "A duplicate of #1188" ]
[ "I think it better to use `SchemaType.PROTOBUF` from `import { SchemaType } from 'generated-sources';\r\n` instead of hardcoded value" ]
"2022-01-19T09:15:06Z"
[ "type/bug", "status/duplicate", "scope/frontend" ]
Empty screen if try to Edit schema of proto type
**Describe the bug** (A clear and concise description of what the bug is.) There is an empty screen when you try to edit the schema of proto type. **Set up** (How do you run the app? Which version of the app are you running? Provide either docker image version or check commit hash at the top left corner. We won't be able to help you without this information.) http://dependabot-npm-and-yarn-kafka-ui.internal.kafka-ui.provectus.io/ **Steps to Reproduce** Steps to reproduce the behavior: 1. Navigate to Schema Registry 2. Click the schema with a name containing 'proto' 3. Press the button Edit Schema 4. Observe the empty screen with the right URL (see attachment) **Expected behavior** (A clear and concise description of what you expected to happen) Edit screen for the schema. **Screenshots** (If applicable, add screenshots to help explain your problem) ![image](https://user-images.githubusercontent.com/92585878/149402931-43117037-cb89-4716-82c2-ffef381b4f60.png) **Additional context** (Add any other context about the problem here) Works correctly for other schema types.
[ "kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx" ]
[ "kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx b/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx index 63a48580dca..27a109f0238 100644 --- a/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx +++ b/kafka-ui-react-app/src/components/Schemas/Edit/Edit.tsx @@ -37,10 +37,11 @@ const Edit: React.FC = () => { const schema = useAppSelector((state) => selectSchemaById(state, subject)); - const formatedSchema = React.useMemo( - () => JSON.stringify(JSON.parse(schema?.schema || '{}'), null, '\t'), - [schema] - ); + const formatedSchema = React.useMemo(() => { + return schema?.schemaType === SchemaType.PROTOBUF + ? schema?.schema + : JSON.stringify(JSON.parse(schema?.schema || '{}'), null, '\t'); + }, [schema]); const onSubmit = React.useCallback(async (props: NewSchemaSubjectRaw) => { if (!schema) return;
null
val
train
2022-01-19T00:46:17
"2022-01-13T20:19:07Z"
agolosen
train
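The fix in the record above is a two-branch formatting decision: Protobuf schemas are plain text and must bypass `JSON.parse`. A self-contained TypeScript sketch of the same logic (the standalone enum mirrors the `SchemaType` from generated sources mentioned in the review comment and is declared here only for runnability):

```typescript
enum SchemaType {
  AVRO = "AVRO",
  JSON = "JSON",
  PROTOBUF = "PROTOBUF",
}

function formatSchema(schema: string | undefined, type: SchemaType): string {
  // Protobuf schemas are plain .proto text; JSON.parse on them throws,
  // which is what blanked the edit screen before the patch.
  if (type === SchemaType.PROTOBUF) return schema ?? "";
  return JSON.stringify(JSON.parse(schema || "{}"), null, "\t");
}

// formatSchema('syntax = "proto3";', SchemaType.PROTOBUF) => 'syntax = "proto3";'
```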
provectus/kafka-ui/1391_1422
provectus/kafka-ui
provectus/kafka-ui/1391
provectus/kafka-ui/1422
[ "timestamp(timedelta=0.0, similarity=0.9326225082355849)" ]
77895236135a776cfaeec94aea8ce5a075ab38cf
2d5a8c024a20e0cab7d72c1200397e90b27ab88a
[]
[]
"2022-01-19T12:30:29Z"
[ "type/bug", "scope/frontend", "status/accepted" ]
Incorrect topic message content type (avro -> json)
**Describe the bug** When opening a message of a topic (while using avro converters), in kafka-ui it's shown as JSON. However, if you check the http response in the browser F12 console it's shown as AVRO. **Set up** (How do you run the app? Which version of the app are you running? Provide either docker image version or check commit hash at the top left corner. We won't be able to help you without this information.) <img width="456" alt="image" src="https://user-images.githubusercontent.com/94184844/149524120-98b8e50d-a3e7-48b2-98b9-6b5563c5b67c.png"> **Steps to Reproduce** Steps to reproduce the behavior: 1. Create connector with avro key and value converter 2. Check the message in the topic 3. Json type is shown as content of the message **Expected behavior** (A clear and concise description of what you expected to happen) Avro type is shown as content of the message **Screenshots** (If applicable, add screenshots to help explain your problem) <img width="1578" alt="image" src="https://user-images.githubusercontent.com/94184844/149523892-79fd09b1-c992-4864-831f-1fc4b81a8141.png"> <img width="495" alt="image" src="https://user-images.githubusercontent.com/94184844/149524052-6ec7679e-7fa8-41ca-90ff-e39c20273eec.png"> **Additional context** (Add any other context about the problem here)
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx" ]
[ "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx index 39154105606..9112b5e1df9 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Message.tsx @@ -27,6 +27,8 @@ const Message: React.FC<{ message: TopicMessage }> = ({ key, partition, content, + valueFormat, + keyFormat, headers, }, }) => { @@ -72,7 +74,9 @@ const Message: React.FC<{ message: TopicMessage }> = ({ {isOpen && ( <MessageContent messageKey={key} + messageKeyFormat={keyFormat} messageContent={content} + messageContentFormat={valueFormat} headers={headers} timestamp={timestamp} timestampType={timestampType} diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts index b4d74a12d16..e09c10728a5 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.styled.ts @@ -1,7 +1,7 @@ import styled from 'styled-components'; import { Colors } from 'theme/theme'; -export const MessageContentWrapper = styled.tr` +export const Wrapper = styled.tr` background-color: ${Colors.neutral[5]}; & > td { padding: 16px; @@ -14,7 +14,7 @@ } `; -export const StyledSection = styled.div` +export const Section = styled.div` padding: 0 16px; display: flex; gap: 1px; diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx index 207d4dba72f..8ccb4937870 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/MessageContent.tsx @@ -2,25 +2,17 @@ import { TopicMessageTimestampTypeEnum } from 'generated-sources'; import React from 'react'; import JSONViewer from 'components/common/JSONViewer/JSONViewer'; import { SecondaryTabs } from 'components/common/Tabs/SecondaryTabs.styled'; -import { isObject } from 'lodash'; import BytesFormatted from 'components/common/BytesFormatted/BytesFormatted'; -import { - ContentBox, - StyledSection, - MessageContentWrapper, - Metadata, - MetadataLabel, - MetadataMeta, - MetadataValue, - MetadataWrapper, -} from './MessageContent.styled'; +import * as S from './MessageContent.styled'; type Tab = 'key' | 'content' | 'headers'; export interface MessageContentProps { messageKey?: string; + messageKeyFormat?: string; messageContent?: string; + messageContentFormat?: string; headers?: { [key: string]: string | undefined }; timestamp?: Date; timestampType?: TopicMessageTimestampTypeEnum; @@ -28,7 +20,9 @@ const MessageContent: React.FC<MessageContentProps> = ({ messageKey, + messageKeyFormat, messageContent, + messageContentFormat, headers, timestamp, timestampType, @@ -58,25 +52,12 @@ const MessageContent: React.FC<MessageContentProps> = ({ }; const keySize = new TextEncoder().encode(messageKey).length; const contentSize = new TextEncoder().encode(messageContent).length; - const isContentJson = () => { - try { - return isObject(messageContent && JSON.parse(messageContent)); - } catch { - return false; - } - }; - const isKeyJson = () => { - try { - return isObject(messageKey && JSON.parse(messageKey)); - } catch { - return false; - } - }; + return ( - <MessageContentWrapper> + <S.Wrapper> <td colSpan={10}> - <StyledSection> - <ContentBox> + <S.Section> + <S.ContentBox> <SecondaryTabs> <button type="button" @@ -101,41 +82,39 @@ const MessageContent: React.FC<MessageContentProps> = ({ </button> </SecondaryTabs> <JSONViewer data={activeTabContent() || ''} /> - </ContentBox> - <MetadataWrapper> - <Metadata> - <MetadataLabel>Timestamp</MetadataLabel> + </S.ContentBox> + <S.MetadataWrapper> + <S.Metadata> + <S.MetadataLabel>Timestamp</S.MetadataLabel> <span> - <MetadataValue>{timestamp?.toLocaleString()}</MetadataValue> - <MetadataMeta>Timestamp type: {timestampType}</MetadataMeta> + <S.MetadataValue>{timestamp?.toLocaleString()}</S.MetadataValue> + <S.MetadataMeta>Timestamp type: {timestampType}</S.MetadataMeta> </span> - </Metadata> + </S.Metadata> - <Metadata> - <MetadataLabel>Content</MetadataLabel> + <S.Metadata> + <S.MetadataLabel>Content</S.MetadataLabel> <span> - <MetadataValue> - {isContentJson() ? 'JSON' : 'Text'} - </MetadataValue> - <MetadataMeta> + <S.MetadataValue>{messageContentFormat}</S.MetadataValue> + <S.MetadataMeta> Size: <BytesFormatted value={contentSize} /> - </MetadataMeta> + </S.MetadataMeta> </span> - </Metadata> + </S.Metadata> - <Metadata> - <MetadataLabel>Key</MetadataLabel> + <S.Metadata> + <S.MetadataLabel>Key</S.MetadataLabel> <span> - <MetadataValue>{isKeyJson() ? 'JSON' : 'Text'}</MetadataValue> - <MetadataMeta> + <S.MetadataValue>{messageKeyFormat}</S.MetadataValue> + <S.MetadataMeta> Size: <BytesFormatted value={keySize} /> - </MetadataMeta> + </S.MetadataMeta> </span> - </Metadata> - </MetadataWrapper> - </StyledSection> + </S.Metadata> + </S.MetadataWrapper> + </S.Section> </td> - </MessageContentWrapper> + </S.Wrapper> ); }; diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx index ad0f65380b4..2a085d97ce8 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/MessageContent/__tests__/MessageContent.spec.tsx @@ -15,7 +15,9 @@ const setupWrapper = (props?: Partial<MessageContentProps>) => { <tbody> <MessageContent messageKey='"test-key"' + messageKeyFormat="JSON" messageContent='{"data": "test"}' + messageContentFormat="AVRO" headers={{ header: 'test' }} timestamp={new Date(0)} timestampType={TopicMessageTimestampTypeEnum.CREATE_TIME} @@ -32,6 +34,17 @@ describe('MessageContent screen', () => { beforeEach(() => { render(setupWrapper()); }); + + describe('renders', () => { + it('key format in document', () => { + expect(screen.getByText('JSON')).toBeInTheDocument(); + }); + + it('content format in document', () => { + expect(screen.getByText('AVRO')).toBeInTheDocument(); + }); + }); + describe('when switched to display the key', () => { it('has a tab with is-active classname', () => { const keyTab = screen.getAllByText('Key');
null
val
train
2022-01-19T15:34:12
"2022-01-14T13:40:25Z"
5hin0bi
train
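The essence of the patch above is replacing client-side JSON sniffing with the serde format the backend reports per message. A hedged sketch of that metadata decision (the interface shape and fallback label are assumptions; the prop names come from the diff):

```typescript
interface MessageMeta {
  messageKeyFormat?: string; // e.g. "JSON" or "AVRO", as reported by the API
  messageContentFormat?: string;
}

function formatLabels(meta: MessageMeta): { key: string; content: string } {
  // No isObject/JSON.parse guessing: a message can be valid JSON text
  // and still have been produced through an Avro converter.
  return {
    key: meta.messageKeyFormat ?? "unknown",
    content: meta.messageContentFormat ?? "unknown",
  };
}
```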
provectus/kafka-ui/1412_1507
provectus/kafka-ui
provectus/kafka-ui/1412
provectus/kafka-ui/1507
[ "timestamp(timedelta=0.0, similarity=0.9578273548811195)" ]
91a99794bb7d890611f587a08e70a99da7727b29
9bd881a97bde4506d2e37657b32d90cbd7fa050c
[ "<img width=\"1457\" alt=\"Screenshot 2022-01-19 at 12 17 09\" src=\"https://user-images.githubusercontent.com/702205/150102216-7a1370d8-4798-4071-bf1b-a6bf8017b0cf.png\">\r\n", "Btw team, I’ve noticed that the button in the screenshot seem to bring no effect when a task is failed, however, β€œclear messages” which is restart - does restart particular task. Can you check it please?\r\nYou can wait for reddit connector to fail someday (it sometimes fails) and try to click it.\r\n![image](https://user-images.githubusercontent.com/94184844/150106690-d731af8a-4a52-402c-8e6d-374c2aede023.png)\r\n", "@5hin0bi please create ticket for it", "OK. I will test it once again first and if my proposal is correct I'll create a ticket.", "@Motherships this is only rename Clear Messages button ticket. No need to do anything else. " ]
[]
"2022-01-28T10:31:13Z"
[ "type/bug", "good first issue", "scope/frontend", "status/accepted", "status/confirmed" ]
UI: Invalid button title on Connector Tasks page
**Describe the bug** (A clear and concise description of what the bug is.) Go to Connectors -> Select any connector -> Tasks -> Click ... -> Button should be called 'Restart' not Clear messages **Set up** (How do you run the app? Which version of the app are you running? Provide either docker image version or check commit hash at the top left corner. We won't be able to help you without this information.) **Steps to Reproduce** Steps to reproduce the behavior: 1. **Expected behavior** (A clear and concise description of what you expected to happen) **Screenshots** (If applicable, add screenshots to help explain your problem) **Additional context** (Add any other context about the problem here)
[ "kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx", "kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx", "kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/__snapshots__/ListItem.spec.tsx.snap" ]
[ "kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx", "kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx" ]
[]
diff --git a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx index aa7f33bd690..1c48d16a0d9 100644 --- a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx +++ b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/ListItem.tsx @@ -42,7 +42,7 @@ const ListItem: React.FC<ListItemProps> = ({ task, restartTask }) => { <div> <Dropdown label={<VerticalElipsisIcon />} right> <DropdownItem onClick={restartTaskHandler} danger> - <span>Clear Messages</span> + <span>Restart task</span> </DropdownItem> </Dropdown> </div> diff --git a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx index 856e769b752..457d11b543f 100644 --- a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx +++ b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/ListItem.spec.tsx @@ -1,73 +1,73 @@ import React from 'react'; -import { create } from 'react-test-renderer'; -import { mount } from 'enzyme'; -import { act } from 'react-dom/test-utils'; -import { containerRendersView, TestRouterWrapper } from 'lib/testHelpers'; +import { render } from 'lib/testHelpers'; import { clusterConnectConnectorTasksPath } from 'lib/paths'; -import ListItemContainer from 'components/Connect/Details/Tasks/ListItem/ListItemContainer'; import ListItem, { ListItemProps, } from 'components/Connect/Details/Tasks/ListItem/ListItem'; import { tasks } from 'redux/reducers/connect/__test__/fixtures'; -import { ThemeProvider } from 'styled-components'; -import theme from 'theme/theme'; +import { Route } from 'react-router-dom'; +import { screen } from '@testing-library/react'; +import userEvent from '@testing-library/user-event'; -describe('ListItem', () => { - containerRendersView( - <table> - <tbody> - <ListItemContainer task={tasks[0]} /> - </tbody> - </table>, - ListItem +const pathname = clusterConnectConnectorTasksPath( + ':clusterName', + ':connectName', + ':connectorName' +); +const clusterName = 'my-cluster'; +const connectName = 'my-connect'; +const connectorName = 'my-connector'; +const restartTask = jest.fn(); +const task = tasks[0]; + +const renderComponent = (props: ListItemProps = { task, restartTask }) => { + return render( + <Route path={pathname}> + <table> + <tbody> + <ListItem {...props} /> + </tbody> + </table> + </Route>, + { + pathname: clusterConnectConnectorTasksPath( + clusterName, + connectName, + connectorName + ), + } ); +}; - describe('view', () => { - const pathname = clusterConnectConnectorTasksPath( - ':clusterName', - ':connectName', - ':connectorName' - ); - const clusterName = 'my-cluster'; - const connectName = 'my-connect'; - const connectorName = 'my-connector'; +describe('ListItem', () => { + it('renders', () => { + renderComponent(); + expect(screen.getByRole('row')).toBeInTheDocument(); + expect( + screen.getByRole('cell', { name: task.status.id.toString() }) + ).toBeInTheDocument(); + expect( + screen.getByRole('cell', { name: task.status.workerId }) + ).toBeInTheDocument(); + expect( + screen.getByRole('cell', { name: task.status.state }) + ).toBeInTheDocument(); + expect(screen.getByRole('button')).toBeInTheDocument(); + expect(screen.getByRole('menu')).toBeInTheDocument(); + expect(screen.getByRole('menuitem')).toBeInTheDocument(); + }); + it('calls restartTask on button click', () => { + renderComponent(); - const setupWrapper = (props: Partial<ListItemProps> = {}) => ( - <ThemeProvider theme={theme}> - <TestRouterWrapper - pathname={pathname} - urlParams={{ clusterName, connectName, connectorName }} - > - <table> - <tbody> - <ListItem task={tasks[0]} restartTask={jest.fn()} {...props} /> - </tbody> - </table> - </TestRouterWrapper> - </ThemeProvider> + expect(restartTask).not.toBeCalled(); + userEvent.click(screen.getByRole('button')); + userEvent.click(screen.getByRole('menuitem')); + expect(restartTask).toBeCalledTimes(1); + expect(restartTask).toHaveBeenCalledWith( + clusterName, + connectName, + connectorName, + task.id?.task ); - - it('matches snapshot', () => { - const wrapper = create(setupWrapper()); - expect(wrapper.toJSON()).toMatchSnapshot(); - }); - - it('calls restartTask on button click', async () => { - const restartTask = jest.fn(); - const wrapper = mount(setupWrapper({ restartTask })); - await act(async () => { - wrapper.find('svg').simulate('click'); - }); - await act(async () => { - wrapper.find('span').simulate('click'); - }); - expect(restartTask).toHaveBeenCalledTimes(1); - expect(restartTask).toHaveBeenCalledWith( - clusterName, - connectName, - connectorName, - tasks[0].id?.task - ); - }); }); }); diff --git a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/__snapshots__/ListItem.spec.tsx.snap b/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/__snapshots__/ListItem.spec.tsx.snap deleted file mode 100644 index 753ad3c7e2a..00000000000 --- a/kafka-ui-react-app/src/components/Connect/Details/Tasks/ListItem/__tests__/__snapshots__/ListItem.spec.tsx.snap +++ /dev/null @@ -1,133 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ListItem view matches snapshot 1`] = ` -.c1 { - display: -webkit-box; - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-align-self: center; - -ms-flex-item-align: center; - align-self: center; -} - -.c2 { - background: transparent; - border: none; - display: -webkit-box; - display: -webkit-flex; - display: -ms-flexbox; - display: flex; - -webkit-align-items: 'center'; - -webkit-box-align: 'center'; - -ms-flex-align: 'center'; - align-items: 'center'; - -webkit-box-pack: 'center'; - -webkit-justify-content: 'center'; - -ms-flex-pack: 'center'; - justify-content: 'center'; -} - -.c2:hover { - cursor: pointer; -} - -.c3 { - color: #E51A1A; -} - -.c0 { - border: none; - border-radius: 16px; - height: 20px; - line-height: 20px; - background-color: #FFEECC; - color: #171A1C; - font-size: 12px; - display: inline-block; - padding-left: 0.75em; - padding-right: 0.75em; - text-align: center; -} - -<table> - <tbody> - <tr> - <td> - 1 - </td> - <td> - kafka-connect0:8083 - </td> - <td> - <p - className="c0" - color="yellow" - > - RUNNING - </p> - </td> - <td> - null - </td> - <td - style={ - Object { - "width": "5%", - } - } - > - <div> - <div - className="dropdown is-right" - > - <div - className="c1" - > - <button - className="c2" - onClick={[Function]} - type="button" - > - <svg - fill="none" - height="16" - viewBox="0 0 4 16" - width="4" - xmlns="http://www.w3.org/2000/svg" - > - <path - d="M2 4C3.1 4 4 3.1 4 2C4 0.9 3.1 0 2 0C0.9 0 0 0.9 0 2C0 3.1 0.9 4 2 4ZM2 6C0.9 6 0 6.9 0 8C0 9.1 0.9 10 2 10C3.1 10 4 9.1 4 8C4 6.9 3.1 6 2 6ZM2 12C0.9 12 0 12.9 0 14C0 15.1 0.9 16 2 16C3.1 16 4 15.1 4 14C4 12.9 3.1 12 2 12Z" - fill="#73848C" - /> - </svg> - </button> - </div> - <div - className="dropdown-menu" - id="dropdown-menu" - role="menu" - > - <div - className="dropdown-content has-text-left" - > - <a - className="c3 dropdown-item is-link" - href="#end" - onClick={[Function]} - role="menuitem" - type="button" - > - <span> - Clear Messages - </span> - </a> - </div> - </div> - </div> - </div> - </td> - </tr> - </tbody> -</table> -`;
null
train
train
2022-01-28T15:18:35
"2022-01-19T09:26:15Z"
iliax
train
provectus/kafka-ui/1428_1436
provectus/kafka-ui
provectus/kafka-ui/1428
provectus/kafka-ui/1436
[ "timestamp(timedelta=0.0, similarity=0.9493168565417999)" ]
205d8d000ded60f8443007a68c9cf70dd2abe413
31954ceb5535bcbb608776432a27598355ef058d
[]
[ "`@ts-expect-error first arg is number`\r\nI think this is not the correct solution.\r\nIf the argument type expects a string, you must pass a string.", "It was there before\r\n\r\nThose mutes just to remove mutes from top of the file\r\n```\r\n// eslint-disable-next-line @typescript-eslint/ban-ts-comment\r\n// @ts-nocheck\r\n```\r\n\r\nI don't really know why they were here, but I guess it's because we get `GIT_TAG` from `process.env` so it can be `undefined`", "πŸ‘ ", "πŸ‘ \r\n", "W/e rewrote this part" ]
"2022-01-20T10:57:04Z"
[ "type/bug", "scope/frontend", "status/accepted" ]
Improve Sonar Cloud security hotspots metrics
**Describe the bug** Fix security hotspots https://sonarcloud.io/project/security_hotspots?id=provectus_kafka-ui_frontend&sinceLeakPeriod=true **Expected behavior** Security Hotspots Metric should be 100%
[ "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx", "kafka-ui-react-app/src/components/Version/Version.tsx", "kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx", "kafka-ui-react-app/src/components/Version/__tests__/__snapshots__/Version.spec.tsx.snap", "kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts", "kafka-ui-react-app/src/components/Version/compareVersions.ts", "kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts", "kafka-ui-react-app/src/redux/reducers/loader/reducer.ts" ]
[ "kafka-ui-react-app/src/components/App.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx", "kafka-ui-react-app/src/components/Version/Version.tsx", "kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx", "kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts", "kafka-ui-react-app/src/components/Version/compareVersions.ts", "kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts", "kafka-ui-react-app/src/redux/reducers/loader/reducer.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/App.tsx b/kafka-ui-react-app/src/components/App.tsx index 15cc54856d5..13e5fdef5ac 100644 --- a/kafka-ui-react-app/src/components/App.tsx +++ b/kafka-ui-react-app/src/components/App.tsx @@ -61,7 +61,7 @@ const App: React.FC = () => { <S.Hyperlink href="/ui">UI for Apache Kafka</S.Hyperlink> <S.NavbarItem> - <Version tag={GIT_TAG} commit={GIT_COMMIT} /> + {GIT_TAG && <Version tag={GIT_TAG} commit={GIT_COMMIT} />} </S.NavbarItem> </S.NavbarBrand> </S.Navbar> diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx index 5983684a732..0cc39496e82 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParamField.spec.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import {screen, waitFor, within} from '@testing-library/react'; +import { screen, waitFor, within } from '@testing-library/react'; import { render } from 'lib/testHelpers'; import CustomParamsField, { Props, diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx index 9be012a0aa2..ded3e1b80f3 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/CustomParams/__tests__/CustomParams.spec.tsx @@ -9,8 +9,10 @@ import userEvent from '@testing-library/user-event'; import { TOPIC_CUSTOM_PARAMS } from 'lib/constants'; const selectOption = async (listbox: HTMLElement, option: string) => { - await waitFor(() => userEvent.click(listbox)); - await waitFor(() => userEvent.click(screen.getByText(option))); + await waitFor(() => { + userEvent.click(listbox); + userEvent.click(screen.getByText(option)); + }); }; const expectOptionIsSelected = (listbox: HTMLElement, option: string) => { @@ -19,17 +21,27 @@ const expectOptionIsSelected = (listbox: HTMLElement, option: string) => { expect(selectedOption[0]).toHaveTextContent(option); }; -const expectOptionIsDisabled = async ( +const expectOptionAvailability = async ( listbox: HTMLElement, option: string, disabled: boolean ) => { await waitFor(() => userEvent.click(listbox)); - const selectedOption = within(listbox).getAllByText(option); - expect(selectedOption[1]).toHaveStyleRule( + const selectedOptions = within(listbox).getAllByText(option).reverse(); + // its either two or one nodes, we only need last one + const selectedOption = selectedOptions[0]; + + if (disabled) { + expect(selectedOption).toHaveAttribute('disabled'); + } else { + expect(selectedOption).toBeEnabled(); + } + + expect(selectedOption).toHaveStyleRule( 'cursor', disabled ? 'not-allowed' : 'pointer' ); + await waitFor(() => userEvent.click(listbox)); }; describe('CustomParams', () => { @@ -51,15 +63,15 @@ }); it('renders with props', () => { - const addParamButton = screen.getByRole('button'); - expect(addParamButton).toBeInTheDocument(); - expect(addParamButton).toHaveTextContent('Add Custom Parameter'); + const button = screen.getByRole('button'); + expect(button).toBeInTheDocument(); + expect(button).toHaveTextContent('Add Custom Parameter'); }); describe('works with user inputs correctly', () => { it('button click creates custom param fieldset', async () => { - const addParamButton = screen.getByRole('button'); - await waitFor(() => userEvent.click(addParamButton)); + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); const listbox = screen.getByRole('listbox'); expect(listbox).toBeInTheDocument(); @@ -69,38 +81,38 @@ }); it('can select option', async () => { - const addParamButton = screen.getByRole('button'); - await waitFor(() => userEvent.click(addParamButton)); - + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); const listbox = screen.getByRole('listbox'); await selectOption(listbox, 'compression.type'); expectOptionIsSelected(listbox, 'compression.type'); - expectOptionIsDisabled(listbox, 'compression.type', true); + await expectOptionAvailability(listbox, 'compression.type', true); const textbox = screen.getByRole('textbox'); expect(textbox).toHaveValue(TOPIC_CUSTOM_PARAMS['compression.type']); }); it('when selected option changes disabled options update correctly', async () => { - const addParamButton = screen.getByRole('button'); - await waitFor(() => userEvent.click(addParamButton)); + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); const listbox = screen.getByRole('listbox'); await selectOption(listbox, 'compression.type'); - expectOptionIsDisabled(listbox, 'compression.type', true); + expectOptionIsSelected(listbox, 'compression.type'); + await expectOptionAvailability(listbox, 'compression.type', true); await selectOption(listbox, 'delete.retention.ms'); - expectOptionIsDisabled(listbox, 'delete.retention.ms', true); - expectOptionIsDisabled(listbox, 'compression.type', false); + await expectOptionAvailability(listbox, 'delete.retention.ms', true); + await expectOptionAvailability(listbox, 'compression.type', false); }); it('multiple button clicks create multiple fieldsets', async () => { - const addParamButton = screen.getByRole('button'); - await waitFor(() => userEvent.click(addParamButton)); - await waitFor(() => userEvent.click(addParamButton)); - await waitFor(() => userEvent.click(addParamButton)); + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); + await waitFor(() => userEvent.click(button)); + await waitFor(() => userEvent.click(button)); const listboxes = screen.getAllByRole('listbox'); expect(listboxes.length).toBe(3); @@ -110,48 +122,64 @@ }); it("can't select already selected option", async () => { - const addParamButton = screen.getByRole('button'); - userEvent.click(addParamButton); - userEvent.click(addParamButton); + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); + await waitFor(() => userEvent.click(button)); const listboxes = screen.getAllByRole('listbox'); const firstListbox = listboxes[0]; await selectOption(firstListbox, 'compression.type'); - expectOptionIsDisabled(firstListbox, 'compression.type', true); + await expectOptionAvailability(firstListbox, 'compression.type', true); const secondListbox = listboxes[1]; - expectOptionIsDisabled(secondListbox, 'compression.type', true); + await expectOptionAvailability(secondListbox, 'compression.type', true); }); it('when fieldset with selected custom property type is deleted disabled options update correctly', async () => { - const addParamButton = screen.getByRole('button'); - userEvent.click(addParamButton); - userEvent.click(addParamButton); - userEvent.click(addParamButton); + const button = screen.getByRole('button'); + await waitFor(() => userEvent.click(button)); + await waitFor(() => userEvent.click(button)); + await waitFor(() => userEvent.click(button)); const listboxes = screen.getAllByRole('listbox'); const firstListbox = listboxes[0]; await selectOption(firstListbox, 'compression.type'); - expectOptionIsDisabled(firstListbox, 'compression.type', true); + await expectOptionAvailability(firstListbox, 'compression.type', true); const secondListbox = listboxes[1]; await selectOption(secondListbox, 'delete.retention.ms'); - expectOptionIsDisabled(secondListbox, 'delete.retention.ms', true); + await expectOptionAvailability( + secondListbox, + 'delete.retention.ms', + true + ); const thirdListbox = listboxes[2]; await selectOption(thirdListbox, 'file.delete.delay.ms'); - expectOptionIsDisabled(thirdListbox, 'file.delete.delay.ms', true); + await expectOptionAvailability( + thirdListbox, + 'file.delete.delay.ms', + true + ); const deleteSecondFieldsetButton = screen.getByTitle( 'Delete customParam field 1' ); - userEvent.click(deleteSecondFieldsetButton); + await waitFor(() => userEvent.click(deleteSecondFieldsetButton)); expect(secondListbox).not.toBeInTheDocument(); - expectOptionIsDisabled(firstListbox, 'delete.retention.ms', false); - expectOptionIsDisabled(thirdListbox, 'delete.retention.ms', false); + await expectOptionAvailability( + firstListbox, + 'delete.retention.ms', + false + ); + await expectOptionAvailability( + thirdListbox, + 'delete.retention.ms', + false + ); }); }); }); diff --git a/kafka-ui-react-app/src/components/Version/Version.tsx b/kafka-ui-react-app/src/components/Version/Version.tsx index 6fde9db8ecf..37acf181461 100644 --- a/kafka-ui-react-app/src/components/Version/Version.tsx +++ b/kafka-ui-react-app/src/components/Version/Version.tsx @@ -5,7 +5,7 @@ import { GIT_REPO_LATEST_RELEASE_LINK } from 'lib/constants'; import compareVersions from './compareVersions'; export interface VesionProps { - tag?: string; + tag: string; commit?: string; } @@ -15,20 +15,15 @@ const Version: React.FC<VesionProps> = ({ tag, commit }) => { latestTag: '', }); useEffect(() => { - if (tag) { - fetch(GIT_REPO_LATEST_RELEASE_LINK) - .then((response) => response.json()) - .then((data) => { - setLatestVersionInfo({ - outdated: compareVersions(tag, data.tag_name) === -1, - latestTag: data.tag_name, - }); + fetch(GIT_REPO_LATEST_RELEASE_LINK) + .then((response) => response.json()) + .then((data) => { + setLatestVersionInfo({ + outdated: compareVersions(tag, data.tag_name) === -1, + latestTag: data.tag_name, }); - } + }); }, [tag]); - if (!tag) { - return null; - } const { outdated, latestTag } = latestVersionInfo; diff --git a/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx b/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx index 8215bd8d275..eebe8636fb5 100644 --- a/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx +++ b/kafka-ui-react-app/src/components/Version/__tests__/Version.spec.tsx @@ -1,29 +1,22 @@ import React from 'react'; -import { mount } from 'enzyme'; -import Version from 'components/Version/Version'; +import Version, { VesionProps } from 'components/Version/Version'; +import { screen } from '@testing-library/react'; +import { render } from 'lib/testHelpers'; const tag = 'v1.0.1-SHAPSHOT'; const commit = '123sdf34'; describe('Version', () => { - it('shows nothing if tag is not defined', () => { - const component = mount(<Version />); - expect(component.html()).toEqual(null); - }); + const setupComponent = (props: VesionProps) => render(<Version {...props} />); - it('shows current tag when only tag is defined', () => { - const component = mount(<Version tag={tag} />); - expect(component.text()).toContain(tag); + it('renders', () => { + setupComponent({ tag }); + expect(screen.getByText('Version:')).toBeInTheDocument(); }); it('shows current tag and commit', () => { - const component = mount(<Version tag={tag} commit={commit} />); - expect(component.text()).toContain(tag); - expect(component.text()).toContain(commit); - }); - - it('matches snapshot', () => { - const component = mount(<Version tag={tag} commit={commit} />); - expect(component).toMatchSnapshot(); + setupComponent({ tag, commit }); + expect(screen.getByText(tag)).toBeInTheDocument(); + expect(screen.getByText(commit)).toBeInTheDocument(); }); }); diff --git a/kafka-ui-react-app/src/components/Version/__tests__/__snapshots__/Version.spec.tsx.snap b/kafka-ui-react-app/src/components/Version/__tests__/__snapshots__/Version.spec.tsx.snap deleted file mode 100644 index 92926af577a..00000000000 --- a/kafka-ui-react-app/src/components/Version/__tests__/__snapshots__/Version.spec.tsx.snap +++ /dev/null @@ -1,36 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`Version matches snapshot 1`] = ` -<Version - commit="123sdf34" - tag="v1.0.1-SHAPSHOT" -> - <div - className="is-size-8 has-text-grey" - > - <span - className="has-text-grey-light mr-1" - > - Version: - </span> - <span - className="mr-1" - > - v1.0.1-SHAPSHOT - </span> - <span> - ( - </span> - <a - href="https://github.com/provectus/kafka-ui/commit/123sdf34" - target="__blank" - title="Current commit" - > - 123sdf34 - </a> - <span> - ) - </span> - </div> -</Version> -`; diff --git a/kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts b/kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts index 083ba825709..9875255555f 100644 --- a/kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts +++ b/kafka-ui-react-app/src/components/Version/__tests__/compareVersions.spec.ts @@ -1,6 +1,3 @@ -// eslint-disable-next-line @typescript-eslint/ban-ts-comment -// @ts-nocheck - import compareVersions from 'components/Version/compareVersions'; const runTests = (dataSet: [string, string, number][]) => { @@ -63,10 +60,11 @@ describe('compareVersions function', () => { }); it('returns valid result (negative test cases)', () => { + expect(compareVersions()).toEqual(0); + expect(compareVersions('v0.0.0')).toEqual(0); + // @ts-expect-error first arg is number expect(compareVersions(123, 'v0.0.0')).toEqual(0); - expect(compareVersions(undefined, 'v0.0.0')).toEqual(0); + // @ts-expect-error second arg is number expect(compareVersions('v0.0.0', 123)).toEqual(0); - expect(compareVersions('v0.0.0', undefined)).toEqual(0); - expect(compareVersions(undefined, undefined)).toEqual(0); }); }); diff --git a/kafka-ui-react-app/src/components/Version/compareVersions.ts b/kafka-ui-react-app/src/components/Version/compareVersions.ts index 72a8db926f7..71b3e11da14 100644 --- a/kafka-ui-react-app/src/components/Version/compareVersions.ts +++ b/kafka-ui-react-app/src/components/Version/compareVersions.ts @@ -1,9 +1,12 @@ const split = (v: string): string[] => { - const c = v.replace(/^v/, '').replace(/\+.*$/, ''); - return c.split('-')[0].split('.'); + const c = v.replace('v', '').split('-')[0]; + return c.split('.'); }; -const compareVersions = (v1: string, v2: string): number => { +const compareVersions = (v1?: string, v2?: string): number => { + if (!v1 || !v2) return 0; + + // try..catch - is our safeguard for strange git tags (or usecases without network) try { const s1 = split(v1); const s2 = split(v2); diff --git a/kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts b/kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts index 5e75f1023b7..6f98a1b65fe 100644 --- a/kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts +++ b/kafka-ui-react-app/src/redux/reducers/alerts/reducer.ts @@ -9,8 +9,7 @@ export const initialState: AlertsState = {}; const reducer = (state = initialState, action: Action): AlertsState => { const { type } = action; - const matches = /(.*)__(FAILURE)$/.exec(type); - if (matches && matches[2]) return addError(state, action); + if (type.endsWith('__FAILURE')) return addError(state, action); if (type === getType(dismissAlert)) { return removeAlert(state, action); diff --git a/kafka-ui-react-app/src/redux/reducers/loader/reducer.ts b/kafka-ui-react-app/src/redux/reducers/loader/reducer.ts index 18197eb2f41..0d14d5e2bf1 100644 --- a/kafka-ui-react-app/src/redux/reducers/loader/reducer.ts +++ b/kafka-ui-react-app/src/redux/reducers/loader/reducer.ts @@ -4,12 +4,17 @@ export const initialState: LoaderState = {}; const reducer = (state = initialState, action: Action): LoaderState => { const { type } = action; - const matches = /(.*)__(REQUEST|SUCCESS|FAILURE|CANCEL)$/.exec(type); + const splitType = type.split('__'); + const requestState = splitType.pop(); + const requestName = splitType.join('__'); // not a *__REQUEST / *__SUCCESS / *__FAILURE / *__CANCEL actions, so we ignore them - if (!matches) return state; - - const [, requestName, requestState] = matches; + if ( + requestState && + !['REQUEST', 'SUCCESS', 'FAILURE', 'CANCEL'].includes(requestState) + ) { + return state; + } switch (requestState) { case 'REQUEST':
null
train
train
2022-01-20T14:41:20
"2022-01-19T15:34:20Z"
workshur
train
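The record above patches only the edges of `compareVersions` (tag-prefix handling plus undefined guards); the comparison loop itself is not visible in the diff, so the body below is a plausible reconstruction that satisfies the shown negative test cases, not the repository's verbatim code:

```typescript
const split = (v: string): string[] => v.replace("v", "").split("-")[0].split(".");

const compareVersions = (v1?: string, v2?: string): number => {
  if (!v1 || !v2) return 0;
  // try..catch is the safeguard for strange git tags (or offline use),
  // matching the comment introduced by the patch.
  try {
    const s1 = split(v1);
    const s2 = split(v2);
    for (let i = 0; i < Math.max(s1.length, s2.length); i += 1) {
      const n1 = parseInt(s1[i] || "0", 10);
      const n2 = parseInt(s2[i] || "0", 10);
      if (Number.isNaN(n1) || Number.isNaN(n2)) return 0;
      if (n1 !== n2) return n1 > n2 ? 1 : -1;
    }
    return 0;
  } catch {
    return 0;
  }
};

// compareVersions("v0.3.1", "v0.3.2") => -1, i.e. the running tag is outdated
```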
provectus/kafka-ui/1447_1478
provectus/kafka-ui
provectus/kafka-ui/1447
provectus/kafka-ui/1478
[ "timestamp(timedelta=0.0, similarity=0.8528607105910732)" ]
471e84d0f92065d155911ebd7f3cd67be0ad4e14
315a3612c104c6bc024b4361f37ce38b157017a6
[]
[]
"2022-01-25T12:31:18Z"
[ "type/enhancement", "scope/infrastructure" ]
Find a way to make certain workflows required
Workflows like the following: `e2e-checks` `backend` `frontend` should have status `passed` to be able to merge a PR which has these workflows triggered. The things is, setting these workflows as required makes PRs which don't have these workflows launched blocked. https://github.com/marketplace/actions/workflow-status
[ ".github/workflows/backend.yml", ".github/workflows/e2e-checks.yaml", ".github/workflows/frontend.yaml" ]
[ ".github/workflows/backend.yml", ".github/workflows/documentation.yaml", ".github/workflows/e2e-checks.yaml", ".github/workflows/frontend.yaml" ]
[ "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/ConnectorsTests.java", "kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/TopicTests.java" ]
diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 5df7b74bc08..677d4407a1d 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -5,7 +5,7 @@ on: paths: - 'kafka-ui-api/**' jobs: - build: + build-and-test: runs-on: ubuntu-latest steps: - name: Cache local Maven repository diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml new file mode 100644 index 00000000000..0388bf21abc --- /dev/null +++ b/.github/workflows/documentation.yaml @@ -0,0 +1,23 @@ +name: Documentation +on: + pull_request: + types: + - opened + - labeled + - reopened + - synchronize + paths: + - 'documentation/*' + - './*.md' +jobs: + build-and-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Check URLs in files + uses: urlstechie/[email protected] + with: + exclude_patterns: localhost,127.0.,192.168. + exclude_urls: https://api.server,https://graph.microsoft.com/User.Read + print_all: false + file_types: .md diff --git a/.github/workflows/e2e-checks.yaml b/.github/workflows/e2e-checks.yaml index b6c44e72e1b..6ff3723258c 100644 --- a/.github/workflows/e2e-checks.yaml +++ b/.github/workflows/e2e-checks.yaml @@ -8,7 +8,7 @@ on: - 'kafka-ui-react-app/**' - 'kafka-ui-e2e-checks/**' jobs: - build: + build-and-test: runs-on: ubuntu-latest steps: - name: Cache local Maven repository diff --git a/.github/workflows/frontend.yaml b/.github/workflows/frontend.yaml index 1ba60f511b5..ba7f5c6cfd6 100644 --- a/.github/workflows/frontend.yaml +++ b/.github/workflows/frontend.yaml @@ -9,7 +9,7 @@ on: - 'kafka-ui-contract/**' - 'kafka-ui-react-app/**' jobs: - npm-test: + build-and-test: env: CI: true NODE_ENV: dev
diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java index 1719b0229ca..af88cc03887 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/SmokeTests.java @@ -3,9 +3,12 @@ import com.provectus.kafka.ui.base.BaseTest; import io.qameta.allure.Issue; import lombok.SneakyThrows; +import org.junit.Ignore; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; +@Disabled // TODO #1480 public class SmokeTests extends BaseTest { @Test @SneakyThrows diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/ConnectorsTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/ConnectorsTests.java index 33516fa289e..468c087d495 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/ConnectorsTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/ConnectorsTests.java @@ -5,8 +5,10 @@ import com.provectus.kafka.ui.helpers.ApiHelper; import com.provectus.kafka.ui.helpers.Helpers; import lombok.SneakyThrows; +import org.junit.Ignore; import org.junit.jupiter.api.*; +@Disabled // TODO #1480 public class ConnectorsTests extends BaseTest { public static final String LOCAL = "local"; diff --git a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/TopicTests.java b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/TopicTests.java index 3fd2640d097..670e0bcbe92 100644 --- a/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/TopicTests.java +++ b/kafka-ui-e2e-checks/src/test/java/com/provectus/kafka/ui/tests/TopicTests.java @@ -7,8 +7,9 @@ import org.junit.jupiter.api.*; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Disabled; - +@Disabled // TODO #1480 public class TopicTests extends BaseTest { public static final String NEW_TOPIC = "new-topic";
train
train
2022-01-25T13:54:46
"2022-01-21T11:15:32Z"
Haarolean
train
provectus/kafka-ui/1448_1450
provectus/kafka-ui
provectus/kafka-ui/1448
provectus/kafka-ui/1450
[ "timestamp(timedelta=60015.0, similarity=0.9286223072228074)" ]
9446d9c39d82c309c573060bf06a4f027067aa90
86dad04448a47ef47f5d82b0c5959ce7398f9631
[ "Hello there PhilDulac! πŸ‘‹\n\nThank you and congratulations πŸŽ‰ for opening your very first issue in this project! πŸ’–\n\nIn case you want to claim this issue, please comment down below! We will try to get back to you as soon as we can. πŸ‘€", "@PhilDulac hey, thanks for reaching out. This has been fixed in `master`-labeled image, we'll also get a minor release with this change to update the charts on artifacthub." ]
[]
"2022-01-21T19:30:08Z"
[ "type/bug", "status/accepted", "status/confirmed", "scope/k8s" ]
Missing Ingress pathType with k8s >=1.22
**Describe the bug** <!--(A clear and concise description of what the bug is.)--> When installing the helm chart on K8s version >= 1.22 with ingress enabled, the path type is missing and gives the following message: `Error: INSTALLATION FAILED: unable to build kubernetes objects from release manifest: error validating "": error validating data: ValidationError(Ingress.spec.rules[0].http.paths[0]): missing required field "pathType" in io.k8s.api.networking.v1.HTTPIngressPath` **Set up** <!-- (How do you run the app? Which version of the app are you running? Provide either docker image version or check commit hash at the top left corner. We won't be able to help you without this information.) --> ** Needs to be done on a k8s cluster version 1.22 or greater. Enable the ingress and specify a path in values.yaml: ``` kafka-ui: ingress: enabled: true path: / ``` **Steps to Reproduce** Steps to reproduce the behavior: 1. `helm install chart-name .` **Expected behavior** <!-- (A clear and concise description of what you expected to happen) --> Chart is successfully installed is correctly formatted ingress.
[ "charts/kafka-ui/templates/ingress.yaml" ]
[ "charts/kafka-ui/templates/ingress.yaml" ]
[]
diff --git a/charts/kafka-ui/templates/ingress.yaml b/charts/kafka-ui/templates/ingress.yaml index 641cfd9b67e..7c1c046a0eb 100644 --- a/charts/kafka-ui/templates/ingress.yaml +++ b/charts/kafka-ui/templates/ingress.yaml @@ -33,7 +33,7 @@ spec: {{- if semverCompare ">=1.22-0" .Capabilities.KubeVersion.GitVersion -}} {{- range .Values.ingress.precedingPaths }} - path: {{ .path }} - pathType: ImplementationSpecific + pathType: Prefix backend: service: name: {{ .serviceName }} @@ -45,12 +45,13 @@ spec: name: {{ $fullName }} port: number: {{ $svcPort }} + pathType: Prefix {{- if .Values.ingress.path }} path: {{ .Values.ingress.path }} {{- end }} {{- range .Values.ingress.succeedingPaths }} - path: {{ .path }} - pathType: ImplementationSpecific + pathType: Prefix backend: service: name: {{ .serviceName }}
null
test
train
2022-01-22T14:49:41
"2022-01-21T18:41:26Z"
PhilDulac
train
provectus/kafka-ui/1504_1505
provectus/kafka-ui
provectus/kafka-ui/1504
provectus/kafka-ui/1505
[ "timestamp(timedelta=0.0, similarity=0.9363045601377752)" ]
2f3aae028ecf993cd56ca98a9a43a612d2c5082b
1ae0d4d8aa05570632ccfbabdeaf893d74fb7240
[]
[]
"2022-01-28T08:03:30Z"
[ "type/enhancement", "scope/infrastructure" ]
Add built jar file to a release
Need to attach the built jar file to a published release.
[ ".github/workflows/release.yaml" ]
[ ".github/workflows/release.yaml" ]
[]
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 883c2937b8f..f0c3b51cc57 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -37,6 +37,13 @@ jobs: export VERSION=$(mvn -q -Dexec.executable=echo -Dexec.args='${project.version}' -DbuildVersion=${{ github.event.release.tag_name }} --non-recursive exec:exec) echo ::set-output name=version::${VERSION} + - name: Upload files to a GitHub release + uses: svenstaro/[email protected] + with: + repo_token: ${{ secrets.GITHUB_TOKEN }} + file: kafka-ui-api/target/kafka-ui-api-${{ steps.build.outputs.version }}.jar + tag: ${{ github.event.release.tag_name }} + - name: Archive JAR uses: actions/upload-artifact@v2 with:
null
test
train
2022-01-27T09:03:44
"2022-01-28T07:59:18Z"
5hin0bi
train
provectus/kafka-ui/1510_1514
provectus/kafka-ui
provectus/kafka-ui/1510
provectus/kafka-ui/1514
[ "timestamp(timedelta=1.0, similarity=0.9369598311168711)" ]
4390923e482c08d1e27ff0e1e783231f9fc6ee5c
982d29709b213e98cd6b40f91c6778a0f796be24
[]
[ "I'm not sure we need waitFor here. You have been waiting this on a previous line", "Y, it seems after we done waiting for `pathname` change all other mocks are already changed and we don't need to wait for them\r\nthanks\r\n![image](https://user-images.githubusercontent.com/12596177/151979076-17e72273-cc21-4a2a-b6e7-8c24f9c2cc71.png)\r\n" ]
"2022-01-30T09:32:15Z"
[ "type/bug", "scope/frontend", "status/accepted", "status/confirmed", "type/regression" ]
UI, Critical: retention.bytes parameter set to 0
**Describe the bug** Go to the topic creation view -> enter a new topic name, do not edit other configs -> retention.bytes is set to 0 in the passed parameters. Expected: when `Max size on disk in GB` is "Not set", `retention.bytes` should be either not passed or passed as -1, which indicates that the topic has no size limit. This is critical because it will cause unexpected topic data deletion; it needs to be fixed ASAP. <img width="292" alt="Screenshot 2022-01-28 at 18 32 02" src="https://user-images.githubusercontent.com/702205/151580439-0c6f1ed8-9269-43ca-bdba-37c143917945.png">
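For reference, a sketch of the relevant topic-level retention settings and their broker-side meaning; the concrete values mirror the test fixtures introduced by the fix, and the note on 0 reflects expected Kafka broker behavior rather than anything stated in the record itself:

```yaml
# Kafka topic-level retention settings relevant to this bug.
configs:
  retention.ms: "604800000"    # time-based retention (7 days)
  retention.bytes: "-1"        # -1 = no size limit (the safe default)
  # retention.bytes: "0"       # the buggy value: retain effectively no
  #                            # data, so old segments are deleted almost
  #                            # immediately
```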
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/components/Topics/New/__tests__/New.spec.tsx", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap", "kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx", "kafka-ui-react-app/src/components/common/Select/Select.tsx", "kafka-ui-react-app/src/lib/constants.ts" ]
[ "kafka-ui-react-app/src/components/Topics/New/New.tsx", "kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx", "kafka-ui-react-app/src/components/Topics/New/__test__/fixtures.ts", "kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap", "kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx", "kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx", "kafka-ui-react-app/src/components/common/Select/Select.tsx", "kafka-ui-react-app/src/lib/constants.ts" ]
[]
diff --git a/kafka-ui-react-app/src/components/Topics/New/New.tsx b/kafka-ui-react-app/src/components/Topics/New/New.tsx index 7eaccd51e09..923cecfb586 100644 --- a/kafka-ui-react-app/src/components/Topics/New/New.tsx +++ b/kafka-ui-react-app/src/components/Topics/New/New.tsx @@ -35,7 +35,6 @@ const New: React.FC = () => { clusterName, topicCreation: formatTopicCreation(data), }); - history.push(clusterTopicPath(clusterName, data.name)); } catch (error) { const response = await getResponse(error as Response); diff --git a/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx b/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx new file mode 100644 index 00000000000..f1a6aab9dce --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/New/__test__/New.spec.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import New from 'components/Topics/New/New'; +import { Route, Router } from 'react-router'; +import configureStore from 'redux-mock-store'; +import { RootState } from 'redux/interfaces'; +import { Provider } from 'react-redux'; +import { screen, waitFor } from '@testing-library/react'; +import { createMemoryHistory } from 'history'; +import fetchMock from 'fetch-mock-jest'; +import { clusterTopicNewPath, clusterTopicPath } from 'lib/paths'; +import userEvent from '@testing-library/user-event'; +import { render } from 'lib/testHelpers'; + +import { createTopicPayload, createTopicResponsePayload } from './fixtures'; + +const mockStore = configureStore(); + +const clusterName = 'local'; +const topicName = 'test-topic'; + +const initialState: Partial<RootState> = {}; +const storeMock = mockStore(initialState); +const historyMock = createMemoryHistory(); +const createTopicAPIPath = `/api/clusters/${clusterName}/topics`; + +const renderComponent = (history = historyMock, store = storeMock) => + render( + <Router history={history}> + <Route path={clusterTopicNewPath(':clusterName')}> + <Provider store={store}> + <New /> + </Provider> + </Route> + <Route path={clusterTopicPath(':clusterName', ':topicName')}> + New topic path + </Route> + </Router> + ); + +describe('New', () => { + beforeEach(() => { + fetchMock.reset(); + }); + + it('validates form', async () => { + const mockedHistory = createMemoryHistory({ + initialEntries: [clusterTopicNewPath(clusterName)], + }); + jest.spyOn(mockedHistory, 'push'); + renderComponent(mockedHistory); + + await waitFor(() => { + userEvent.click(screen.getByText('Send')); + }); + await waitFor(() => { + expect(screen.getByText('name is a required field')).toBeInTheDocument(); + }); + await waitFor(() => { + expect(mockedHistory.push).toBeCalledTimes(0); + }); + }); + + it('submits valid form', async () => { + const createTopicAPIPathMock = fetchMock.postOnce( + createTopicAPIPath, + createTopicResponsePayload, + { + body: createTopicPayload, + } + ); + const mockedHistory = createMemoryHistory({ + initialEntries: [clusterTopicNewPath(clusterName)], + }); + jest.spyOn(mockedHistory, 'push'); + renderComponent(mockedHistory); + + await waitFor(() => { + userEvent.type(screen.getByPlaceholderText('Topic Name'), topicName); + userEvent.click(screen.getByText('Send')); + }); + + await waitFor(() => + expect(mockedHistory.location.pathname).toBe( + clusterTopicPath(clusterName, topicName) + ) + ); + expect(mockedHistory.push).toBeCalledTimes(1); + expect(createTopicAPIPathMock.called()).toBeTruthy(); + }); +}); diff --git a/kafka-ui-react-app/src/components/Topics/New/__test__/fixtures.ts 
b/kafka-ui-react-app/src/components/Topics/New/__test__/fixtures.ts new file mode 100644 index 00000000000..a78c9ccb8e6 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/New/__test__/fixtures.ts @@ -0,0 +1,36 @@ +import { CleanUpPolicy, Topic } from 'generated-sources'; + +export const createTopicPayload: Record<string, unknown> = { + name: 'test-topic', + partitions: 1, + replicationFactor: 1, + configs: { + 'cleanup.policy': 'delete', + 'retention.ms': '604800000', + 'retention.bytes': '-1', + 'max.message.bytes': '1000012', + 'min.insync.replicas': '1', + }, +}; + +export const createTopicResponsePayload: Topic = { + name: 'local', + internal: false, + partitionCount: 1, + replicationFactor: 1, + replicas: 1, + inSyncReplicas: 1, + segmentSize: 0, + segmentCount: 0, + underReplicatedPartitions: 0, + cleanUpPolicy: CleanUpPolicy.DELETE, + partitions: [ + { + partition: 0, + leader: 1, + replicas: [{ broker: 1, leader: false, inSync: true }], + offsetMax: 0, + offsetMin: 0, + }, + ], +}; diff --git a/kafka-ui-react-app/src/components/Topics/New/__tests__/New.spec.tsx b/kafka-ui-react-app/src/components/Topics/New/__tests__/New.spec.tsx deleted file mode 100644 index 9bf44dfbb78..00000000000 --- a/kafka-ui-react-app/src/components/Topics/New/__tests__/New.spec.tsx +++ /dev/null @@ -1,67 +0,0 @@ -import React from 'react'; -import New from 'components/Topics/New/New'; -import { Router } from 'react-router'; -import configureStore from 'redux-mock-store'; -import { RootState } from 'redux/interfaces'; -import { Provider } from 'react-redux'; -import { screen, waitFor } from '@testing-library/react'; -import { createMemoryHistory } from 'history'; -import fetchMock from 'fetch-mock-jest'; -import { clusterTopicNewPath, clusterTopicPath } from 'lib/paths'; -import userEvent from '@testing-library/user-event'; -import { render } from 'lib/testHelpers'; - -const mockStore = configureStore(); - -describe('New', () => { - const clusterName = 'local'; - const topicName = 'test-topic'; - - const initialState: Partial<RootState> = {}; - const storeMock = mockStore(initialState); - const historyMock = createMemoryHistory(); - - beforeEach(() => { - fetchMock.restore(); - }); - - const setupComponent = (history = historyMock, store = storeMock) => ( - <Router history={history}> - <Provider store={store}> - <New /> - </Provider> - </Router> - ); - - it('validates form', async () => { - const mockedHistory = createMemoryHistory(); - jest.spyOn(mockedHistory, 'push'); - render(setupComponent(mockedHistory)); - userEvent.click(screen.getByText('Send')); - - await waitFor(() => { - expect(screen.getByText('name is a required field')).toBeInTheDocument(); - }); - await waitFor(() => { - expect(mockedHistory.push).toBeCalledTimes(0); - }); - }); - - it('submits valid form', () => { - const mockedHistory = createMemoryHistory({ - initialEntries: [clusterTopicNewPath(clusterName)], - }); - jest.spyOn(mockedHistory, 'push'); - render(setupComponent()); - userEvent.type(screen.getByPlaceholderText('Topic Name'), topicName); - userEvent.click(screen.getByText('Send')); - waitFor(() => { - expect(mockedHistory.location.pathname).toBe( - clusterTopicPath(clusterName, topicName) - ); - }); - waitFor(() => { - expect(mockedHistory.push).toBeCalledTimes(1); - }); - }); -}); diff --git a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap 
b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap index 8b9d33e7403..20a4a48c718 100644 --- a/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap +++ b/kafka-ui-react-app/src/components/Topics/Topic/Details/Messages/Filters/__tests__/__snapshots__/Filters.spec.tsx.snap @@ -479,6 +479,7 @@ exports[`Filters component matches the snapshot 1`] = ` <div> <ul class="c6" + id="selectSeekType" role="listbox" > <li @@ -1122,6 +1123,7 @@ exports[`Filters component when fetching matches the snapshot 1`] = ` <div> <ul class="c6" + id="selectSeekType" role="listbox" > <li diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx index 0740aa5d1d7..daa515ae7bf 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TimeToRetain.tsx @@ -31,7 +31,9 @@ const TimeToRetain: React.FC<Props> = ({ isSubmitting }) => { return ( <> <S.Label> - <InputLabel>Time to retain data (in ms)</InputLabel> + <InputLabel htmlFor="timeToRetain"> + Time to retain data (in ms) + </InputLabel> {valueHint && <span>{valueHint}</span>} </S.Label> <Input diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx index 3d214953914..b329b7e5a53 100644 --- a/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/TopicForm.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { useFormContext, Controller } from 'react-hook-form'; -import { BYTES_IN_GB } from 'lib/constants'; +import { NOT_SET, BYTES_IN_GB } from 'lib/constants'; import { TopicName, TopicConfigByName } from 'redux/interfaces'; import { ErrorMessage } from '@hookform/error-message'; import Select, { SelectOption } from 'components/common/Select/Select'; @@ -14,7 +14,7 @@ import CustomParamsContainer from './CustomParams/CustomParamsContainer'; import TimeToRetain from './TimeToRetain'; import * as S from './TopicForm.styled'; -interface Props { +export interface Props { topicName?: TopicName; config?: TopicConfigByName; isEditing?: boolean; @@ -29,7 +29,7 @@ const CleanupPolicyOptions: Array<SelectOption> = [ ]; const RetentionBytesOptions: Array<SelectOption> = [ - { value: -1, label: 'Not Set' }, + { value: NOT_SET, label: 'Not Set' }, { value: BYTES_IN_GB, label: '1 GB' }, { value: BYTES_IN_GB * 10, label: '10 GB' }, { value: BYTES_IN_GB * 20, label: '20 GB' }, @@ -47,15 +47,15 @@ const TopicForm: React.FC<Props> = ({ control, formState: { errors }, } = useFormContext(); - return ( <StyledForm onSubmit={onSubmit}> <fieldset disabled={isSubmitting}> <fieldset disabled={isEditing}> <S.Column> <S.NameField> - <InputLabel>Topic Name *</InputLabel> + <InputLabel htmlFor="topicFormName">Topic Name *</InputLabel> <Input + id="topicFormName" name="name" placeholder="Topic Name" defaultValue={topicName} @@ -69,8 +69,11 @@ const TopicForm: React.FC<Props> = ({ {!isEditing && ( <S.Column> <div> - <InputLabel>Number of partitions *</InputLabel> + <InputLabel htmlFor="topicFormNumberOfPartitions"> + Number of partitions * + </InputLabel> <Input + id="topicFormNumberOfPartitions" type="number" placeholder="Number of partitions" min="1" @@ -82,8 +85,11 @@ const TopicForm: React.FC<Props> = ({ </FormError> </div> 
<div> - <InputLabel>Replication Factor *</InputLabel> + <InputLabel htmlFor="topicFormReplicationFactor"> + Replication Factor * + </InputLabel> <Input + id="topicFormReplicationFactor" type="number" placeholder="Replication Factor" min="1" @@ -100,8 +106,11 @@ const TopicForm: React.FC<Props> = ({ <S.Column> <div> - <InputLabel>Min In Sync Replicas *</InputLabel> + <InputLabel htmlFor="topicFormMinInSyncReplicas"> + Min In Sync Replicas * + </InputLabel> <Input + id="topicFormMinInSyncReplicas" type="number" placeholder="Min In Sync Replicas" min="1" @@ -113,13 +122,20 @@ const TopicForm: React.FC<Props> = ({ </FormError> </div> <div> - <InputLabel>Cleanup policy</InputLabel> + <InputLabel + id="topicFormCleanupPolicyLabel" + htmlFor="topicFormCleanupPolicy" + > + Cleanup policy + </InputLabel> <Controller defaultValue={CleanupPolicyOptions[0].value} control={control} name="cleanupPolicy" render={({ field: { name, onChange } }) => ( <Select + id="topicFormCleanupPolicy" + aria-labelledby="topicFormCleanupPolicyLabel" name={name} value={CleanupPolicyOptions[0].value} onChange={onChange} @@ -131,48 +147,56 @@ const TopicForm: React.FC<Props> = ({ </div> </S.Column> - <div> - <S.Column> - <div> - <TimeToRetain isSubmitting={isSubmitting} /> - </div> - </S.Column> - <S.Column> - <div> - <InputLabel>Max size on disk in GB</InputLabel> - <Controller - control={control} - name="retentionBytes" - defaultValue={0} - render={({ field: { name, onChange } }) => ( - <Select - name={name} - value={RetentionBytesOptions[0].value} - onChange={onChange} - minWidth="100%" - options={RetentionBytesOptions} - /> - )} - /> - </div> + <S.Column> + <div> + <TimeToRetain isSubmitting={isSubmitting} /> + </div> + </S.Column> - <div> - <InputLabel>Maximum message size in bytes *</InputLabel> - <Input - type="number" - min="1" - defaultValue="1000012" - name="maxMessageBytes" - /> - <FormError> - <ErrorMessage errors={errors} name="maxMessageBytes" /> - </FormError> - </div> - </S.Column> - </div> + <S.Column> + <div> + <InputLabel + id="topicFormRetentionBytesLabel" + htmlFor="topicFormRetentionBytes" + > + Max size on disk in GB + </InputLabel> + <Controller + control={control} + name="retentionBytes" + defaultValue={RetentionBytesOptions[0].value} + render={({ field: { name, onChange } }) => ( + <Select + id="topicFormRetentionBytes" + aria-labelledby="topicFormRetentionBytesLabel" + name={name} + value={RetentionBytesOptions[0].value} + onChange={onChange} + minWidth="100%" + options={RetentionBytesOptions} + /> + )} + /> + </div> - <S.CustomParamsHeading>Custom parameters</S.CustomParamsHeading> + <div> + <InputLabel htmlFor="topicFormMaxMessageBytes"> + Maximum message size in bytes * + </InputLabel> + <Input + id="topicFormMaxMessageBytes" + type="number" + min="1" + defaultValue="1000012" + name="maxMessageBytes" + /> + <FormError> + <ErrorMessage errors={errors} name="maxMessageBytes" /> + </FormError> + </div> + </S.Column> + <S.CustomParamsHeading>Custom parameters</S.CustomParamsHeading> <CustomParamsContainer isSubmitting={isSubmitting} config={config} /> <Button type="submit" buttonType="primary" buttonSize="L"> diff --git a/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx b/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx new file mode 100644 index 00000000000..d98ee0706c9 --- /dev/null +++ b/kafka-ui-react-app/src/components/Topics/shared/Form/__tests__/TopicForm.spec.tsx @@ -0,0 +1,72 @@ +import React from 'react'; +import { render } 
from 'lib/testHelpers'; +import { screen } from '@testing-library/dom'; +import { FormProvider, useForm } from 'react-hook-form'; +import TopicForm, { Props } from 'components/Topics/shared/Form/TopicForm'; +import userEvent from '@testing-library/user-event'; + +const isSubmitting = false; +const onSubmit = jest.fn(); + +const renderComponent = (props: Props = { isSubmitting, onSubmit }) => { + const Wrapper: React.FC = ({ children }) => { + const methods = useForm(); + return <FormProvider {...methods}>{children}</FormProvider>; + }; + + return render( + <Wrapper> + <TopicForm {...props} /> + </Wrapper> + ); +}; + +const expectByRoleAndNameToBeInDocument = ( + role: string, + accessibleName: string +) => { + expect(screen.getByRole(role, { name: accessibleName })).toBeInTheDocument(); +}; + +describe('TopicForm', () => { + it('renders', () => { + renderComponent(); + + expectByRoleAndNameToBeInDocument('textbox', 'Topic Name *'); + + expectByRoleAndNameToBeInDocument('spinbutton', 'Number of partitions *'); + expectByRoleAndNameToBeInDocument('spinbutton', 'Replication Factor *'); + + expectByRoleAndNameToBeInDocument('spinbutton', 'Min In Sync Replicas *'); + expectByRoleAndNameToBeInDocument('listbox', 'Cleanup policy'); + + expectByRoleAndNameToBeInDocument( + 'spinbutton', + 'Time to retain data (in ms)' + ); + expectByRoleAndNameToBeInDocument('button', '12h'); + expectByRoleAndNameToBeInDocument('button', '2d'); + expectByRoleAndNameToBeInDocument('button', '7d'); + expectByRoleAndNameToBeInDocument('button', '4w'); + + expectByRoleAndNameToBeInDocument('listbox', 'Max size on disk in GB'); + expectByRoleAndNameToBeInDocument( + 'spinbutton', + 'Maximum message size in bytes *' + ); + + expectByRoleAndNameToBeInDocument('heading', 'Custom parameters'); + + expectByRoleAndNameToBeInDocument('button', 'Send'); + }); + + it('submits', () => { + renderComponent({ + isSubmitting, + onSubmit: onSubmit.mockImplementation((e) => e.preventDefault()), + }); + + userEvent.click(screen.getByRole('button', { name: 'Send' })); + expect(onSubmit).toBeCalledTimes(1); + }); +}); diff --git a/kafka-ui-react-app/src/components/common/Select/Select.tsx b/kafka-ui-react-app/src/components/common/Select/Select.tsx index 12b27a0352f..0b02c1c411c 100644 --- a/kafka-ui-react-app/src/components/common/Select/Select.tsx +++ b/kafka-ui-react-app/src/components/common/Select/Select.tsx @@ -25,7 +25,6 @@ export interface SelectOption { } const Select: React.FC<SelectProps> = ({ - id, options = [], value, defaultValue, diff --git a/kafka-ui-react-app/src/lib/constants.ts b/kafka-ui-react-app/src/lib/constants.ts index 700e4d8e5cb..9e25a9df538 100644 --- a/kafka-ui-react-app/src/lib/constants.ts +++ b/kafka-ui-react-app/src/lib/constants.ts @@ -46,6 +46,7 @@ export const MILLISECONDS_IN_WEEK = 604_800_000; export const MILLISECONDS_IN_DAY = 86_400_000; export const MILLISECONDS_IN_SECOND = 1_000; +export const NOT_SET = -1; export const BYTES_IN_GB = 1_073_741_824; export const PER_PAGE = 25;
null
train
train
2022-02-01T14:35:42
"2022-01-28T16:04:16Z"
iliax
train