X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=blobdiff_plain;f=dmaap-adaptor-java%2Fsrc%2Ftest%2Fjava%2Forg%2Foran%2Fdmaapadapter%2FIntegrationWithKafka.java;h=5a48d61fbac4f12b0ff7fade8e20f415f8676b92;hb=844931b62f35ce6ee2d9dc7274573fc54e14407a;hp=a0db58a07725aec88fef7fb8f191ed2f1d3f3391;hpb=b3896f4ad7912be9e12c05e7d4770fa39752d797;p=nonrtric.git

diff --git a/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java b/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
index a0db58a0..5a48d61f 100644
--- a/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
+++ b/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
@@ -75,10 +75,12 @@ import reactor.kafka.sender.SenderRecord;
 @TestPropertySource(properties = { //
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
-        "app.configuration-filepath=./src/test/resources/test_application_configuration_kafka.json"//
+        "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
 })
 class IntegrationWithKafka {
 
+    final String TYPE_ID = "KafkaInformationType";
+
     @Autowired
     private ApplicationConfig applicationConfig;
 
@@ -92,12 +94,12 @@ class IntegrationWithKafka {
     private ConsumerController consumerController;
 
     @Autowired
-    private EcsSimulatorController ecsSimulatorController;
+    private IcsSimulatorController icsSimulatorController;
 
     @Autowired
     private KafkaTopicConsumers kafkaTopicConsumers;
 
-    private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
+    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
     private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
 
@@ -106,7 +108,7 @@ class IntegrationWithKafka {
     static class TestApplicationConfig extends ApplicationConfig {
 
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return thisProcessUrl();
         }
 
@@ -149,7 +151,7 @@ class IntegrationWithKafka {
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.ecsSimulatorController.testResults.reset();
+        this.icsSimulatorController.testResults.reset();
         this.jobs.clear();
     }
 
@@ -181,14 +183,15 @@ class IntegrationWithKafka {
         return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
     }
 
-    private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+    private static Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize,
+            int maxConcurrency) {
         Job.Parameters param =
                 new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
         String str = gson.toJson(param);
         return jsonObject(str);
     }
 
-    private Object jsonObject(String json) {
+    private static Object jsonObject(String json) {
         try {
             return JsonParser.parseString(json).getAsJsonObject();
         } catch (Exception e) {
@@ -196,12 +199,10 @@ class IntegrationWithKafka {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
+    ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
         try {
-            InfoType type = this.types.getAll().iterator().next();
-            String typeId = type.getId();
             String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId,
+            return new ConsumerJobInfo(TYPE_ID,
                     jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner",
                     targetUri, "");
         } catch (Exception e) {
@@ -214,16 +215,18 @@
         Map<String, Object> props = new HashMap<>();
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
-        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sample-producer");
+        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sample-producerx");
         props.put(ProducerConfig.ACKS_CONFIG, "all");
         props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
         props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
         return SenderOptions.create(props);
     }
 
-    private SenderRecord<Integer, String, Integer> senderRecord(String data, int i) {
-        final InfoType infoType = this.types.getAll().iterator().next();
-        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
+    private SenderRecord<Integer, String, Integer> senderRecord(String data) {
+        final InfoType infoType = this.types.get(TYPE_ID);
+        int key = 1;
+        int correlationMetadata = 2;
+        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
     }
 
     private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
@@ -233,6 +236,8 @@
                 .doOnError(e -> logger.error("Send failed", e)) //
                 .blockLast();
 
+        sender.close();
+
     }
 
     private void verifiedReceivedByConsumer(String... strings) {
@@ -244,64 +249,96 @@
     }
 
     @Test
-    void kafkaIntegrationTest() throws InterruptedException {
+    void simpleCase() throws InterruptedException {
+        final String JOB_ID = "ID";
+
+        // Register producer, Register types
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        Thread.sleep(4000);
+        var dataToSend = Flux.just(senderRecord("Message"));
+        sendDataToStream(dataToSend);
+
+        verifiedReceivedByConsumer("Message");
+
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
+    }
+
+    @Test
+    void kafkaIntegrationTest() throws Exception {
         final String JOB_ID1 = "ID1";
         final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs. One buffering and one with a filter
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 10, 20), JOB_ID1,
                 restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        Thread.sleep(2000);
+        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend);
 
-        verifiedReceivedByConsumer("Message_1", "[Message_1, Message_2, Message_3]");
+        verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
 
         // Delete the jobs
-        this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
-        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getActiveSubscriptions()).isEmpty());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 
     @Test
     void kafkaIOverflow() throws InterruptedException {
-        // This does not work. After an overflow, the kafka stream does not seem to work
-        //
        final String JOB_ID1 = "ID1";
        final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs.
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
+                restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
-        sendDataToStream(dataToSend); // this will overflow
+        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
+        sendDataToStream(dataToSend); // this should overflow
 
-        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getActiveSubscriptions().values().iterator().next();
+        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
         await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
         this.consumerController.testResults.reset();
 
-        kafkaTopicConsumers.restartNonRunningTasks();
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        kafkaTopicConsumers.restartNonRunningTopics();
+        Thread.sleep(1000); // Restarting the input seems to take some async time
 
-        dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message__", i)); // Message_1
+        dataToSend = Flux.just(senderRecord("Howdy\""));
         sendDataToStream(dataToSend);
 
-        verifiedReceivedByConsumer("Message__1", "Message__1");
+        verifiedReceivedByConsumer("[\"Howdy\\\"\"]");
+
+        // Delete the jobs
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 }
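
Note on the send path exercised above: sendDataToStream() follows the standard reactor-kafka producer flow, and the patch adds the previously missing sender.close(). Below is a minimal, self-contained sketch of that flow; the broker address "localhost:9092", the topic "example-topic", and the class name KafkaSendSketch are assumptions for illustration (the test resolves its topic from the InfoType configuration instead).

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import reactor.core.publisher.Flux;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;

public class KafkaSendSketch {

    public static void main(String[] args) {
        // Producer properties mirroring those in the diff above.
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker address
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sketch-producer");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        SenderOptions<Integer, String> senderOptions = SenderOptions.create(props);

        // Each SenderRecord wraps a ProducerRecord plus correlation metadata that
        // is returned with the corresponding send result.
        Flux<SenderRecord<Integer, String, Integer>> records = Flux.range(1, 3)
                .map(i -> SenderRecord.create(new ProducerRecord<>("example-topic", i, "Message_" + i), i));

        KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions);
        sender.send(records) //
                .doOnError(e -> e.printStackTrace()) //
                .blockLast(); // block until every record has been sent
        sender.close(); // release the underlying producer, as the patch now does
    }
}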
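
Why the expected strings gained escaped quotes: the updated assertions ("[\"Message_1\", \"Message_2\", \"Message_3\"]" and "[\"Howdy\\\"\"]") indicate that a buffered batch is delivered as a JSON array of JSON strings, so quotes inside a message are escaped. A small sketch of that encoding using gson (the adapter's exact whitespace may differ; the class name BufferedOutputSketch is hypothetical):

import java.util.List;

import com.google.gson.Gson;

public class BufferedOutputSketch {

    public static void main(String[] args) {
        Gson gson = new Gson();

        // Three buffered messages become one JSON array of strings.
        System.out.println(gson.toJson(List.of("Message_1", "Message_2", "Message_3")));
        // prints: ["Message_1","Message_2","Message_3"]

        // A quote inside the payload is escaped, matching the "Howdy\"" assertion above.
        System.out.println(gson.toJson(List.of("Howdy\"")));
        // prints: ["Howdy\""]
    }
}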