diff --git a/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java b/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
index 31ef970f..470e114e 100644
--- a/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
+++ b/dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
@@ -22,9 +22,11 @@ package org.oran.dmaapadapter;
 
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.awaitility.Awaitility.await;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import com.google.gson.JsonParser;
 
+import java.time.Duration;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -47,6 +49,8 @@ import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
 import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.tasks.KafkaJobDataConsumer;
+import org.oran.dmaapadapter.tasks.KafkaTopicConsumers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -90,6 +94,9 @@ class IntegrationWithKafka {
     @Autowired
     private EcsSimulatorController ecsSimulatorController;
 
+    @Autowired
+    private KafkaTopicConsumers kafkaTopicConsumers;
+
     private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
     private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
@@ -174,9 +181,9 @@ class IntegrationWithKafka {
         return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
     }
 
-    private Object jobParametersAsJsonObject(String filter, int maxTimeMiliseconds, int maxSize) {
-        Job.Parameters param = new Job.Parameters(filter,
-                new Job.Parameters.BufferTimeout(maxSize, maxTimeMiliseconds));
+    private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+        Job.Parameters param =
+                new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
         String str = gson.toJson(param);
         return jsonObject(str);
     }
@@ -189,13 +196,14 @@ class IntegrationWithKafka {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String filter, int maxTimeMiliseconds, int maxSize) {
+    private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
         try {
             InfoType type = this.types.getAll().iterator().next();
             String typeId = type.getId();
             String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId, jobParametersAsJsonObject(filter, maxTimeMiliseconds, maxSize), "owner",
-                    targetUri, "");
+            return new ConsumerJobInfo(typeId,
+                    jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
+                    "");
         } catch (Exception e) {
             return null;
         }
@@ -218,6 +226,23 @@ class IntegrationWithKafka {
         return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
     }
 
+    private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
+        final KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions());
+
+        sender.send(dataToSend) //
+                .doOnError(e -> logger.error("Send failed", e)) //
+                .blockLast();
+
+    }
+
+    private void verifiedReceivedByConsumer(String... strings) {
+        ConsumerController.TestResults consumer = this.consumerController.testResults;
+        await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(strings.length));
+        for (String s : strings) {
+            assertTrue(consumer.hasReceived(s));
+        }
+    }
+
     @Test
     void kafkaIntegrationTest() throws InterruptedException {
         final String JOB_ID1 = "ID1";
@@ -227,31 +252,62 @@ class IntegrationWithKafka {
         await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
         assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
 
-        // Create a job
-        this.ecsSimulatorController.addJob(consumerJobInfo(".*", 10, 1000), JOB_ID1, restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo(".*Message_1.*", 0, 0), JOB_ID2, restClient());
-        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
+        // Create two jobs. One buffering and one with a filter
+        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+                restClient());
+        this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
-        final KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
         var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        sendDataToStream(dataToSend);
 
-        sender.send(dataToSend) //
-                .doOnError(e -> logger.error("Send failed", e)) //
-                .doOnNext(senderResult -> logger.debug("Sent {}", senderResult)) //
-                .doOnError(t -> logger.error("Error {}", t)) //
-                .blockLast();
+        verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
 
-        ConsumerController.TestResults consumer = this.consumerController.testResults;
-        await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(2));
-        assertThat(consumer.receivedBodies.get(0)).isEqualTo("Message_1");
-        assertThat(consumer.receivedBodies.get(1)).isEqualTo("[Message_1, Message_2, Message_3]");
+        // Just for testing quoting
+        this.consumerController.testResults.reset();
+        dataToSend = Flux.just(senderRecord("Message\"_", 1));
+        sendDataToStream(dataToSend);
+        verifiedReceivedByConsumer("[\"Message\\\"_1\"]");
 
-        // Delete the job
+        // Delete the jobs
         this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
         this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers()).isEmpty());
+    }
+
+    @Test
+    void kafkaIOverflow() throws InterruptedException {
+        final String JOB_ID1 = "ID1";
+        final String JOB_ID2 = "ID2";
+
+        // Register producer, Register types
+        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+
+        // Create two jobs.
+        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
+        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
+
+        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        sendDataToStream(dataToSend); // this should overflow
+
+        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().values().iterator().next();
+        await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
+        this.consumerController.testResults.reset();
+
+        kafkaTopicConsumers.restartNonRunningTasks();
+        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        Thread.sleep(1000); // Restarting the input seems to take some asynch time
+
+        dataToSend = Flux.range(1, 1).map(i -> senderRecord("Howdy_", i));
+        sendDataToStream(dataToSend);
+
+        verifiedReceivedByConsumer("Howdy_1");
     }
 }
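
The new sendDataToStream() and senderRecord() helpers in the patch follow the usual reactor-kafka producer pattern: build SenderRecords keyed and correlated by an index, send them through a KafkaSender, and block until the last record is acknowledged. The sketch below shows that pattern in isolation; it is not part of the change set above, and the class name, bootstrap server and topic name are placeholder assumptions (the test itself obtains the broker settings from ApplicationConfig and the topic from InfoType.getKafkaInputTopic()).

import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;

import reactor.core.publisher.Flux;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;

class KafkaSendSketch {

    public static void main(String[] args) {
        // Placeholder producer configuration; the test derives these values from its ApplicationConfig.
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);

        KafkaSender<Integer, String> sender = KafkaSender.create(SenderOptions.create(props));

        // Message_1 .. Message_3, keyed and correlated by the loop index, like senderRecord() in the test.
        Flux<SenderRecord<Integer, String, Integer>> records = Flux.range(1, 3)
                .map(i -> SenderRecord.create(new ProducerRecord<>("example-input-topic", i, "Message_" + i), i));

        sender.send(records) //
                .doOnError(e -> e.printStackTrace()) //
                .blockLast(); // wait for the last acknowledgement, as sendDataToStream() does

        sender.close();
    }
}

Blocking on the last SenderResult keeps the tests deterministic: every record is on the input topic before the assertions on the consumer side start polling.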