@TestPropertySource(properties = { //
"server.ssl.key-store=./config/keystore.jks", //
"app.webclient.trust-store=./config/truststore.jks", //
- "app.configuration-filepath=./src/test/resources/test_application_configuration_kafka.json"//
+ "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
})
class IntegrationWithKafka {
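+ // Assumed to match the single Kafka type id declared in test_application_configuration.json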
+ final String TYPE_ID = "KafkaInformationType";
+
@Autowired
private ApplicationConfig applicationConfig;
@Autowired
private ConsumerController consumerController;
@Autowired
- private EcsSimulatorController ecsSimulatorController;
+ private IcsSimulatorController icsSimulatorController;
@Autowired
private KafkaTopicConsumers kafkaTopicConsumers;
- private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
+ private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
static class TestApplicationConfig extends ApplicationConfig {
@Override
- public String getEcsBaseUrl() {
+ public String getIcsBaseUrl() {
return thisProcessUrl();
}
private String thisProcessUrl() {
return "https://localhost:" + getLocalServerHttpPort();
}
}
@AfterEach
void reset() {
this.consumerController.testResults.reset();
- this.ecsSimulatorController.testResults.reset();
+ this.icsSimulatorController.testResults.reset();
this.jobs.clear();
}
return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
}
- private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+ private static Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize,
+ int maxConcurrency) {
Job.Parameters param =
new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
String str = gson.toJson(param);
return jsonObject(str);
}
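+ // Example result (a sketch; field names assume Job.Parameters' declarations):
+ // {"filter":"^Message_1$","bufferTimeout":{"maxSize":1000,"maxTimeMiliseconds":400},"maxConcurrency":1}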
- private Object jsonObject(String json) {
+ private static Object jsonObject(String json) {
try {
return JsonParser.parseString(json).getAsJsonObject();
} catch (Exception e) {
throw new IllegalArgumentException("Invalid JSON: " + json, e); // fail fast instead of silently returning nothing
}
}
- private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
+ ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
try {
- InfoType type = this.types.getAll().iterator().next();
- String typeId = type.getId();
String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
- return new ConsumerJobInfo(typeId,
+ return new ConsumerJobInfo(TYPE_ID,
jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
"");
} catch (Exception e) {
return null;
}
}
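// The body of senderOptions() is unchanged and not shown here; a minimal sketch, assuming
// ApplicationConfig exposes getKafkaBootStrapServers() for the broker address:
private SenderOptions<Integer, String> senderOptions() {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return SenderOptions.create(props);
}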
- private SenderRecord<Integer, String, Integer> senderRecord(String data, int i) {
- final InfoType infoType = this.types.getAll().iterator().next();
- return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
+ private SenderRecord<Integer, String, Integer> senderRecord(String data) {
+ final InfoType infoType = this.types.get(TYPE_ID);
+ int key = 1;
+ int correlationMetadata = 2;
+ return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
}
private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
// Publish the records through reactor-kafka and block until the whole flux has been sent
final KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions());
sender.send(dataToSend).blockLast();
}
@Test
- void kafkaIntegrationTest() throws InterruptedException {
+ void kafkaIntegrationTest() throws Exception {
final String JOB_ID1 = "ID1";
final String JOB_ID2 = "ID2";
// Register producer, Register types
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
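+ // All configured types should now be registered, not just a hard-coded single one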
// Create two jobs. One buffering and one with a filter
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
restClient());
- this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
- var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+ var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
sendDataToStream(dataToSend);
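+ // JOB_ID2 filters on ^Message_1$ and should deliver that single message;
+ // JOB_ID1 buffers and should deliver all three messages as one JSON array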
- verifiedReceivedByConsumer("Message_1", "[Message_1, Message_2, Message_3]");
+ verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
// Delete the jobs
- this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
- this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
- await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getActiveSubscriptions()).isEmpty());
+ await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
}
@Test
void kafkaIOverflow() throws InterruptedException {
- // This does not work. After an overflow, the kafka stream does not seem to work
- //
final String JOB_ID1 = "ID1";
final String JOB_ID2 = "ID2";
// Register producer, Register types
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
// Create two jobs.
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
+ restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
- var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
- sendDataToStream(dataToSend); // this will overflow
+ var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
+ sendDataToStream(dataToSend); // this should overflow
- KafkaJobDataConsumer consumer = kafkaTopicConsumers.getActiveSubscriptions().values().iterator().next();
+ KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
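+ // Overflowing the job's buffer is expected to terminate its reactive stream, so the consumer stops running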
await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
this.consumerController.testResults.reset();
- kafkaTopicConsumers.restartNonRunningTasks();
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+ kafkaTopicConsumers.restartNonRunningTopics();
+ Thread.sleep(1000); // Restarting the input seems to take some async time
- dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message__", i)); // Message_1
+ dataToSend = Flux.just(senderRecord("Howdy\""));
sendDataToStream(dataToSend);
- verifiedReceivedByConsumer("Message__1", "Message__1");
+ verifiedReceivedByConsumer("[\"Howdy\\\"\"]");
+
+ // Delete the jobs
+ this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
+
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+ await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
}
}