Merge "NONRTRIC - Implement DMaaP mediator producer service in Java"
[nonrtric.git] / dmaap-adaptor-java / src / test / java / org / oran / dmaapadapter / IntegrationWithKafka.java
/*-
 * ========================LICENSE_START=================================
 * O-RAN-SC
 * %%
 * Copyright (C) 2021 Nordix Foundation
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */

package org.oran.dmaapadapter;

import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertTrue;

import com.google.gson.JsonParser;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.oran.dmaapadapter.clients.AsyncRestClient;
import org.oran.dmaapadapter.clients.AsyncRestClientFactory;
import org.oran.dmaapadapter.configuration.ApplicationConfig;
import org.oran.dmaapadapter.configuration.ImmutableHttpProxyConfig;
import org.oran.dmaapadapter.configuration.ImmutableWebClientConfig;
import org.oran.dmaapadapter.configuration.WebClientConfig;
import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
import org.oran.dmaapadapter.r1.ConsumerJobInfo;
import org.oran.dmaapadapter.repository.InfoType;
import org.oran.dmaapadapter.repository.InfoTypes;
import org.oran.dmaapadapter.repository.Job;
import org.oran.dmaapadapter.repository.Jobs;
import org.oran.dmaapadapter.tasks.KafkaJobDataConsumer;
import org.oran.dmaapadapter.tasks.KafkaTopicConsumers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.boot.web.servlet.server.ServletWebServerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import reactor.core.publisher.Flux;
import reactor.kafka.sender.KafkaSender;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;

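/**
 * Integration tests for the Kafka input path: the application under test registers itself
 * and its types with the simulated ECS, jobs are created through the simulator, data is
 * produced to the type's Kafka input topic, and delivery to the test consumer is verified.
 */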
@SuppressWarnings("java:S3577") // Rename class
@ExtendWith(SpringExtension.class)
@SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT)
@TestPropertySource(properties = { //
        "server.ssl.key-store=./config/keystore.jks", //
        "app.webclient.trust-store=./config/truststore.jks", //
        "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
})
class IntegrationWithKafka {

    final String TYPE_ID = "KafkaInformationType";

    @Autowired
    private ApplicationConfig applicationConfig;

    @Autowired
    private Jobs jobs;

    @Autowired
    private InfoTypes types;

    @Autowired
    private ConsumerController consumerController;

    @Autowired
    private EcsSimulatorController ecsSimulatorController;

    @Autowired
    private KafkaTopicConsumers kafkaTopicConsumers;

    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();

    private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);

    @LocalServerPort
    int localServerHttpPort;

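    /**
     * Configuration that points the ECS, DMaaP and self base URLs back to this test process.
     */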
    static class TestApplicationConfig extends ApplicationConfig {
        @Override
        public String getEcsBaseUrl() {
            return thisProcessUrl();
        }

        @Override
        public String getDmaapBaseUrl() {
            return thisProcessUrl();
        }

        @Override
        public String getSelfUrl() {
            return thisProcessUrl();
        }

        private String thisProcessUrl() {
            final String url = "https://localhost:" + getLocalServerHttpPort();
            return url;
        }
    }

    /**
     * Overrides the BeanFactory.
     */
    @TestConfiguration
    static class TestBeanFactory extends BeanFactory {

        @Override
        @Bean
        public ServletWebServerFactory servletContainer() {
            return new TomcatServletWebServerFactory();
        }

        @Override
        @Bean
        public ApplicationConfig getApplicationConfig() {
            TestApplicationConfig cfg = new TestApplicationConfig();
            return cfg;
        }
    }

    @AfterEach
    void reset() {
        this.consumerController.testResults.reset();
        this.ecsSimulatorController.testResults.reset();
        this.jobs.clear();
    }

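    /**
     * Creates a rest client towards this service, reusing the configured keystore.
     * Trust-store validation is enabled only when requested by the caller.
     */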
    private AsyncRestClient restClient(boolean useTrustValidation) {
        WebClientConfig config = this.applicationConfig.getWebClientConfig();
        HttpProxyConfig httpProxyConfig = ImmutableHttpProxyConfig.builder() //
                .httpProxyHost("") //
                .httpProxyPort(0) //
                .build();
        config = ImmutableWebClientConfig.builder() //
                .keyStoreType(config.keyStoreType()) //
                .keyStorePassword(config.keyStorePassword()) //
                .keyStore(config.keyStore()) //
                .keyPassword(config.keyPassword()) //
                .isTrustStoreUsed(useTrustValidation) //
                .trustStore(config.trustStore()) //
                .trustStorePassword(config.trustStorePassword()) //
                .httpProxyConfig(httpProxyConfig).build();

        AsyncRestClientFactory restClientFactory = new AsyncRestClientFactory(config);
        return restClientFactory.createRestClientNoHttpProxy(baseUrl());
    }

    private AsyncRestClient restClient() {
        return restClient(false);
    }

    private String baseUrl() {
        return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
    }

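    /**
     * Builds job parameters (filter, buffer timeout and max concurrency) as the JSON object
     * carried in the ConsumerJobInfo.
     */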
    private static Object jobParametersAsJsonObject(String filter, long maxTimeMilliseconds, int maxSize,
            int maxConcurrency) {
        Job.Parameters param =
                new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMilliseconds), maxConcurrency);
        String str = gson.toJson(param);
        return jsonObject(str);
    }

    private static Object jsonObject(String json) {
        try {
            return JsonParser.parseString(json).getAsJsonObject();
        } catch (Exception e) {
            throw new NullPointerException(e.toString());
        }
    }

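    /**
     * Creates a ConsumerJobInfo for TYPE_ID that targets this test's consumer endpoint.
     */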
    ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
        try {
            String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
            return new ConsumerJobInfo(TYPE_ID,
                    jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
                    "");
        } catch (Exception e) {
            return null;
        }
    }

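    /**
     * Producer settings for the Kafka sender, using the configured bootstrap servers.
     */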
    private SenderOptions<Integer, String> senderOptions() {
        String bootstrapServers = this.applicationConfig.getKafkaBootStrapServers();

        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sample-producer");
        props.put(ProducerConfig.ACKS_CONFIG, "all");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return SenderOptions.create(props);
    }

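    /**
     * Wraps the given data in a record addressed to the Kafka input topic of TYPE_ID.
     */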
    private SenderRecord<Integer, String, Integer> senderRecord(String data) {
        final InfoType infoType = this.types.get(TYPE_ID);
        int key = 1;
        int correlationMetadata = 2;
        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
    }

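    /**
     * Sends the given records to Kafka and blocks until the last one has been sent.
     */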
    private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
        final KafkaSender<Integer, String> sender = KafkaSender.create(senderOptions());

        sender.send(dataToSend) //
                .doOnError(e -> logger.error("Send failed", e)) //
                .blockLast();
    }

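    /**
     * Waits until the consumer has received exactly the expected number of bodies and
     * asserts that each expected string is among them.
     */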
    private void verifiedReceivedByConsumer(String... strings) {
        ConsumerController.TestResults consumer = this.consumerController.testResults;
        await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(strings.length));
        for (String s : strings) {
            assertTrue(consumer.hasReceived(s));
        }
    }

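    /**
     * Creates one buffering job and one filtering job, sends three messages to Kafka and
     * verifies that the filtering job delivers only Message_1 while the buffering job
     * delivers all three messages in one batch. Finally the jobs are deleted and the
     * Kafka consumers are expected to stop.
     */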
    @Test
    void kafkaIntegrationTest() throws Exception {
        final String JOB_ID1 = "ID1";
        final String JOB_ID2 = "ID2";

        // Register producer and types
        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());

        // Create two jobs. One buffering and one with a filter
        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
                restClient());
        this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());

        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));

        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
        sendDataToStream(dataToSend);

        verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");

        // Delete the jobs
        this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());

        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
    }

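    /**
     * Floods the Kafka topic so that the job's consumer overflows and stops. One job is then
     * deleted, the non-running tasks are restarted, and delivery is verified to work again
     * for the remaining job.
     */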
    @Test
    void kafkaIOverflow() throws InterruptedException {
        final String JOB_ID1 = "ID1";
        final String JOB_ID2 = "ID2";

        // Register producer and types
        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());

        // Create two jobs.
        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
                restClient());
        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());

        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));

        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
        sendDataToStream(dataToSend); // this should overflow

        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
        await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
        this.consumerController.testResults.reset();

        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
        kafkaTopicConsumers.restartNonRunningTasks();
        Thread.sleep(1000); // Restarting the input seems to take some async time

        dataToSend = Flux.just(senderRecord("Howdy\""));
        sendDataToStream(dataToSend);

        verifiedReceivedByConsumer("[\"Howdy\\\"\"]");

        // Delete the jobs
        this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());

        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
    }

}