Improve test coverage of InfluxLogger
nonrtric/plt/ranpm.git: influxlogger/src/main/java/org/oran/pmlog/KafkaTopicListener.java
/*-
 * ========================LICENSE_START=================================
 * O-RAN-SC
 * %%
 * Copyright (C) 2023 Nordix Foundation
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */

package org.oran.pmlog;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.oran.pmlog.configuration.ApplicationConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

/**
 * The class streams incoming requests from a Kafka topic and sends them on
 * to a multicast sink, which several other streams can connect to.
 */
@SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
public class KafkaTopicListener {

    private static final Logger logger = LoggerFactory.getLogger(KafkaTopicListener.class);
    private final ApplicationConfig applicationConfig;
    private Flux<DataFromKafkaTopic> dataFromTopic;

    public KafkaTopicListener(ApplicationConfig applConfig) {
        this.applicationConfig = applConfig;
    }

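    /**
     * Returns the shared flux of data received from the configured Kafka topic.
     * The underlying Kafka receiver is created lazily on the first call and the
     * same flux instance is returned to all subsequent callers.
     */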
    public Flux<DataFromKafkaTopic> getFlux() {
        if (this.dataFromTopic == null) {
            this.dataFromTopic = startReceiveFromTopic(this.applicationConfig.getKafkaClientId());
        }
        return this.dataFromTopic;
    }

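    /**
     * Builds the reactive pipeline that consumes the configured Kafka topic.
     * Records with both an empty key and an empty value are filtered out, errors
     * are logged and swallowed, and publish()/autoConnect(1) lets all downstream
     * subscribers share a single Kafka consumer.
     */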
    private Flux<DataFromKafkaTopic> startReceiveFromTopic(String clientId) {
        logger.debug("Listening to kafka topic: {}", this.applicationConfig.getKafkaInputTopic());

        return KafkaReceiver.create(kafkaInputProperties(clientId)) //
                .receiveAutoAck() //
                .concatMap(consumerRecord -> consumerRecord) //
                .doOnNext(input -> logger.trace("Received from kafka topic: {}",
                        this.applicationConfig.getKafkaInputTopic())) //
                .doOnError(t -> logger.error("Received error: {}", t.getMessage())) //
                .onErrorResume(t -> Mono.empty()) //
                .doFinally(sig -> logger.error("KafkaTopicListener stopped, reason: {}", sig)) //
                .filter(t -> t.value().length > 0 || t.key().length > 0) //
                .map(input -> new DataFromKafkaTopic(input.headers(), input.key(), input.value())) //
                .publish() //
                .autoConnect(1);
    }

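    /**
     * Assembles the Kafka consumer properties (bootstrap servers, group id,
     * byte-array deserializers, disabled auto-commit and any configured security
     * settings) and subscribes the receiver to the configured input topic.
     */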
    private ReceiverOptions<byte[], byte[]> kafkaInputProperties(String clientId) {
        Map<String, Object> consumerProps = new HashMap<>();
        if (this.applicationConfig.getKafkaBootStrapServers().isEmpty()) {
            logger.error("No kafka bootstrap server is configured");
        }

        consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
        consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, applicationConfig.getKafkaGroupId());
        consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

        consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId + "_" + applicationConfig.getKafkaGroupId());
        this.applicationConfig.addKafkaSecurityProps(consumerProps);

        return ReceiverOptions.<byte[], byte[]>create(consumerProps)
                .subscription(Collections.singleton(this.applicationConfig.getKafkaInputTopic()));
    }

}
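
For reference, a minimal usage sketch of the shared flux, not part of the file above. It assumes an ApplicationConfig instance wired to a reachable Kafka broker and a valid input topic; the class and method names in the sketch are illustrative only.

    package org.oran.pmlog;

    import org.oran.pmlog.configuration.ApplicationConfig;
    import reactor.core.publisher.Flux;

    // Hypothetical illustration class, not part of the change above.
    class KafkaTopicListenerUsageSketch {

        // 'config' is assumed to be a fully populated ApplicationConfig.
        static void consumeTwice(ApplicationConfig config) {
            KafkaTopicListener listener = new KafkaTopicListener(config);
            Flux<DataFromKafkaTopic> shared = listener.getFlux();

            // Because of publish().autoConnect(1), both subscribers share one
            // underlying Kafka consumer and see the same stream of records.
            shared.subscribe(data -> System.out.println("subscriber 1 got a record"));
            shared.subscribe(data -> System.out.println("subscriber 2 got a record"));
        }
    }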