2 * ========================LICENSE_START=================================
5 * Copyright (C) 2023 Nordix Foundation
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 * ========================LICENSE_END===================================
21 package org.oran.pmlog;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.oran.pmlog.configuration.ApplicationConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
/**
 * The class streams incoming requests from a Kafka topic and sends them further
 * to a multi cast sink, which several other streams can connect to.
 */
42 @SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
43 public class KafkaTopicListener {
    private static final Logger logger = LoggerFactory.getLogger(KafkaTopicListener.class);

    // Provides the Kafka topic name, bootstrap servers, group/client ids and security properties.
    private final ApplicationConfig applicationConfig;

    // Lazily-created shared flux of records from the input topic; see getFlux().
    private Flux<DataFromKafkaTopic> dataFromTopic;

    // NOTE(review): not referenced anywhere in this visible portion of the class —
    // confirm it is used elsewhere in the file before removing.
    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().disableHtmlEscaping().create();
50 public KafkaTopicListener(ApplicationConfig applConfig) {
51 this.applicationConfig = applConfig;
54 public Flux<DataFromKafkaTopic> getFlux() {
55 if (this.dataFromTopic == null) {
56 this.dataFromTopic = startReceiveFromTopic(this.applicationConfig.getKafkaClientId());
58 return this.dataFromTopic;
61 private Flux<DataFromKafkaTopic> startReceiveFromTopic(String clientId) {
62 logger.debug("Listening to kafka topic: {}", this.applicationConfig.getKafkaInputTopic());
64 return KafkaReceiver.create(kafkaInputProperties(clientId)) //
66 .concatMap(consumerRecord -> consumerRecord) //
67 .doOnNext(input -> logger.trace("Received from kafka topic: {}",
68 this.applicationConfig.getKafkaInputTopic())) //
69 .doOnError(t -> logger.error("Received error: {}", t.getMessage())) //
70 .onErrorResume(t -> Mono.empty()) //
71 .doFinally(sig -> logger.error("KafkaTopicListener stopped, reason: {}", sig)) //
72 .filter(t -> t.value().length > 0 || t.key().length > 0) //
73 .map(input -> new DataFromKafkaTopic(input.headers(), input.key(), input.value())) //
78 private ReceiverOptions<byte[], byte[]> kafkaInputProperties(String clientId) {
79 Map<String, Object> consumerProps = new HashMap<>();
80 if (this.applicationConfig.getKafkaBootStrapServers().isEmpty()) {
81 logger.error("No kafka boostrap server is setup");
84 consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
85 consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
86 consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, applicationConfig.getKafkaGroupId());
87 consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
88 consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
89 consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
91 consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId + "_" + applicationConfig.getKafkaGroupId());
92 this.applicationConfig.addKafkaSecurityProps(consumerProps);
94 return ReceiverOptions.<byte[], byte[]>create(consumerProps)
95 .subscription(Collections.singleton(this.applicationConfig.getKafkaInputTopic()));