/*-
 * ========================LICENSE_START=================================
 * Copyright (C) 2021 Nordix Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */
21 package org.oran.datafile.tasks;
23 import java.util.Collections;
24 import java.util.HashMap;
27 import lombok.ToString;
29 import org.apache.kafka.clients.consumer.ConsumerConfig;
30 import org.apache.kafka.common.serialization.StringDeserializer;
31 import org.oran.datafile.configuration.AppConfig;
32 import org.slf4j.Logger;
33 import org.slf4j.LoggerFactory;
35 import reactor.core.publisher.Flux;
36 import reactor.kafka.receiver.KafkaReceiver;
37 import reactor.kafka.receiver.ReceiverOptions;
/**
 * The class streams incoming requests from a Kafka topic and sends them further
 * to a multicast sink, which several other streams can connect to.
 */
43 @SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
44 public class KafkaTopicListener {
47 public static class DataFromTopic {
48 public final String key;
49 public final String value;
51 public DataFromTopic(String key, String value) {
57 private static final Logger logger = LoggerFactory.getLogger(KafkaTopicListener.class);
59 private Flux<DataFromTopic> dataFromTopic;
60 private final AppConfig appConfig;
62 public KafkaTopicListener(AppConfig applConfig) {
63 this.appConfig = applConfig;
66 public Flux<DataFromTopic> getFlux() {
67 if (this.dataFromTopic == null) {
68 this.dataFromTopic = startReceiveFromTopic();
70 return this.dataFromTopic;
73 private Flux<DataFromTopic> startReceiveFromTopic() {
74 logger.debug("Listening to kafka topic: {}, client id: {}", appConfig.getInputTopic(),
75 appConfig.getKafkaClientId());
76 return KafkaReceiver.create(kafkaInputProperties()) //
79 input -> logger.debug("Received from kafka topic: {} :{}", appConfig.getInputTopic(), input.value())) //
80 .doOnError(t -> logger.error("KafkaTopicReceiver error: {}", t.getMessage())) //
81 .doFinally(sig -> logger.error("KafkaTopicReceiver stopped, reason: {}", sig)) //
82 .doFinally(sig -> this.dataFromTopic = null) //
83 .filter(t -> !t.value().isEmpty() || !t.key().isEmpty()) //
84 .map(input -> new DataFromTopic(input.key(), input.value())) //
89 private ReceiverOptions<String, String> kafkaInputProperties() {
90 Map<String, Object> consumerProps = new HashMap<>();
91 if (appConfig.getKafkaBootStrapServers().isEmpty()) {
92 logger.error("No kafka boostrap server is setup");
94 consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, appConfig.getKafkaBootStrapServers());
95 consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "osc-dmaap-adapter-" + appConfig.getInputTopic());
96 consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
97 consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
98 consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
99 consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, appConfig.getKafkaClientId());
100 this.appConfig.addKafkaSecurityProps(consumerProps);
102 return ReceiverOptions.<String, String>create(consumerProps)
103 .subscription(Collections.singleton(appConfig.getInputTopic()));