/*-
 * ========================LICENSE_START=================================
 * Copyright (C) 2021 Nordix Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */
package org.oran.dmaapadapter.tasks;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.oran.dmaapadapter.configuration.ApplicationConfig;
import org.oran.dmaapadapter.repository.InfoType;
import org.oran.dmaapadapter.repository.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks;
import reactor.core.publisher.Sinks.Many;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;
46 * The class fetches incoming requests from DMAAP and sends them further to the
47 * consumers that has a job for this InformationType.
49 @SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
50 public class KafkaTopicConsumer {
51 private static final Logger logger = LoggerFactory.getLogger(KafkaTopicConsumer.class);
52 private final ApplicationConfig applicationConfig;
53 private final InfoType type;
54 private final Many<String> consumerDistributor;
56 public KafkaTopicConsumer(ApplicationConfig applicationConfig, InfoType type) {
57 this.applicationConfig = applicationConfig;
59 final int CONSUMER_BACKPRESSURE_BUFFER_SIZE = 1024 * 10;
60 this.consumerDistributor = Sinks.many().multicast().onBackpressureBuffer(CONSUMER_BACKPRESSURE_BUFFER_SIZE);
63 startKafkaTopicReceiver();
66 private Disposable startKafkaTopicReceiver() {
67 return KafkaReceiver.create(kafkaInputProperties()) //
69 .doOnNext(this::onReceivedData) //
71 throwable -> logger.error("KafkaTopicReceiver error: {}", throwable.getMessage()), //
72 () -> logger.warn("KafkaTopicReceiver stopped"));
75 private void onReceivedData(ConsumerRecord<Integer, String> input) {
76 logger.debug("Received from kafka topic: {} :{}", this.type.getKafkaInputTopic(), input.value());
77 consumerDistributor.emitNext(input.value(), Sinks.EmitFailureHandler.FAIL_FAST);
80 public Disposable startDistributeToConsumer(Job job) {
81 final int CONCURRENCY = 10; // Has to be 1 to guarantee correct order.
83 return getMessagesFromKafka(job) //
84 .doOnNext(data -> logger.debug("Sending to consumer {} {} {}", job.getId(), job.getCallbackUrl(), data))
85 .flatMap(body -> job.getConsumerRestClient().post("", body), CONCURRENCY) //
86 .onErrorResume(this::handleConsumerErrorResponse) //
88 throwable -> logger.error("KafkaMessageConsumer error: {}", throwable.getMessage()), //
89 () -> logger.warn("KafkaMessageConsumer stopped {}", job.getType().getId()));
92 private Flux<String> getMessagesFromKafka(Job job) {
93 if (job.isBuffered()) {
94 return consumerDistributor.asFlux() //
95 .filter(job::isFilterMatch) //
97 job.getParameters().getBufferTimeout().getMaxSize(), //
98 job.getParameters().getBufferTimeout().getMaxTime()) //
99 .map(Object::toString);
101 return consumerDistributor.asFlux() //
102 .filter(job::isFilterMatch);
106 private Mono<String> handleConsumerErrorResponse(Throwable t) {
107 logger.warn("error from CONSUMER {}", t.getMessage());
111 private ReceiverOptions<Integer, String> kafkaInputProperties() {
112 Map<String, Object> consumerProps = new HashMap<>();
113 if (this.applicationConfig.getKafkaBootStrapServers().isEmpty()) {
114 logger.error("No kafka boostrap server is setup");
116 consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
117 consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "osc-dmaap-adaptor");
118 consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
119 consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
121 return ReceiverOptions.<Integer, String>create(consumerProps)
122 .subscription(Collections.singleton(this.type.getKafkaInputTopic()));