/*-
 * ========================LICENSE_START=================================
 * Copyright (C) 2021 Nordix Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */
package org.oran.dmaapadapter.tasks;

import org.oran.dmaapadapter.repository.Job;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.reactive.function.client.WebClientResponseException;

import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.Sinks.Many;
34 * The class streams data from a multi cast sink and sends the data to the Job
35 * owner via REST calls.
37 @SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
39 public class KafkaJobDataConsumer {
40 private static final Logger logger = LoggerFactory.getLogger(KafkaJobDataConsumer.class);
41 private final Many<String> input;
42 private final Job job;
43 private Disposable subscription;
44 private int errorCounter = 0;
46 KafkaJobDataConsumer(Many<String> input, Job job) {
51 public synchronized void start() {
53 this.subscription = getMessagesFromKafka(job) //
54 .doOnNext(data -> logger.debug("Sending to consumer {} {} {}", job.getId(), job.getCallbackUrl(), data))
55 .flatMap(body -> job.getConsumerRestClient().post("", body), job.getParameters().getMaxConcurrency()) //
56 .onErrorResume(this::handleError) //
57 .subscribe(this::handleConsumerSentOk, //
58 this::handleErrorInStream, //
59 () -> logger.debug("KafkaMessageConsumer stopped, jobId: {}, type: {}", job.getId(),
60 job.getType().getId()));
63 public synchronized void stop() {
64 if (this.subscription != null) {
65 subscription.dispose();
70 public synchronized boolean isRunning() {
71 return this.subscription != null;
74 private Flux<String> getMessagesFromKafka(Job job) {
75 Flux<String> result = input.asFlux() //
76 .filter(job::isFilterMatch);
78 if (job.isBuffered()) {
79 result = result.bufferTimeout( //
80 job.getParameters().getBufferTimeout().getMaxSize(), //
81 job.getParameters().getBufferTimeout().getMaxTime()) //
82 .map(Object::toString);
87 private Mono<String> handleError(Throwable t) {
88 logger.warn("exception: {} job: {}", t.getMessage(), job);
90 final int STOP_AFTER_ERRORS = 5;
91 if (t instanceof WebClientResponseException) {
92 if (++this.errorCounter > STOP_AFTER_ERRORS) {
93 logger.error("Stopping job {}", job);
96 return Mono.empty(); // Discard
99 // This can happen if there is an overflow.
104 private void handleConsumerSentOk(String data) {
105 this.errorCounter = 0;
108 private void handleErrorInStream(Throwable t) {
109 logger.error("KafkaMessageConsumer jobId: {}, error: {}", job.getId(), t.getMessage());
110 this.subscription = null;