import java.util.HashMap;
import java.util.Map;

import lombok.Getter;

import org.oran.dmaapadapter.configuration.ApplicationConfig;
import org.oran.dmaapadapter.repository.InfoType;
import org.oran.dmaapadapter.repository.InfoTypes;
import org.oran.dmaapadapter.repository.Job;
import org.oran.dmaapadapter.repository.Jobs;
import org.oran.dmaapadapter.repository.MultiMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
-/**
- * The class fetches incoming requests from DMAAP and sends them further to the
- * consumers that has a job for this InformationType.
- */
@SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
@Component
+@EnableScheduling
public class KafkaTopicConsumers {
private static final Logger logger = LoggerFactory.getLogger(KafkaTopicConsumers.class);
- private final Map<String, KafkaTopicConsumer> topicConsumers = new HashMap<>();
- private final Map<String, Disposable> activeSubscriptions = new HashMap<>();
- private final ApplicationConfig appConfig;
+ private final Map<String, KafkaTopicListener> topicListeners = new HashMap<>(); // Key is typeId
- public KafkaTopicConsumers(@Autowired ApplicationConfig appConfig) {
- this.appConfig = appConfig;
- }
+ @Getter
+ private final MultiMap<KafkaJobDataConsumer> consumers = new MultiMap<>(); // Key is typeId, jobId
+
+ private static final int CONSUMER_SUPERVISION_INTERVAL_MS = 1000 * 60 * 3;
+
+ public KafkaTopicConsumers(@Autowired ApplicationConfig appConfig, @Autowired InfoTypes types,
+ @Autowired Jobs jobs) {
- public void start(InfoTypes types) {
for (InfoType type : types.getAll()) {
if (type.isKafkaTopicDefined()) {
- KafkaTopicConsumer topicConsumer = new KafkaTopicConsumer(appConfig, type);
- topicConsumers.put(type.getId(), topicConsumer);
+ KafkaTopicListener topicConsumer = new KafkaTopicListener(appConfig, type);
+ topicListeners.put(type.getId(), topicConsumer);
}
}
+
+ jobs.addObserver(new Jobs.Observer() {
+ @Override
+ public void onJobbAdded(Job job) {
+ addJob(job);
+ }
+
+ @Override
+ public void onJobRemoved(Job job) {
+ removeJob(job);
+ }
+ });
}
public synchronized void addJob(Job job) {
- if (this.activeSubscriptions.get(job.getId()) == null && job.getType().isKafkaTopicDefined()) {
+ if (job.getType().isKafkaTopicDefined()) {
+ removeJob(job);
logger.debug("Kafka job added {}", job.getId());
- KafkaTopicConsumer topicConsumer = topicConsumers.get(job.getType().getId());
- Disposable subscription = topicConsumer.startDistributeToConsumer(job);
- activeSubscriptions.put(job.getId(), subscription);
+ KafkaTopicListener topicConsumer = topicListeners.get(job.getType().getId());
+ if (consumers.get(job.getType().getId()).isEmpty()) {
+ topicConsumer.start();
+ }
+ KafkaJobDataConsumer subscription = new KafkaJobDataConsumer(job);
+ subscription.start(topicConsumer.getOutput().asFlux());
+ consumers.put(job.getType().getId(), job.getId(), subscription);
}
}
public synchronized void removeJob(Job job) {
- Disposable d = activeSubscriptions.remove(job.getId());
+ KafkaJobDataConsumer d = consumers.remove(job.getType().getId(), job.getId());
if (d != null) {
logger.debug("Kafka job removed {}", job.getId());
- d.dispose();
+ d.stop();
+ }
+ }
+
+ @Scheduled(fixedRate = CONSUMER_SUPERVISION_INTERVAL_MS)
+ public synchronized void restartNonRunningTopics() {
+ for (String typeId : this.consumers.keySet()) {
+ for (KafkaJobDataConsumer consumer : this.consumers.get(typeId)) {
+ if (!consumer.isRunning()) {
+ restartTopic(consumer);
+ }
+ }
}
}
+ private void restartTopic(KafkaJobDataConsumer consumer) {
+ InfoType type = consumer.getJob().getType();
+ KafkaTopicListener topic = this.topicListeners.get(type.getId());
+ topic.start();
+ restartConsumersOfType(topic, type);
+ }
+
+ private void restartConsumersOfType(KafkaTopicListener topic, InfoType type) {
+ this.consumers.get(type.getId()).forEach(consumer -> consumer.start(topic.getOutput().asFlux()));
+ }
}