2 * ========================LICENSE_START=================================
5 * Copyright (C) 2023 Nordix Foundation
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
11 * http://www.apache.org/licenses/LICENSE-2.0
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 * ========================LICENSE_END===================================
21 package org.oran.pmproducer.tasks;
23 import java.io.ByteArrayInputStream;
24 import java.io.IOException;
25 import java.util.Collections;
26 import java.util.HashMap;
28 import java.util.zip.GZIPInputStream;
32 import lombok.ToString;
34 import org.apache.kafka.clients.consumer.ConsumerConfig;
35 import org.apache.kafka.clients.consumer.ConsumerRecord;
36 import org.apache.kafka.common.header.Header;
37 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
38 import org.oran.pmproducer.configuration.ApplicationConfig;
39 import org.oran.pmproducer.datastore.DataStore;
40 import org.oran.pmproducer.filter.PmReport;
41 import org.oran.pmproducer.repository.InfoType;
42 import org.slf4j.Logger;
43 import org.slf4j.LoggerFactory;
45 import reactor.core.publisher.Flux;
46 import reactor.core.publisher.Mono;
47 import reactor.kafka.receiver.KafkaReceiver;
48 import reactor.kafka.receiver.ReceiverOptions;
51 * The class streams incoming requests from a Kafka topic and sends them further
52 * to a multicast sink, which several other streams can connect to.
54 @SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
55 public class TopicListener {
// Immutable holder for one message consumed from a Kafka topic: the raw
// key/value bytes, the id of the InfoType it was received for, and the Kafka
// record headers.
// NOTE(review): several return statements/closing braces of this class are not
// visible in this extract; comments below describe only the code shown.
58 public static class DataFromTopic {
59 public final byte[] key;
60 public final byte[] value;
// Id of the InfoType this record belongs to.
62 public final String infoTypeId;
// Kafka record headers as received; may be null (checked by accessors below).
64 public final Iterable<Header> headers;
// Shared empty array used instead of null for an absent key or value.
66 private static byte[] noBytes = new byte[0];
// Cached parsed PM report; the code populating it is not visible here.
71 private PmReport cachedPmReport;
// Normalizes a null key/value to the shared empty array so callers never see null.
73 public DataFromTopic(String typeId, Iterable<Header> headers, byte[] key, byte[] value) {
74 this.key = key == null ? noBytes : key;
75 this.value = value == null ? noBytes : value;
76 this.infoTypeId = typeId;
77 this.headers = headers;
// Decodes the value bytes into a String.
// NOTE(review): uses the platform default charset; consider an explicit
// StandardCharsets.UTF_8 — confirm intended encoding.
80 public String valueAsString() {
81 return new String(this.value);
// Header key flagging that the record value is gzip compressed.
84 public static final String ZIPPED_PROPERTY = "gzip";
// Header key carrying the type id of the record.
85 public static final String TYPE_ID_PROPERTY = "type-id";
// True when a "gzip" header is present; no headers means not zipped.
// (The return statements of this method fall outside the visible extract.)
87 public boolean isZipped() {
88 if (headers == null) {
91 for (Header h : headers) {
92 if (h.key().equals(ZIPPED_PROPERTY)) {
// Returns the value of the "type-id" header decoded as a String.
// NOTE(review): also platform-default charset for the header bytes.
99 public String getTypeIdFromHeaders() {
100 if (headers == null) {
103 for (Header h : headers) {
104 if (h.key().equals(TYPE_ID_PROPERTY)) {
105 return new String(h.value());
112 private static final Logger logger = LoggerFactory.getLogger(TopicListener.class);
113 private final ApplicationConfig applicationConfig;
// The info type whose Kafka input topic this listener consumes.
114 private final InfoType type;
// Lazily created flux of received data; see getFlux().
115 private Flux<DataFromTopic> dataFromTopic;
// Shared Gson instance used to parse NewFileEvent JSON payloads.
116 private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().disableHtmlEscaping().create();
117 private final DataStore dataStore;
// Kafka consumer group id, taken from the InfoType in the constructor.
120 private String kafkaGroupId;
// Creates a listener for the given type, backed by a DataStore built from the
// application configuration.
// NOTE(review): this.type is read below but its assignment (original line 124)
// is not visible in this extract — presumably "this.type = type;"; confirm.
122 public TopicListener(ApplicationConfig applConfig, InfoType type) {
123 this.applicationConfig = applConfig;
125 this.dataStore = DataStore.create(applConfig);
126 this.kafkaGroupId = this.type.getKafkaGroupId();
// Returns the shared flux of data from the topic, starting consumption on the
// first call.
// NOTE(review): the lazy initialization is unsynchronized — two concurrent
// first callers could each start a consumer; confirm single-threaded usage.
129 public Flux<DataFromTopic> getFlux() {
130 if (this.dataFromTopic == null) {
131 this.dataFromTopic = start(this.type.getKafkaClientId(this.applicationConfig));
133 return this.dataFromTopic;
// Builds the processing pipeline: drop empty records, wrap each record in a
// DataFromTopic, and — for "new file" events — replace the payload with the
// referenced file's contents read from the data store.
136 private Flux<DataFromTopic> start(String clientId) {
137 logger.debug("Listening to kafka topic: {} type :{}", this.type.getKafkaInputTopic(), type.getId());
139 return receiveFromKafka(clientId) //
// Ignore records where both key and value are empty.
140 .filter(t -> t.value().length > 0 || t.key().length > 0) //
141 .map(input -> new DataFromTopic(this.type.getId(), input.headers(), input.key(), input.value())) //
142 .flatMap(data -> getDataFromFileIfNewPmFileEvent(data, type, dataStore)) //
// Creates the raw Kafka record flux for this type's input topic. Errors are
// logged and swallowed: onErrorResume completes the flux instead of
// propagating the failure to downstream subscribers.
// NOTE(review): doOnError logs only t.getMessage(); the stack trace is lost —
// consider logger.error("...", t) to keep the cause.
147 public Flux<ConsumerRecord<byte[], byte[]>> receiveFromKafka(String clientId) {
148 return KafkaReceiver.create(kafkaInputProperties(clientId)) //
150 .concatMap(consumerRecord -> consumerRecord) //
151 .doOnNext(input -> logger.trace("Received from kafka topic: {}", this.type.getKafkaInputTopic())) //
152 .doOnError(t -> logger.error("Received error: {}", t.getMessage())) //
153 .onErrorResume(t -> Mono.empty()) //
// Logged at error level so any termination of the listener is always visible.
154 .doFinally(sig -> logger.error("TopicListener stopped, type: {}, reason: {}", this.type.getId(), sig));
// Assembles the Kafka consumer properties and subscribes the receiver options
// to this type's input topic.
157 private ReceiverOptions<byte[], byte[]> kafkaInputProperties(String clientId) {
158 Map<String, Object> consumerProps = new HashMap<>();
159 if (this.applicationConfig.getKafkaBootStrapServers().isEmpty()) {
// NOTE(review): "boostrap" typo in this log message (runtime string, left as-is here).
160 logger.error("No kafka boostrap server is setup");
// Auto-commit is disabled; offsets are managed through the reactive receiver.
163 consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
164 consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
165 consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
166 consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
167 consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
// NOTE(review): duplicate of the put above — this Boolean value overwrites the
// String "false" set earlier; one of the two should be removed.
168 consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
// Client id is suffixed with the group id to ease correlation in broker logs.
170 consumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId + "_" + kafkaGroupId);
172 return ReceiverOptions.<byte[], byte[]>create(consumerProps)
173 .subscription(Collections.singleton(this.type.getKafkaInputTopic()));
// If the record is a small JSON NewFileEvent, loads the referenced PM file
// from the data store (unzipping ".gz" files) and returns a DataFromTopic with
// the file contents as value; otherwise, or on any parse/read problem, the
// original record is passed through unchanged.
176 public static Mono<DataFromTopic> getDataFromFileIfNewPmFileEvent(DataFromTopic data, InfoType type,
177 DataStore fileStore) {
// Heuristic: a NewFileEvent JSON is small; anything larger than 200 bytes is
// treated as inline PM data and passed through unchanged.
179 if (data.value.length > 200) {
180 return Mono.just(data);
183 NewFileEvent ev = gson.fromJson(data.valueAsString(), NewFileEvent.class);
185 if (ev.getFilename() == null) {
186 logger.warn("Ignoring received message: {}", data);
189 logger.trace("Reading PM measurements, type: {}, inputTopic: {}", type.getId(), type.getKafkaInputTopic());
190 return fileStore.readObject(DataStore.Bucket.FILES, ev.getFilename()) //
191 .map(bytes -> unzip(bytes, ev.getFilename())) //
192 .map(bytes -> new DataFromTopic(data.infoTypeId, data.headers, data.key, bytes));
// Any failure (malformed JSON, read error) falls back to the original record.
194 } catch (Exception e) {
195 return Mono.just(data);
// Decompresses a gzip byte array fully into memory; the stream is closed by
// try-with-resources. Throws IOException on malformed gzip data.
199 public static byte[] unzip(byte[] bytes) throws IOException {
200 try (final GZIPInputStream gzipInput = new GZIPInputStream(new ByteArrayInputStream(bytes))) {
201 return gzipInput.readAllBytes();
// Unzips only when the file name has a ".gz" suffix; other files are returned
// as-is. NOTE(review): on IOException only e.getMessage() is logged (stack
// trace lost); the fallback return is outside this visible extract.
205 private static byte[] unzip(byte[] bytes, String fileName) {
207 return fileName.endsWith(".gz") ? unzip(bytes) : bytes;
208 } catch (IOException e) {
209 logger.error("Error while decompression, file: {}, reason: {}", fileName, e.getMessage());