Adding source name to kafka headers
nonrtric/plt/ranpm.git: pmproducer/src/main/java/org/oran/pmproducer/tasks/TopicListener.java
/*-
 * ========================LICENSE_START=================================
 * O-RAN-SC
 * %%
 * Copyright (C) 2023 Nordix Foundation
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */

package org.oran.pmproducer.tasks;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;

import lombok.Getter;
import lombok.Setter;
import lombok.ToString;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.serialization.ByteArrayDeserializer;
import org.oran.pmproducer.configuration.ApplicationConfig;
import org.oran.pmproducer.datastore.DataStore;
import org.oran.pmproducer.filter.PmReport;
import org.oran.pmproducer.repository.InfoType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.kafka.receiver.KafkaReceiver;
import reactor.kafka.receiver.ReceiverOptions;

/**
 * Streams incoming records from a Kafka topic and publishes them to a
 * multicast sink that several other streams can connect to.
 */
@SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
public class TopicListener {

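    /**
     * One record received from Kafka: key, value and headers, together with the id of the
     * information type it belongs to. A parsed PM report can be cached on the instance.
     */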
    @ToString
    public static class DataFromTopic {
        public final byte[] key;
        public final byte[] value;

        public final String infoTypeId;

        public final Iterable<Header> headers;

        private static byte[] noBytes = new byte[0];

        @Getter
        @Setter
        @ToString.Exclude
        private PmReport cachedPmReport;

        public DataFromTopic(String typeId, Iterable<Header> headers, byte[] key, byte[] value) {
            this.key = key == null ? noBytes : key;
            this.value = value == null ? noBytes : value;
            this.infoTypeId = typeId;
            this.headers = headers;
        }

        public String valueAsString() {
            return new String(this.value);
        }

        public static final String ZIPPED_PROPERTY = "gzip";
        public static final String TYPE_ID_PROPERTY = "type-id";
        public static final String SOURCE_NAME_PROPERTY = "source-name";

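        /** Returns true if the record headers mark the value as gzip compressed. */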
        public boolean isZipped() {
            if (headers == null) {
                return false;
            }
            for (Header h : headers) {
                if (h.key().equals(ZIPPED_PROPERTY)) {
                    return true;
                }
            }
            return false;
        }

        public String getTypeIdFromHeaders() {
            return this.getStringProperty(TYPE_ID_PROPERTY);
        }

        public String getSourceNameFromHeaders() {
            return this.getStringProperty(SOURCE_NAME_PROPERTY);
        }

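        /** Returns the value of the named header, or an empty string if the header is not present. */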
        private String getStringProperty(String propertyName) {
            if (headers == null) {
                return "";
            }
            for (Header h : headers) {
                if (h.key().equals(propertyName)) {
                    return new String(h.value());
                }
            }
            return "";
        }

    }

    private static final Logger logger = LoggerFactory.getLogger(TopicListener.class);
    private final ApplicationConfig applicationConfig;
    private final InfoType type;
    private Flux<DataFromTopic> dataFromTopic;
    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().disableHtmlEscaping().create();
    private final DataStore dataStore;

    @Setter
    private String kafkaGroupId;

    public TopicListener(ApplicationConfig applConfig, InfoType type) {
        this.applicationConfig = applConfig;
        this.type = type;
        this.dataStore = DataStore.create(applConfig);
        this.kafkaGroupId = this.type.getKafkaGroupId();
    }

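    /**
     * Returns the shared flux of data received from the input topic of the type. The flux is
     * created lazily on the first call; consumption starts when the first subscriber connects
     * and the stream is multicast to all subscribers, e.g.
     * {@code topicListener.getFlux().subscribe(data -> ...)}.
     */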
    public Flux<DataFromTopic> getFlux() {
        if (this.dataFromTopic == null) {
            this.dataFromTopic = start(this.type.getKafkaClientId(this.applicationConfig));
        }
        return this.dataFromTopic;
    }

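    /**
     * Builds the processing pipeline: empty records are filtered out, each record is wrapped in
     * a DataFromTopic, new-file events are resolved to the contents of the referenced file, and
     * the result is published as a hot, multicast flux.
     */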
    private Flux<DataFromTopic> start(String clientId) {
        logger.debug("Listening to kafka topic: {} type: {}", this.type.getKafkaInputTopic(), type.getId());

        return receiveFromKafka(clientId) //
                .filter(t -> t.value().length > 0 || t.key().length > 0) //
                .map(input -> new DataFromTopic(this.type.getId(), input.headers(), input.key(), input.value())) //
                .flatMap(data -> getDataFromFileIfNewPmFileEvent(data, type, dataStore)) //
                .publish() //
                .autoConnect(1);
    }

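    /**
     * Creates a reactive Kafka receiver for the input topic of the type. Receive errors are
     * logged and the stream terminates empty instead of propagating the error.
     */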
    public Flux<ConsumerRecord<byte[], byte[]>> receiveFromKafka(String clientId) {
        return KafkaReceiver.create(kafkaInputProperties(clientId)) //
                .receiveAutoAck() //
                .concatMap(consumerRecord -> consumerRecord) //
                .doOnNext(input -> logger.trace("Received from kafka topic: {}", this.type.getKafkaInputTopic())) //
                .doOnError(t -> logger.error("Received error: {}", t.getMessage())) //
                .onErrorResume(t -> Mono.empty()) //
                .doFinally(sig -> logger.error("TopicListener stopped, type: {}, reason: {}", this.type.getId(), sig));
    }

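    /**
     * Builds the Kafka consumer options: byte-array deserializers, auto-commit disabled, the
     * configured bootstrap servers, group and client ids, and any security properties from the
     * application configuration.
     */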
    private ReceiverOptions<byte[], byte[]> kafkaInputProperties(String clientId) {
        Map<String, Object> props = new HashMap<>();
        if (this.applicationConfig.getKafkaBootStrapServers().isEmpty()) {
            logger.error("No Kafka bootstrap server is configured");
        }

        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.applicationConfig.getKafkaBootStrapServers());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaGroupId);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ByteArrayDeserializer.class);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        props.put(ConsumerConfig.CLIENT_ID_CONFIG, clientId + "_" + kafkaGroupId);

        this.applicationConfig.addKafkaSecurityProps(props);

        return ReceiverOptions.<byte[], byte[]>create(props)
                .subscription(Collections.singleton(this.type.getKafkaInputTopic()));
    }

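    /**
     * Resolves new-file events. A small record value is parsed as a NewFileEvent referring to a
     * PM file in the file store; the file is read from the FILES bucket, gunzipped if needed,
     * and its contents replace the record value. Larger values, and values that cannot be parsed
     * as a file event, are passed through unchanged.
     */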
    public static Mono<DataFromTopic> getDataFromFileIfNewPmFileEvent(DataFromTopic data, InfoType type,
            DataStore fileStore) {
        try {
            if (data.value.length > 200) {
                return Mono.just(data);
            }

            NewFileEvent ev = gson.fromJson(data.valueAsString(), NewFileEvent.class);

            if (ev.getFilename() == null) {
                logger.warn("Ignoring received message: {}", data);
                return Mono.empty();
            }
            logger.trace("Reading PM measurements, type: {}, inputTopic: {}", type.getId(), type.getKafkaInputTopic());
            return fileStore.readObject(DataStore.Bucket.FILES, ev.getFilename()) //
                    .map(bytes -> unzip(bytes, ev.getFilename())) //
                    .map(bytes -> new DataFromTopic(data.infoTypeId, data.headers, data.key, bytes));

        } catch (Exception e) {
            return Mono.just(data);
        }
    }

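    /** Decompresses a gzipped byte array. */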
    public static byte[] unzip(byte[] bytes) throws IOException {
        try (final GZIPInputStream gzipInput = new GZIPInputStream(new ByteArrayInputStream(bytes))) {
            return gzipInput.readAllBytes();
        }
    }

    private static byte[] unzip(byte[] bytes, String fileName) {
        try {
            return fileName.endsWith(".gz") ? unzip(bytes) : bytes;
        } catch (IOException e) {
            logger.error("Error while decompressing, file: {}, reason: {}", fileName, e.getMessage());
            return new byte[0];
        }
    }

}