pm-file-converter/main.go
// -
//
//      ========================LICENSE_START=================================
//      O-RAN-SC
//      %%
//      Copyright (C) 2023: Nordix Foundation
//      %%
//      Licensed under the Apache License, Version 2.0 (the "License");
//      you may not use this file except in compliance with the License.
//      You may obtain a copy of the License at
//
//           http://www.apache.org/licenses/LICENSE-2.0
//
//      Unless required by applicable law or agreed to in writing, software
//      distributed under the License is distributed on an "AS IS" BASIS,
//      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//      See the License for the specific language governing permissions and
//      limitations under the License.
//      ========================LICENSE_END===================================
package main

import (
        "fmt"
        jsoniter "github.com/json-iterator/go"
        log "github.com/sirupsen/logrus"
        "main/common/dataTypes"
        "main/common/utils"
        "main/components/kafkacollector"
        "net/http"
        "os"
        "os/signal"
        "runtime"
        "sync"
        "syscall"
        "time"
)

var ics_server = os.Getenv("ICS")
var self = os.Getenv("SELF")

// This is optional - set only if the SASL protocol is used towards Kafka
var creds_grant_type = os.Getenv("CREDS_GRANT_TYPE")

var bootstrapserver = os.Getenv("KAFKA_SERVER")

const config_file = "application_configuration.json"
const producer_name = "kafka-producer"

var producer_instance_name string = producer_name

const reader_queue_length = 100 // Per type job
const writer_queue_length = 100 // Per info job

var files_volume = os.Getenv("FILES_VOLUME")

var data_out_channel = make(chan *dataTypes.KafkaPayload, writer_queue_length)
var writer_control = make(chan dataTypes.WriterControl, 1)

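// Registration retry delays in seconds: periodicRegistration backs off in
// registration_delay_short steps while registration keeps failing and, once
// registration succeeds, repeats it every registration_delay_long seconds.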
const registration_delay_short = 2
const registration_delay_long = 120

// == Variables ==//

var AppState = Init

// Lock for all internal data
var datalock sync.Mutex

const (
        Init dataTypes.AppStates = iota
        Running
        Terminating
)

const registeringProducer = "Registering producer: "

// == Main ==//
func main() {

        //log.SetLevel(log.InfoLevel)
        log.SetLevel(log.TraceLevel)

        log.Info("Server starting...")

        if self == "" {
                log.Panic("Env SELF not configured")
        }
        if bootstrapserver == "" {
                log.Panic("Env KAFKA_SERVER not set")
        }
        if ics_server == "" {
                log.Panic("Env ICS not set")
        }
        if os.Getenv("KP") != "" {
                producer_instance_name = producer_instance_name + "-" + os.Getenv("KP")
        }

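        // Start the topic writer that consumes data_out_channel and writes
        // the payloads to Kafka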
        go kafkacollector.StartTopicWriter(writer_control, data_out_channel)

        // Setup proc for periodic type registration
        var eventChan = make(chan int) // Channel for stopping the proc
        go periodicRegistration(eventChan)

        // Wait for term/int signal, then try to shut down gracefully
        sigs := make(chan os.Signal, 1)
        signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
        go func() {
                sig := <-sigs
                fmt.Printf("Received signal %s - application will terminate\n", sig)
                eventChan <- 0 // Stop periodic registration
                datalock.Lock()
                defer datalock.Unlock()
                AppState = Terminating
        }()

        AppState = Running

        // Wait until all goroutines have exited
        runtime.Goexit()

        // Never reached - runtime.Goexit does not return
        fmt.Println("main routine exit")
        fmt.Println("server stopped")
}

// == Core functions ==//
// Run periodic registration of producers
func periodicRegistration(evtch chan int) {
        var delay int = 1
        for {
                select {
                case msg := <-evtch:
                        if msg == 0 { // Stop thread
                                return
                        }
                case <-time.After(time.Duration(delay) * time.Second):
                        ok := registerProducer()
                        if ok {
                                delay = registration_delay_long
                        } else {
                                if delay < registration_delay_long {
                                        delay += registration_delay_short
                                } else {
                                        delay = registration_delay_short
                                }
                        }
                }
        }
}

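// registerProducer reads the configured data types from
// application_configuration.json, registers each type in ICS with a minimal
// JSON object schema (PUT to /data-producer/v1/info-types/{typeId}) and then
// starts a type job for each type. Returns true only if all steps succeed.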
func registerProducer() bool {

        log.Info(registeringProducer, producer_instance_name)

        file, err := os.ReadFile(config_file)
        if err != nil {
                log.Error("Cannot read config file: ", config_file)
                // NOSONAR
                log.Error(registeringProducer, producer_instance_name, " - failed")
                return false
        }
        data := dataTypes.DataTypes{}
        err = jsoniter.Unmarshal(file, &data)
        if err != nil {
                log.Error("Cannot parse config file: ", config_file)
                // NOSONAR
                log.Error(registeringProducer, producer_instance_name, " - failed")
                return false
        }
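
        // For reference, a minimal application_configuration.json could look
        // like the sketch below. The field names here are assumptions derived
        // from how dataTypes.DataTypes is used in this file; the actual JSON
        // tags are defined in common/dataTypes.
        //
        //   {
        //     "types": [
        //       {
        //         "id": "xml-file-data",
        //         "kafkaInputTopic": "file-ready",
        //         "inputJobType": "xml-file-data"
        //       }
        //     ]
        //   }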
        var newTypeNames []string

        for _, prodType := range data.ProdDataTypes {
                t1 := make(map[string]interface{})
                t2 := make(map[string]interface{})

                t2["schema"] = "http://json-schema.org/draft-07/schema#"
                t2["title"] = prodType.ID
                t2["description"] = prodType.ID
                t2["type"] = "object"

                t1["info_job_data_schema"] = t2

                json, err := jsoniter.Marshal(t1)
                if err != nil {
                        log.Error("Cannot create json for type: ", prodType.ID)
                        // NOSONAR
                        log.Error(registeringProducer, producer_instance_name, " - failed")
                        return false
                }
                ok := utils.SendHttpRequest(json, http.MethodPut, "http://"+ics_server+"/data-producer/v1/info-types/"+prodType.ID, true, creds_grant_type != "")
                if !ok {
                        log.Error("Cannot register type: ", prodType.ID)
                        // NOSONAR
                        log.Error(registeringProducer, producer_instance_name, " - failed")
                        return false
                }
                newTypeNames = append(newTypeNames, prodType.ID)
        }

        log.Debug("Registering types: ", newTypeNames)
        datalock.Lock()
        defer datalock.Unlock()

        for _, v := range data.ProdDataTypes {
                log.Info("Adding type job for type: ", v.ID)
                startTypeJob(v)
        }

        dataTypes.InfoTypes = data
        log.Debug("Datatypes: ", dataTypes.InfoTypes)
        log.Info(registeringProducer, producer_instance_name, " - OK")
        return true
}

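// startTypeJob creates the control and data channels for one configured
// type, starts the matching converter job (if any) for the type and a Kafka
// topic reader for the type's input topic, and stores the job record in
// dataTypes.TypeJobs.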
func startTypeJob(dp dataTypes.DataType) {
        log.Info("Starting type job: ", dp.ID)
        jobRecord := dataTypes.TypeJobRecord{}

        jobRecord.Job_control = make(chan dataTypes.JobControl, 1)
        jobRecord.Reader_control = make(chan dataTypes.ReaderControl, 1)
        jobRecord.Data_in_channel = make(chan *dataTypes.KafkaPayload, reader_queue_length)
        jobRecord.InfoType = dp.ID
        jobRecord.InputTopic = dp.KafkaInputTopic
        jobRecord.GroupId = "kafka-procon-" + dp.ID
        jobRecord.ClientId = dp.ID + "-" + os.Getenv("KP")

        switch dp.ID {
        case "xml-file-data-to-filestore":
                go kafkacollector.StartJobXmlFileData(dp.ID, jobRecord.Job_control, jobRecord.Data_in_channel, data_out_channel, "", "pm-files-json")
        case "xml-file-data":
                go kafkacollector.StartJobXmlFileData(dp.ID, jobRecord.Job_control, jobRecord.Data_in_channel, data_out_channel, files_volume, "")
        default:
                // No converter job defined for this type
        }

        go kafkacollector.StartTopicReader(dp.KafkaInputTopic, dp.ID, jobRecord.Reader_control, jobRecord.Data_in_channel, jobRecord.GroupId, jobRecord.ClientId)

        dataTypes.TypeJobs[dp.ID] = jobRecord
        log.Debug("Type job input type: ", dp.InputJobType)
}