nonrtric/plt/ranpm: pm-file-converter/main.go
// -
//
//      ========================LICENSE_START=================================
//      O-RAN-SC
//      %%
//      Copyright (C) 2023: Nordix Foundation
//      %%
//      Licensed under the Apache License, Version 2.0 (the "License");
//      you may not use this file except in compliance with the License.
//      You may obtain a copy of the License at
//
//           http://www.apache.org/licenses/LICENSE-2.0
//
//      Unless required by applicable law or agreed to in writing, software
//      distributed under the License is distributed on an "AS IS" BASIS,
//      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//      See the License for the specific language governing permissions and
//      limitations under the License.
//      ========================LICENSE_END===================================
package main

import (
        "fmt"
        jsoniter "github.com/json-iterator/go"
        log "github.com/sirupsen/logrus"
        "main/common/dataTypes"
        "main/common/utils"
        "main/components/kafkacollector"
        "net/http"
        "os"
        "os/signal"
        "runtime"
        "sync"
        "syscall"
        "time"
)

var ics_server = os.Getenv("ICS")
var self = os.Getenv("SELF")
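// Environment variables used in this file: SELF, KAFKA_SERVER and ICS are
// required (main panics at startup if any of them is unset); KP, FILES_VOLUME
// and CREDS_GRANT_TYPE are optional.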

// This is optional - set if the SASL protocol is used towards Kafka
var creds_grant_type = os.Getenv("CREDS_GRANT_TYPE")

var bootstrapserver = os.Getenv("KAFKA_SERVER")

const config_file = "application_configuration.json"
const producer_name = "kafka-producer"

var producer_instance_name string = producer_name

const reader_queue_length = 100 // Per type job
const writer_queue_length = 100 // Per info job

var files_volume = os.Getenv("FILES_VOLUME")

var data_out_channel = make(chan *dataTypes.KafkaPayload, writer_queue_length)
var writer_control = make(chan dataTypes.WriterControl, 1)

const registration_delay_short = 2
const registration_delay_long = 120

//== Variables ==//

var AppState = Init

// Lock for all internal data
var datalock sync.Mutex

const (
        Init dataTypes.AppStates = iota
        Running
        Terminating
)

// == Main ==//
func main() {

        //log.SetLevel(log.InfoLevel)
        log.SetLevel(log.TraceLevel)

        log.Info("Server starting...")

        if self == "" {
                log.Panic("Env SELF not configured")
        }
        if bootstrapserver == "" {
                log.Panic("Env KAFKA_SERVER not set")
        }
        if ics_server == "" {
                log.Panic("Env ICS not set")
        }
        if os.Getenv("KP") != "" {
                producer_instance_name = producer_instance_name + "-" + os.Getenv("KP")
        }

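        // Start the Kafka topic writer that handles all messages sent on
        // data_out_channel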
        go kafkacollector.Start_topic_writer(writer_control, data_out_channel)

        // Set up a goroutine for periodic type registration
        var event_chan = make(chan int) // Channel for stopping the goroutine
        go periodic_registration(event_chan)

        // Wait for term/int signal and try to shut down gracefully
        sigs := make(chan os.Signal, 1)
        signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
        go func() {
                sig := <-sigs
                fmt.Printf("Received signal %s - application will terminate\n", sig)
                event_chan <- 0 // Stop periodic registration
                datalock.Lock()
                defer datalock.Unlock()
                AppState = Terminating
        }()

        AppState = Running

        // Wait until all goroutines have exited
        runtime.Goexit()

        fmt.Println("main routine exit")
        fmt.Println("server stopped")
}

// == Core functions ==//
// Run periodic registration of producers
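// The first registration attempt is made after one second. On success the
// interval is set to registration_delay_long; on failure the delay grows by
// registration_delay_short per attempt until registration_delay_long is
// reached, after which it starts over from registration_delay_short. Delays
// are in seconds. Sending 0 on evtch stops the goroutine.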
func periodic_registration(evtch chan int) {
        var delay int = 1
        for {
                select {
                case msg := <-evtch:
                        if msg == 0 { // Stop thread
                                return
                        }
                case <-time.After(time.Duration(delay) * time.Second):
                        ok := register_producer()
                        if ok {
                                delay = registration_delay_long
                        } else {
                                if delay < registration_delay_long {
                                        delay += registration_delay_short
                                } else {
                                        delay = registration_delay_short
                                }
                        }
                }
        }
}

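// register_producer reads application_configuration.json, registers one ICS
// info type per configured producer data type (PUT to
// /data-producer/v1/info-types/{typeId}) and starts a type job for each.
// It returns false if the file cannot be read or parsed, or if any type
// registration fails.
//
// Hedged sketch of the configuration layout - the real JSON key names are
// defined by the dataTypes package and may differ; this only mirrors the
// fields used in this file (a list of types, each with an id and a Kafka
// input topic; values below are illustrative):
//
//	{
//	  "types": [
//	    { "id": "xml-file-data", "kafkaInputTopic": "<input-topic>" }
//	  ]
//	}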
func register_producer() bool {

        log.Info("Registering producer: ", producer_instance_name)

        file, err := os.ReadFile(config_file)
        if err != nil {
                log.Error("Cannot read config file: ", config_file)
                log.Error("Registering producer: ", producer_instance_name, " - failed")
                return false
        }
        data := dataTypes.DataTypes{}
        err = jsoniter.Unmarshal(file, &data)
        if err != nil {
                log.Error("Cannot parse config file: ", config_file)
                log.Error("Registering producer: ", producer_instance_name, " - failed")
                return false
        }
        var new_type_names []string

        for i := 0; i < len(data.ProdDataTypes); i++ {
                t1 := make(map[string]interface{})
                t2 := make(map[string]interface{})

                t2["schema"] = "http://json-schema.org/draft-07/schema#"
                t2["title"] = data.ProdDataTypes[i].ID
                t2["description"] = data.ProdDataTypes[i].ID
                t2["type"] = "object"

                t1["info_job_data_schema"] = t2

                json, err := jsoniter.Marshal(t1)
                if err != nil {
                        log.Error("Cannot create json for type: ", data.ProdDataTypes[i].ID)
                        log.Error("Registering producer: ", producer_instance_name, " - failed")
                        return false
                } else {
                        ok := utils.Send_http_request(json, http.MethodPut, "http://"+ics_server+"/data-producer/v1/info-types/"+data.ProdDataTypes[i].ID, true, creds_grant_type != "")
                        if !ok {
                                log.Error("Cannot register type: ", data.ProdDataTypes[i].ID)
                                log.Error("Registering producer: ", producer_instance_name, " - failed")
                                return false
                        }
                        new_type_names = append(new_type_names, data.ProdDataTypes[i].ID)
                }

        }

        log.Debug("Registering types: ", new_type_names)
        datalock.Lock()
        defer datalock.Unlock()

        for _, v := range data.ProdDataTypes {
                log.Info("Adding type job for type: ", v.ID, " Type added to configuration")
                start_type_job(v)
        }

        dataTypes.InfoTypes = data
        log.Debug("Datatypes: ", dataTypes.InfoTypes)
        log.Info("Registering producer: ", producer_instance_name, " - OK")
        return true
}

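// start_type_job sets up the control and data channels for one configured
// info type, starts the matching converter goroutine (see the switch below)
// and a Kafka topic reader for the type's input topic, and stores the
// resulting job record in dataTypes.TypeJobs.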
func start_type_job(dp dataTypes.DataType) {
        log.Info("Starting type job: ", dp.ID)
        job_record := dataTypes.TypeJobRecord{}

        job_record.Job_control = make(chan dataTypes.JobControl, 1)
        job_record.Reader_control = make(chan dataTypes.ReaderControl, 1)
        job_record.Data_in_channel = make(chan *dataTypes.KafkaPayload, reader_queue_length)
        job_record.InfoType = dp.ID
        job_record.InputTopic = dp.KafkaInputTopic
        job_record.GroupId = "kafka-procon-" + dp.ID
        job_record.ClientId = dp.ID + "-" + os.Getenv("KP")

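        // Start the converter goroutine for the supported xml file data types:
        // "xml-file-data-to-filestore" sends converted output towards
        // "pm-files-json", while "xml-file-data" uses the files_volume path.
        // Type ids without a case get no converter job.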
        switch dp.ID {
        case "xml-file-data-to-filestore":
                go kafkacollector.Start_job_xml_file_data(dp.ID, job_record.Job_control, job_record.Data_in_channel, data_out_channel, "", "pm-files-json")
        case "xml-file-data":
                go kafkacollector.Start_job_xml_file_data(dp.ID, job_record.Job_control, job_record.Data_in_channel, data_out_channel, files_volume, "")
        default:
        }

        go kafkacollector.Start_topic_reader(dp.KafkaInputTopic, dp.ID, job_record.Reader_control, job_record.Data_in_channel, job_record.GroupId, job_record.ClientId)

        dataTypes.TypeJobs[dp.ID] = job_record
        log.Debug("Type job input type: ", dp.InputJobType)
}