Test FTC100 fails since A1-SIM update 30/12930/1 master
author raviteja.karumuri <raviteja.karumuri@est.tech>
Mon, 27 May 2024 09:58:44 +0000 (10:58 +0100)
committer raviteja.karumuri <raviteja.karumuri@est.tech>
Mon, 27 May 2024 14:57:34 +0000 (15:57 +0100)
Issue-ID: NONRTRIC-1002
Change-Id: I40245a273b4af57599c91899166ea092b3ec4330
Signed-off-by: Raviteja Karumuri <raviteja.karumuri@est.tech>
37 files changed:
.github/workflows/gerrit-merge.yaml [moved from .github/workflows/gerrit-novote-merge.yaml with 98% similarity]
sample-services/hello-world-sme-invoker/src/main/java/org/oransc/nonrtric/sample/rest/HelloWorldSmeInvokerComponent.java
sample-services/ics-producer-consumer/.gitignore
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/consumer/SimpleConsumer.java
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/controllers/ConsumerController.java
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/dme/JobDataSchema.java [new file with mode: 0644]
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/messages/PropertiesHelper.java
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/repository/Job.java
sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/repository/Jobs.java
sample-services/ics-producer-consumer/consumer/src/main/resources/application.yaml
sample-services/ics-producer-consumer/helm-chart/ics-consumer/.helmignore [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-consumer/Chart.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-deployment.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-service.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-consumer/values.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-producer/.helmignore [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-producer/Chart.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-deployment.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-service.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/helm-chart/ics-producer/values.yaml [new file with mode: 0644]
sample-services/ics-producer-consumer/producer/src/main/java/com/demo/producer/controllers/ProducerController.java
sample-services/ics-producer-consumer/producer/src/main/java/com/demo/producer/messages/PropertiesHelper.java
sample-services/ics-producer-consumer/producer/src/main/java/com/demo/producer/repository/InfoType.java
sample-services/ics-producer-consumer/producer/src/main/java/com/demo/producer/repository/Jobs.java
sample-services/ics-producer-consumer/producer/src/main/resources/application.yaml
sample-services/ics-producer-consumer/start.sh
sample-services/ics-producer-consumer/utils.sh
test/auto-test/FTC100.sh
test/auto-test/FTC150.sh
test/auto-test/FTC2002.sh
test/auto-test/ONAP_UC.sh
test/auto-test/Suite-Verify-jobs.sh
test/auto-test/verify-jobs-nonrtric.sh
test/common/a1pms_api_functions.sh
test/common/sdnc_api_functions.sh
test/common/testcase_common.sh
test/common/testsuite_common.sh

similarity index 98%
rename from .github/workflows/gerrit-novote-merge.yaml
rename to .github/workflows/gerrit-merge.yaml
index 995b8bd..01cd32b 100644 (file)
@@ -66,7 +66,6 @@ jobs:
           gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
           gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
           vote-type: clear
-          comment-only: true
       - name: Allow replication
         run: sleep 10s
 
@@ -119,4 +118,3 @@ jobs:
           gerrit-change-number: ${{ inputs.GERRIT_CHANGE_NUMBER }}
           gerrit-patchset-number: ${{ inputs.GERRIT_PATCHSET_NUMBER }}
           vote-type: ${{ env.WORKFLOW_CONCLUSION }}
-          comment-only: true
index 7d76b4f..7902a3b 100644 (file)
@@ -60,24 +60,22 @@ public class HelloWorldSmeInvokerComponent {
                 throw new CapifAccessException("Unexpected error");\r
             }\r
 \r
-            //TODO The below should be uncommented once SME Manager provides an accessible URI\r
-\r
-//            String helloWorldEndpoint = "";\r
-//            List<String> apiSetEndpoints = getApiSetEndpoints(apiResponse, baseUrl);\r
-//            if (apiSetEndpoints != null && !apiSetEndpoints.isEmpty()) {\r
-//                helloWorldEndpoint = apiSetEndpoints.get(0);\r
-//            }\r
-//\r
-//            if (helloWorldEndpoint != null && !helloWorldEndpoint.isEmpty()) {\r
-//                try {\r
-//                    String responseHelloWorld = restTemplate.getForObject(helloWorldEndpoint, String.class);\r
-//                    logger.info("Response :- ", responseHelloWorld);\r
-//                } catch (IllegalArgumentException e) {\r
-//                    throw new CapifAccessException("Error accessing the URL :- " + helloWorldEndpoint);\r
-//                } catch (Exception e) {\r
-//                    throw new CapifAccessException("Unexpected error");\r
-//                }\r
-//            }\r
+            String helloWorldEndpoint = "";\r
+            List<String> apiSetEndpoints = getApiSetEndpoints(apiResponse, baseUrl);\r
+            if (apiSetEndpoints != null && !apiSetEndpoints.isEmpty()) {\r
+                helloWorldEndpoint = apiSetEndpoints.get(0);\r
+            }\r
+\r
+            if (helloWorldEndpoint != null && !helloWorldEndpoint.isEmpty()) {\r
+                try {\r
+                    String responseHelloWorld = restTemplate.getForObject(helloWorldEndpoint, String.class);\r
+                    logger.info("rApp SME Provider Response : {}", responseHelloWorld);\r
+                } catch (IllegalArgumentException e) {\r
+                    throw new CapifAccessException("Error accessing the URL :- " + helloWorldEndpoint);\r
+                } catch (Exception e) {\r
+                    throw new CapifAccessException("Unexpected error");\r
+                }\r
+            }\r
         }\r
     }\r
 \r
index 851f236..48d83d4 100644 (file)
@@ -21,6 +21,7 @@ logs/
 *.zip
 *.tar.gz
 *.rar
+*.tgz
 
 # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
 hs_err_pid*
index 430ab59..1d91afb 100644 (file)
@@ -72,7 +72,6 @@ public class SimpleConsumer extends AbstractSimpleKafka {
         // make the consumer available for graceful shutdown
         setKafkaConsumer(consumer);
         consumer.assign(Collections.singleton(new TopicPartition(topicName, 0)));
-        //consumer.seekToBeginning(consumer.assignment()); //--from-beginning
         int recNum = numOfRecs;
         while (recNum > 0) {
             ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(TIME_OUT_MS));
index 63cc215..30225c7 100644 (file)
@@ -32,8 +32,9 @@ import org.springframework.web.bind.annotation.RestController;
 import com.demo.consumer.repository.InfoType;
 import com.demo.consumer.repository.InfoTypes;
 import com.demo.consumer.repository.Job.Parameters;
+import com.demo.consumer.repository.Job.Parameters.KafkaDeliveryInfo;
 import com.demo.consumer.dme.ConsumerJobInfo;
-import com.demo.consumer.dme.ConsumerStatusInfo;
+import com.demo.consumer.dme.JobDataSchema;
 import com.demo.consumer.repository.Jobs;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -68,14 +69,22 @@ public class ConsumerController {
             this.jobs.addJob(request.infoTypeId, types.getType(request.infoTypeId), request.owner,
                     toJobParameters(request.jobDefinition));
         } catch (Exception e) {
-            log.error("Error adding the job" + infoJobId, e.getMessage());
+            log.error("Error adding the job " + infoJobId + "{}", e.getMessage());
         }
     }
 
     @PostMapping("/info-type-status")
     public void statusChange(@RequestBody String requestBody) {
-        ConsumerStatusInfo request = gson.fromJson(requestBody, ConsumerStatusInfo.class);
-        log.info("Add Status Job Info", request);
+        JobDataSchema request = gson.fromJson(requestBody, JobDataSchema.class);
+        log.debug("Body Received: {}" , requestBody);
+        try {
+            this.jobs.addJob(request.getInfo_type_id(), types.getType(request.getInfo_type_id()), "",
+                new Parameters(new KafkaDeliveryInfo(
+                        request.getJob_data_schema().getTopic(),
+                        request.getJob_data_schema().getBootStrapServers(), 0)));
+        } catch (Exception e) {
+            log.error("Error adding the info type " + request.getInfo_type_id() + "{}", e.getMessage());
+        }
     }
 
     private Parameters toJobParameters(Object jobData) {
diff --git a/sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/dme/JobDataSchema.java b/sample-services/ics-producer-consumer/consumer/src/main/java/com/demo/consumer/dme/JobDataSchema.java
new file mode 100644 (file)
index 0000000..678f536
--- /dev/null
@@ -0,0 +1,59 @@
+/*-
+ * ========================LICENSE_START=================================
+ * O-RAN-SC
+ *
+ * Copyright (C) 2024: OpenInfra Foundation Europe
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ========================LICENSE_END===================================
+ */
+
+package com.demo.consumer.dme;
+
+import com.google.gson.annotations.SerializedName;
+import lombok.Data;
+import lombok.Getter;
+import lombok.Setter;
+
+import com.demo.consumer.repository.Job.Parameters.KafkaDeliveryInfo;
+import com.google.gson.Gson;
+
+@Data
+public class JobDataSchema {
+
+    public enum InfoJobStatusValues {
+        REGISTERED, UNREGISTERED
+    }
+    @SerializedName("info_type_id")
+    private String info_type_id;
+    @SerializedName("job_data_schema")
+    private DataSchema job_data_schema;
+    @SerializedName("status")
+    private InfoJobStatusValues status;
+
+    @Override
+    public String toString() {
+        return new Gson().toJson(this);
+    }
+
+    @Getter
+    @Setter
+    public class DataSchema {
+        private String title;
+        private String description;
+        @SerializedName("topic")
+        private String topic;
+        @SerializedName("bootStrapServers")
+        private String bootStrapServers;
+    }
+}
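
(Not part of the commit: a minimal sketch of how this class is consumed. The new /info-type-status handler in ConsumerController pushes the raw request body through Gson and then reads the topic and bootstrap servers out of the nested schema. The payload below is illustrative, with field names taken from the @SerializedName annotations above; it is not a captured ICS callback.)

import com.demo.consumer.dme.JobDataSchema;
import com.google.gson.Gson;

public class JobDataSchemaParseExample {
    public static void main(String[] args) {
        // Illustrative callback body; real bodies arrive via the POST to /info-type-status.
        String body = "{"
                + "\"info_type_id\":\"type1\","
                + "\"job_data_schema\":{"
                +     "\"title\":\"STD_Type1_1.0.0\","
                +     "\"description\":\"Type 1\","
                +     "\"topic\":\"mytopic\","
                +     "\"bootStrapServers\":\"kafka-zkless:9092\"},"
                + "\"status\":\"REGISTERED\"}";

        JobDataSchema schema = new Gson().fromJson(body, JobDataSchema.class);

        // Same getters ConsumerController uses to build the KafkaDeliveryInfo for the new job.
        System.out.println(schema.getInfo_type_id());                          // type1
        System.out.println(schema.getJob_data_schema().getTopic());            // mytopic
        System.out.println(schema.getJob_data_schema().getBootStrapServers()); // kafka-zkless:9092
    }
}
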
index 18be2f8..c4bd436 100644 (file)
@@ -33,26 +33,41 @@ import com.demo.consumer.consumer.SimpleConsumer;
 @Component
 public class PropertiesHelper {
     private static final Logger log = LoggerFactory.getLogger(PropertiesHelper.class);
+    private static String kafkaServers = null;
 
     public static Properties getProperties() throws Exception {
-        Properties props = null;
+        Properties props = new Properties();
         try (InputStream input = SimpleConsumer.class.getClassLoader().getResourceAsStream("config.properties")) {
-            props = new Properties();
             if (input == null) {
-                log.error("Found no configuration file in resources");
-                throw new Exception("Sorry, unable to find config.properties");
+                log.error("Failed to load configuration file 'config.properties'");
+                throw new IOException("Configuration file 'config.properties' not found");
             }
             props.load(input);
-            String kafkaServers = System.getenv("KAFKA_SERVERS");
-            if (kafkaServers != null) {
+            setBootstrapServers(props);
+        } catch (IOException e) {
+            log.error("Error reading configuration file: ", e);
+            throw e;
+        }
+        return props;
+    }
+
+    private static void setBootstrapServers(Properties props) {
+        if (kafkaServers != null && !kafkaServers.isEmpty()) {
+            props.setProperty("bootstrap.servers", kafkaServers);
+            log.info("Using actively bootstrap servers: {}", kafkaServers);
+        } else {
+            String kafkaServersEnv = System.getenv("KAFKA_SERVERS");
+            if (kafkaServersEnv != null && !kafkaServersEnv.isEmpty()) {
+                kafkaServers = kafkaServersEnv;
                 props.setProperty("bootstrap.servers", kafkaServers);
                 props.setProperty("bootstrap.servers", kafkaServers);
-                log.info("Env variable KAFKA_SERVERS found, adding: " + kafkaServers);
+                log.info("Using environment variable KAFKA_SERVERS: {}", kafkaServers);
             } else {
-                log.info("Env variable KAFKA_SERVERS not found, defaulting to config file");
+                log.info("Environment variable KAFKA_SERVERS not found, defaulting to config file");
             }
-        } catch (IOException e) {
-            log.error("Error reading configuration file: ", e.getMessage());
         }
-        return props;
+    }
+
+    public static void setKafkaServers(String servers) {
+        kafkaServers = servers;
     }
 }
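
(Not part of the commit: a minimal usage sketch of the reworked PropertiesHelper, which the producer-side twin further below mirrors. bootstrap.servers is now resolved in a fixed order: a value pushed in via setKafkaServers(), which Jobs.addJob calls with the job's deliveryInfo.bootStrapServers, then the KAFKA_SERVERS environment variable, then whatever config.properties ships with. Assumes config.properties is on the classpath; the broker address is illustrative.)

import java.util.Properties;

import com.demo.consumer.messages.PropertiesHelper;

public class BootstrapServersExample {
    public static void main(String[] args) throws Exception {
        // Nothing pushed yet: falls back to KAFKA_SERVERS if set, otherwise to config.properties.
        Properties fromDefaults = PropertiesHelper.getProperties();
        System.out.println(fromDefaults.getProperty("bootstrap.servers"));

        // A registered job carrying deliveryInfo.bootStrapServers pushes the broker address here;
        // every later getProperties() call resolves to it.
        PropertiesHelper.setKafkaServers("kafka-zkless:9092"); // illustrative address
        Properties fromJob = PropertiesHelper.getProperties();
        System.out.println(fromJob.getProperty("bootstrap.servers"));
    }
}
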
index ba12345..e681714 100644 (file)
@@ -22,6 +22,7 @@ package com.demo.consumer.repository;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 
+import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
@@ -29,9 +30,11 @@ import lombok.ToString;
 
 @ToString
 public class Job {
+    @AllArgsConstructor
     @Builder
     public static class Parameters {
 
+        @AllArgsConstructor
         @Builder
         @EqualsAndHashCode
         public static class KafkaDeliveryInfo {
index 13f3180..eab8035 100644 (file)
@@ -28,6 +28,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Component;
 
+import com.demo.consumer.messages.PropertiesHelper;
 import com.demo.consumer.repository.Job.Parameters;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -55,9 +56,20 @@ public class Jobs {
 
     public void addJob(String id, InfoType type, String owner, Parameters parameters) {
         Job job = new Job(id, type, owner, parameters);
+        setKafkaServersEnvironment(job);
         this.put(job);
     }
 
+    private void setKafkaServersEnvironment(Job job) {
+        String kafkaServers = job.getParameters().getDeliveryInfo().getBootStrapServers();
+        if (kafkaServers != null && !kafkaServers.isEmpty()) {
+            PropertiesHelper.setKafkaServers(kafkaServers);
+            logger.info("Setting variable bootStrapServers: {}", kafkaServers);
+        } else {
+            logger.warn("bootStrapServers is not set for job: {}", job.getId());
+        }
+    }
+
     private synchronized void put(Job job) {
         logger.debug("Put job: {}", job.getId());
         allJobs.put(job.getId(), job);
index eed8326..1925f5c 100644 (file)
@@ -19,7 +19,7 @@ server:
 
 vars:
   time: 1000
-  autostart: true
+  autostart: false
   topic: mytopic #This topic is used only in autostart
 
 spring:
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-consumer/.helmignore b/sample-services/ics-producer-consumer/helm-chart/ics-consumer/.helmignore
new file mode 100644 (file)
index 0000000..0e8a0eb
--- /dev/null
@@ -0,0 +1,23 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-consumer/Chart.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-consumer/Chart.yaml
new file mode 100644 (file)
index 0000000..8b972e8
--- /dev/null
@@ -0,0 +1,6 @@
+apiVersion: v2
+name: ics-consumer
+description: A Helm chart for Kubernetes
+type: application
+version: 0.1.0
+appVersion: "1.16.0"
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-deployment.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-deployment.yaml
new file mode 100644 (file)
index 0000000..9c107fe
--- /dev/null
@@ -0,0 +1,41 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ .Values.consumer.name }}
+spec:
+  replicas: {{ .Values.consumer.replicaCount }}
+  selector:
+    matchLabels:
+      app: {{ .Values.consumer.name }}
+  template:
+    metadata:
+      labels:
+        app: {{ .Values.consumer.name }}
+    spec:
+      containers:
+      - name: {{ .Values.consumer.name }}
+        image: "{{ .Values.consumer.image.repository }}:{{ .Values.consumer.image.tag }}"
+        ports:
+        - containerPort: {{ .Values.consumer.service.port }}
+        env:
+        - name: KAFKA_SERVERS
+          value: "{{ .Values.kafka.host }}:{{ .Values.kafka.port }}"
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-service.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-consumer/templates/consumer-service.yaml
new file mode 100644 (file)
index 0000000..6b33838
--- /dev/null
@@ -0,0 +1,31 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+apiVersion: v1
+kind: Service
+metadata:
+  name: {{ .Values.consumer.name }}
+spec:
+  type: NodePort
+  ports:
+    - port: {{ .Values.consumer.service.port }}
+      targetPort: {{ .Values.consumer.service.port }}
+      nodePort: {{ .Values.consumer.service.nodePort }}
+  selector:
+    app: {{ .Values.consumer.name }}
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-consumer/values.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-consumer/values.yaml
new file mode 100644 (file)
index 0000000..5e6c5b0
--- /dev/null
@@ -0,0 +1,32 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+consumer:
+  name: kafka-consumer
+  replicaCount: 1
+  image:
+    repository: nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-sample-icsconsumer
+    tag: 0.0.1
+  service:
+    port: 8081
+    nodePort: 30081
+
+kafka:
+  host: kafka-service
+  port: 9092
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-producer/.helmignore b/sample-services/ics-producer-consumer/helm-chart/ics-producer/.helmignore
new file mode 100644 (file)
index 0000000..0e8a0eb
--- /dev/null
@@ -0,0 +1,23 @@
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*.orig
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
+.vscode/
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-producer/Chart.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-producer/Chart.yaml
new file mode 100644 (file)
index 0000000..e26165b
--- /dev/null
@@ -0,0 +1,6 @@
+apiVersion: v2
+name: ics-producer
+description: A Helm chart for Kubernetes
+type: application
+version: 0.1.0
+appVersion: "1.16.0"
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-deployment.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-deployment.yaml
new file mode 100644 (file)
index 0000000..b7433fc
--- /dev/null
@@ -0,0 +1,41 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: {{ .Values.producer.name }}
+spec:
+  replicas: {{ .Values.producer.replicaCount }}
+  selector:
+    matchLabels:
+      app: {{ .Values.producer.name }}
+  template:
+    metadata:
+      labels:
+        app: {{ .Values.producer.name }}
+    spec:
+      containers:
+      - name: {{ .Values.producer.name }}
+        image: "{{ .Values.producer.image.repository }}:{{ .Values.producer.image.tag }}"
+        ports:
+        - containerPort: {{ .Values.producer.service.port }}
+        env:
+        - name: KAFKA_SERVERS
+          value: "{{ .Values.kafka.host }}:{{ .Values.kafka.port }}"
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-service.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-producer/templates/producer-service.yaml
new file mode 100644 (file)
index 0000000..8e17906
--- /dev/null
@@ -0,0 +1,31 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+apiVersion: v1
+kind: Service
+metadata:
+  name: {{ .Values.producer.name }}
+spec:
+  type: NodePort
+  ports:
+    - port: {{ .Values.producer.service.port }}
+      targetPort: {{ .Values.producer.service.port }}
+      nodePort: {{ .Values.producer.service.nodePort }}
+  selector:
+    app: {{ .Values.producer.name }}
diff --git a/sample-services/ics-producer-consumer/helm-chart/ics-producer/values.yaml b/sample-services/ics-producer-consumer/helm-chart/ics-producer/values.yaml
new file mode 100644 (file)
index 0000000..9a71198
--- /dev/null
@@ -0,0 +1,32 @@
+#
+#   ========================LICENSE_START=================================
+#   O-RAN-SC
+#
+#   Copyright (C) 2024: OpenInfra Foundation Europe
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#        http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#   ========================LICENSE_END===================================
+#
+producer:
+  name: kafka-producer
+  replicaCount: 1
+  image:
+    repository: nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-sample-icsproducer
+    tag: 0.0.1
+  service:
+    port: 8080
+    nodePort: 30080
+
+kafka:
+  host: kafka-service
+  port: 9092
index 5bc6821..bbf91b9 100644 (file)
@@ -47,14 +47,11 @@ import com.google.gson.GsonBuilder;
 @RequestMapping(path = "/producer", produces = "application/json")
 public class ProducerController {
     private static final Logger log = LoggerFactory.getLogger(ProducerController.class);
-
     private static Gson gson = new GsonBuilder().create();
-
     private final Jobs jobs;
     private final InfoTypes types;
     private String topicName = "mytopic";
 
-
     public ProducerController(@Autowired Jobs jobs, @Autowired InfoTypes types) {
         this.jobs = jobs;
         this.types = types;
index 7dc2b1e..d7d9f98 100644 (file)
@@ -33,26 +33,41 @@ import com.demo.producer.producer.SimpleProducer;
 @Component
 public class PropertiesHelper {
     private static final Logger log = LoggerFactory.getLogger(PropertiesHelper.class);
+    private static String kafkaServers = null;
 
     public static Properties getProperties() throws Exception {
-        Properties props = null;
+        Properties props = new Properties();
         try (InputStream input = SimpleProducer.class.getClassLoader().getResourceAsStream("config.properties")) {
-            props = new Properties();
             if (input == null) {
-                log.error("Found no configuration file in resources");
-                throw new Exception("Sorry, unable to find config.properties");
+                log.error("Failed to load configuration file 'config.properties'");
+                throw new IOException("Configuration file 'config.properties' not found");
             }
             props.load(input);
-            String kafkaServers = System.getenv("KAFKA_SERVERS");
-            if (kafkaServers != null) {
+            setBootstrapServers(props);
+        } catch (IOException e) {
+            log.error("Error reading configuration file: ", e);
+            throw e;
+        }
+        return props;
+    }
+
+    private static void setBootstrapServers(Properties props) {
+        if (kafkaServers != null && !kafkaServers.isEmpty()) {
+            props.setProperty("bootstrap.servers", kafkaServers);
+            log.info("Using actively bootstrap servers: {}", kafkaServers);
+        } else {
+            String kafkaServersEnv = System.getenv("KAFKA_SERVERS");
+            if (kafkaServersEnv != null && !kafkaServersEnv.isEmpty()) {
+                kafkaServers = kafkaServersEnv;
                 props.setProperty("bootstrap.servers", kafkaServers);
                 props.setProperty("bootstrap.servers", kafkaServers);
-                log.info("Env variable KAFKA_SERVERS found, adding: " + kafkaServers);
+                log.info("Using environment variable KAFKA_SERVERS: {}", kafkaServers);
             } else {
-                log.info("Env variable KAFKA_SERVERS not found, defaulting to config file");
+                log.info("Environment variable KAFKA_SERVERS not found, defaulting to config file");
             }
-        } catch (IOException e) {
-            log.error("Error reading configuration file: ", e.getMessage());
         }
-        return props;
+    }
+
+    public static void setKafkaServers(String servers) {
+        kafkaServers = servers;
     }
 }
index f9cffd8..e929e44 100644 (file)
@@ -28,6 +28,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.stereotype.Component;
 
+import com.demo.producer.messages.PropertiesHelper;
 import com.demo.producer.repository.Job.Parameters;
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -55,9 +56,20 @@ public class Jobs {
 
     public void addJob(String id, InfoType type, String owner, Parameters parameters) {
         Job job = new Job(id, type, owner, parameters);
+        setKafkaServersEnvironment(job);
         this.put(job);
     }
 
+    private void setKafkaServersEnvironment(Job job) {
+        String kafkaServers = job.getParameters().getDeliveryInfo().getBootStrapServers();
+        if (kafkaServers != null && !kafkaServers.isEmpty()) {
+            PropertiesHelper.setKafkaServers(kafkaServers);
+            logger.info("Setting variable bootStrapServers: {}", kafkaServers);
+        } else {
+            logger.warn("bootStrapServers is not set for job: {}", job.getId());
+        }
+    }
+
     private synchronized void put(Job job) {
         logger.debug("Put job: {}", job.getId());
         allJobs.put(job.getId(), job);
index 31966df..28ae26c 100644 (file)
@@ -16,7 +16,7 @@
 #
 vars:
   time: 1000
-  autostart: true
+  autostart: false
   topic: mytopic #This topic is used only in autostart
 
 spring:
index 18317a0..7ddc908 100755 (executable)
@@ -71,16 +71,6 @@ wait_for_container "kafka-consumer" "Started Application"
 echo "Kafka container is up and running. Starting producer and consumer..."
 space
 
 echo "Kafka container is up and running. Starting producer and consumer..."
 space
 
-echo "Start 1 Producer on mytopic"
-curl -X GET http://localhost:8080/startProducer/mytopic
-space
-
-echo "Start 1 Consumer on mytopic"
-curl -X GET http://localhost:8081/startConsumer/mytopic
-space
-
-sleep 10
-
 echo "Sending type1 to ICS"
 curl -X 'PUT' \
   'http://localhost:8083/data-producer/v1/info-types/type1' \
 echo "Sending type1 to ICS"
 curl -X 'PUT' \
   'http://localhost:8083/data-producer/v1/info-types/type1' \
@@ -91,8 +81,9 @@ curl -X 'PUT' \
     "$schema":"http://json-schema.org/draft-07/schema#",
     "title":"STD_Type1_1.0.0",
     "description":"Type 1",
     "$schema":"http://json-schema.org/draft-07/schema#",
     "title":"STD_Type1_1.0.0",
     "description":"Type 1",
-    "type":"object"
-  }
+    "topic": "mytopic",
+    "bootStrapServers": "kafka-zkless:9092"
+    }
 }'
 
 echo "Getting types from ICS"
@@ -127,8 +118,8 @@ curl -X 'PUT' \
   "job_definition": {
     "deliveryInfo": {
       "topic": "mytopic",
   "job_definition": {
     "deliveryInfo": {
       "topic": "mytopic",
-      "bootStrapServers": "http://kafka-zkless:9092",
-      "numberOfMessages": 0
+      "bootStrapServers": "kafka-zkless:9092",
+      "numberOfMessages": 100
     }
   },
   "job_result_uri": "http://kafka-producer:8080/producer/job",
@@ -145,14 +136,40 @@ curl -X 'PUT' \
   -H 'accept: application/json' \
   -H 'Content-Type: application/json' \
   -d '{
-  "status_result_uri": "http://kafka-consumer:8081/info-type-status",
-  "owner": "owner"
+  "status_result_uri": "http://kafka-consumer:8081/consumer/info-type-status",
+  "owner": "demo"
 }'
 echo "Getting Consumer Subscription Job infos from ICS"
 curl -X 'GET' 'http://localhost:8083/data-consumer/v1/info-type-subscription/1' -H 'accept: application/json'
 space
 
-sleep 5
+#To Set Kafka Broker in Consumer
+echo "Sending type1 to ICS to use the callback"
+curl -X 'PUT' \
+  'http://localhost:8083/data-producer/v1/info-types/type1' \
+  -H 'accept: application/json' \
+  -H 'Content-Type: application/json' \
+  -d '{
+  "info_job_data_schema": {
+    "$schema":"http://json-schema.org/draft-07/schema#",
+    "title":"STD_Type1_1.0.0",
+    "description":"Type 1",
+    "topic": "mytopic",
+    "bootStrapServers": "kafka-zkless:9092"
+    }
+}'
+
+#Using the autostart flag in the application.yaml
+echo "Start 1 Producer on mytopic"
+curl -X GET http://localhost:8080/startProducer/mytopic
+space
+
+echo "Start 1 Consumer on mytopic"
+curl -X GET http://localhost:8081/startConsumer/mytopic
+space
+
+sleep 10
+
 echo "ICS Producer Docker logs "
 docker logs informationcoordinatorservice | grep -E 'o.o.i.c.r1producer.ProducerCallbacks|o.o.i.repository.InfoTypeSubscriptions'
 space
 echo "ICS Producer Docker logs "
 docker logs informationcoordinatorservice | grep -E 'o.o.i.c.r1producer.ProducerCallbacks|o.o.i.repository.InfoTypeSubscriptions'
 space
@@ -170,6 +187,7 @@ containers=("kafka-producer" "kafka-consumer")
 for container in "${containers[@]}"; do
   if docker logs "$container" | grep -q ERROR; then
     echo "Errors found in logs of $container"
+    docker logs "$container" | grep ERROR
     echo "FAIL"
     exit 1
   else
     echo "FAIL"
     exit 1
   else
index 68d19ec..af1cfa0 100644 (file)
@@ -69,28 +69,45 @@ checkDockerCompose() {
     fi
 }
 
-# Function to wait for a Docker container to be running and log a specific string
+# Function to wait for a Docker container to be running and log a specific string with a maximum timeout of 20 minutes
 wait_for_container() {
     local container_name="$1"
     local log_string="$2"
+    local timeout=1200  # Timeout set to 20 minutes (20 minutes * 60 seconds)
+
+    local start_time=$(date +%s)
+    local end_time=$((start_time + timeout))
 
     while ! docker inspect "$container_name" &>/dev/null; do
         echo "Waiting for container '$container_name' to be created..."
         sleep 5
+        if [ "$(date +%s)" -ge "$end_time" ]; then
+            echo "Timeout: Container creation exceeded 20 minutes."
+            exit 1
+        fi
     done
 
     while [ "$(docker inspect -f '{{.State.Status}}' "$container_name")" != "running" ]; do
         echo "Waiting for container '$container_name' to be running..."
         sleep 5
+        if [ "$(date +%s)" -ge "$end_time" ]; then
+            echo "Timeout: Container start exceeded 20 minutes."
+            exit 1
+        fi
     done
 
     # Check container logs for the specified string
     while ! docker logs "$container_name" 2>&1 | grep "$log_string"; do
         echo "Waiting for '$log_string' in container logs of '$container_name'..."
         sleep 5
+        if [ "$(date +%s)" -ge "$end_time" ]; then
+            echo "Timeout: Log string not found within 20 minutes."
+            exit 1
+        fi
     done
 }
 
+
 space() {
     echo ""
     echo "++++++++++++++++++++++++++++++++++++++++++++++++++++"
index a473338..04ff8d2 100755 (executable)
@@ -492,10 +492,20 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         a1pms_api_get_policy_status 404 1
         a1pms_api_get_policy_status 404 2
-        VAL='NOT IN EFFECT'
-        a1pms_api_get_policy_status 200 5000 OSC "$VAL" "false"
+        if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H] ]] || [[ $TEST_ENV_PROFILE =~ ^ONAP-[A-L] ]]; then
+          VAL='NOT IN EFFECT'
+          VAL2="false"
+          VAL3=EMPTY
+          VAL4=EMPTY
+        else
+          VAL="NOT_ENFORCED"
+          VAL2="OTHER_REASON"
+          VAL3="NOT_ENFORCED"
+          VAL4="OTHER_REASON"
+        fi
+        a1pms_api_get_policy_status 200 5000 OSC "$VAL" "$VAL2"
         a1pms_api_get_policy_status 200 5100 STD "UNDEFINED"
-        a1pms_api_get_policy_status 200 5200 STD2 EMPTY EMPTY
+        a1pms_api_get_policy_status 200 5200 STD2 $VAL3 $VAL4
 
 
         deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
 
 
         deviation "TR10 - a1pms allows policy creation on unregistered service (side effect of orig. problem)- test combo $interface and $__httpx"
index 0a53d59..103d74c 100755 (executable)
@@ -128,9 +128,14 @@ for __nb_httpx in $NB_TESTED_PROTOCOLS ; do
 
         controller_api_get_A1_policy_status 200 OSC ricsim_g1_1 1 4000
         controller_api_get_A1_policy_status 200 STD ricsim_g2_1 5000
-
-        VAL='NOT_ENFORCED'
-        controller_api_get_A1_policy_status 200 OSC ricsim_g1_1 1 4000 "$VAL" "OTHER_REASON"
+        if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H] ]] || [[ $TEST_ENV_PROFILE =~ ^ONAP-[A-L] ]]; then
+          VAL='NOT IN EFFECT'
+          VAL2="false"
+        else
+          VAL='NOT_ENFORCED'
+          VAL2="OTHER_REASON"
+        fi
+        controller_api_get_A1_policy_status 200 OSC ricsim_g1_1 1 4000 "$VAL" "$VAL2"
         controller_api_get_A1_policy_status 200 STD ricsim_g2_1 5000 "UNDEFINED"
 
         RESP=202
index 547492c..c8d3f6e 100755 (executable)
@@ -143,7 +143,13 @@ for __nb_httpx in $NB_TESTED_PROTOCOLS ; do
         controller_api_get_A1_policy_status 200 OSC ricsim_g1_1 1 4000
         controller_api_get_A1_policy_status 200 STD ricsim_g2_1 5000
 
-        VAL='NOT IN EFFECT'
+        if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H]$  || $TEST_ENV_PROFILE =~ ^ONAP-[A-L]$ ]]; then
+            VAL='NOT IN EFFECT'
+            VAL2="false"
+        else
+            VAL='NOT_ENFORCED'
+            VAL2="OTHER_REASON"
+        fi
         controller_api_get_A1_policy_status 200 OSC ricsim_g1_1 1 4000 "$VAL" "false"
         controller_api_get_A1_policy_status 200 STD ricsim_g2_1 5000 "UNDEFINED"
 
index 6c699d1..3dc724d 100755 (executable)
@@ -283,11 +283,17 @@ for interface in $TESTED_VARIANTS ; do
     done
 
     # Check status OSC
-    VAL='NOT IN EFFECT'
+    if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H] ]] || [[ $TEST_ENV_PROFILE =~ ^ONAP-[A-L] ]]; then
+      VAL='NOT IN EFFECT'
+      VAL2="false"
+    else
+      VAL='NOT_ENFORCED'
+      VAL2="OTHER_REASON"
+    fi
     for ((i=1; i<=$OSC_NUM_RICS; i++))
     do
-        a1pms_api_get_policy_status 200 $((3000+$i)) OSC "$VAL" "false"
-        a1pms_api_get_policy_status 200 $((4000+$i)) OSC "$VAL" "false"
+        a1pms_api_get_policy_status 200 $((3000+$i)) OSC "$VAL" "$VAL2"
+        a1pms_api_get_policy_status 200 $((4000+$i)) OSC "$VAL" "$VAL2"
     done
 
     # Note: Status callback is not tested since this callback (http POST) is made from the
index d53af8f..ef97938 100644 (file)
@@ -31,4 +31,6 @@ suite_setup
 
 ##########################################
 
-suite_complete
\ No newline at end of file
+suite_complete
+
+exit
index 7bee3fa..e93e1a7 100644 (file)
 TEST_DIRECTORY="test/auto-test"
 TEST_SCRIPT="./Suite-Verify-jobs.sh"
 DOCKER_COMPOSE_VERSION="v2.21.0"
+PULL_IMAGE_TYPE="remote-remove"
+RUN_MODE="docker"
+IMAGE_VERSION="release"
+ENV_FLAG="--env-file"
+ENV_FILE="../common/test_env-oran-h-release.sh"
 
 # Check if jq is installed, and install it if not
 if ! command -v jq &> /dev/null; then
@@ -50,7 +55,8 @@ fi
 
 cd "$TEST_DIRECTORY"
 sudo chmod 775 "$TEST_SCRIPT"
 
 cd "$TEST_DIRECTORY"
 sudo chmod 775 "$TEST_SCRIPT"
-"$TEST_SCRIPT" remote-remove docker release --env-file ../common/test_env-oran-h-release.sh
+"$TEST_SCRIPT" $PULL_IMAGE_TYPE $RUN_MODE $IMAGE_VERSION $ENV_FLAG $ENV_FILE
+exit_val=$?
 
 # Remove docker-compose after tests are done
 if command -v docker-compose &> /dev/null; then
@@ -62,3 +68,5 @@ if command -v jq &> /dev/null; then
     echo "Removing jq..."
     sudo apt-get remove -y jq
 fi
     echo "Removing jq..."
     sudo apt-get remove -y jq
 fi
+
+exit $exit_val
index b80bb25..a0062ac 100644 (file)
@@ -1830,11 +1830,18 @@ a1pms_api_get_policy_status() {
                fi
                targetJson=$targetJson"}"
        elif [ "$3" == "OSC" ]; then
-               targetJson="{\"instance_status\":\"$4\""
-               if [ $# -eq 5 ]; then
-                       targetJson=$targetJson",\"has_been_deleted\":\"$5\""
-               fi
-               targetJson=$targetJson",\"created_at\":\"????\"}"
+         if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H] ]] || [[ $TEST_ENV_PROFILE =~ ^ONAP-[A-L] ]]; then
+      targetJson="{\"instance_status\":\"$4\""
+      if [ $# -eq 5 ]; then
+        targetJson=$targetJson",\"has_been_deleted\":\"$5\""
+      fi
+      targetJson=$targetJson",\"created_at\":\"????\"}"
+    else
+      targetJson="{\"enforceStatus\":\"$4\""
+      if [ $# -eq 5 ]; then
+        targetJson=$targetJson",\"enforceReason\":\"$5\"}"
+      fi
+    fi
        else
                __print_err "<response-code> (STD <enforce-status> [<reason>])|(OSC <instance-status> <has-been-deleted>)" $@
                return 1
index 4fd9de4..a18afe3 100644 (file)
@@ -588,8 +588,14 @@ controller_api_get_A1_policy_status() {
     if [ $# -ge 5 ] && [ $2 == "OSC" ]; then
         url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5/status"
         if [ $# -gt 5 ]; then
-            targetJson="{\"enforceStatus\":\"$6\""
-            targetJson=$targetJson",\"enforceReason\":\"$7\"}"
+            if [[ $TEST_ENV_PROFILE =~ ^ORAN-[A-H] ]] || [[ $TEST_ENV_PROFILE =~ ^ONAP-[A-L] ]]; then
+              targetJson="{\"instance_status\":\"$6\""
+              targetJson=$targetJson",\"has_been_deleted\":\"$7\""
+              targetJson=$targetJson",\"created_at\":\"????\"}"
+            else
+              targetJson="{\"enforceStatus\":\"$6\""
+              targetJson=$targetJson",\"enforceReason\":\"$7\"}"
+            fi
         fi
                paramError=0
     elif [ $# -ge 4 ] && [ $2 == "STD" ]; then
index 3f5970c..21b67e9 100755 (executable)
@@ -3055,7 +3055,7 @@ __check_service_start() {
                                #If response is too long, truncate
                                result="...response text too long, omitted"
                        fi
-                       echo -ne " Waiting for {ENTITY} $BOLD${appname}$EBOLD service status on ${3}, result: $result${SAMELINE}"
+                       echo -ne " Waiting for ${ENTITY} $BOLD${appname}$EBOLD service status on ${url}, result: $result${SAMELINE}"
                        echo -ne " The ${ENTITY} $BOLD${appname}$EBOLD$GREEN is alive$EGREEN, responds to service status:$GREEN $result $EGREEN on ${url} after $(($SECONDS-$TSTART)) seconds"
                        a1pmsst=true
                        break
                        echo -ne " The ${ENTITY} $BOLD${appname}$EBOLD$GREEN is alive$EGREEN, responds to service status:$GREEN $result $EGREEN on ${url} after $(($SECONDS-$TSTART)) seconds"
                        a1pmsst=true
                        break
index 4117cdb..5728510 100644 (file)
@@ -94,8 +94,8 @@ suite_complete() {
 
     total=$((TCSUITE_PASS_CTR+TCSUITE_FAIL_CTR))
     if [ $TCSUITE_CTR -eq 0 ]; then
-               echo -e "\033[1mNo test cases seem to have executed. Check the script....\033[0m"
-       elif [ $total != $TCSUITE_CTR ]; then
+                   echo -e "\033[1mNo test cases seem to have executed. Check the script....\033[0m"
+         elif [ $total != $TCSUITE_CTR ]; then
         echo -e "\033[1mTotal number of test cases does not match the sum of passed and failed test cases. Check the script....\033[0m"
     fi
     echo "Number of test cases : " $TCSUITE_CTR
@@ -108,7 +108,13 @@ suite_complete() {
     echo "FAIL test cases"
     cat .tmp_tcsuite_fail
     echo ""
     echo "FAIL test cases"
     cat .tmp_tcsuite_fail
     echo ""
+    if [ $TCSUITE_FAIL_CTR -ne 0 ]; then
+      echo "###################################      Test suite completed with Tests FAIL     ##############################"
+      echo "#################################################################################################"
+    else
+      echo "###################################      Test suite completed      ##############################"
+      echo "#################################################################################################"
+    fi
 
 
-    echo "###################################      Test suite completed      ##############################"
-    echo "#################################################################################################"
+    exit $TCSUITE_FAIL_CTR
 }
\ No newline at end of file