Merge "Updates for F release"
authorHenrik Andersson <henrik.b.andersson@est.tech>
Tue, 25 Jan 2022 09:26:22 +0000 (09:26 +0000)
committerGerrit Code Review <gerrit@o-ran-sc.org>
Tue, 25 Jan 2022 09:26:22 +0000 (09:26 +0000)
79 files changed:
a1-policy-management-service/Dockerfile
a1-policy-management-service/pom.xml
dmaap-adaptor-java/Dockerfile
dmaap-adaptor-java/api/api.json
dmaap-adaptor-java/api/api.yaml
dmaap-adaptor-java/pom.xml
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaTopicConsumers.java
dmaap-adaptor-java/src/main/resources/typeSchemaKafka.json
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/ApplicationTest.java
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithIcs.java
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
dmaap-mediator-producer/Dockerfile
dmaap-mediator-producer/README.md
dmaap-mediator-producer/api/docs.go [new file with mode: 0644]
dmaap-mediator-producer/api/swagger.json [new file with mode: 0644]
dmaap-mediator-producer/api/swagger.yaml [new file with mode: 0644]
dmaap-mediator-producer/configs/typeSchemaDmaap.json [new file with mode: 0644]
dmaap-mediator-producer/configs/typeSchemaKafka.json [new file with mode: 0644]
dmaap-mediator-producer/configs/type_config.json
dmaap-mediator-producer/container-tag.yaml [new file with mode: 0644]
dmaap-mediator-producer/generate_swagger_docs.sh [moved from dmaap-mediator-producer/build_and_test.sh with 86% similarity]
dmaap-mediator-producer/go.mod
dmaap-mediator-producer/go.sum
dmaap-mediator-producer/internal/config/config.go
dmaap-mediator-producer/internal/config/config_test.go
dmaap-mediator-producer/internal/config/registrator.go
dmaap-mediator-producer/internal/config/registrator_test.go
dmaap-mediator-producer/internal/jobs/jobs.go
dmaap-mediator-producer/internal/jobs/jobs_test.go
dmaap-mediator-producer/internal/kafkaclient/kafkaclient.go [new file with mode: 0644]
dmaap-mediator-producer/internal/restclient/HTTPClient.go
dmaap-mediator-producer/internal/restclient/HTTPClient_test.go
dmaap-mediator-producer/internal/server/server.go
dmaap-mediator-producer/internal/server/server_test.go
dmaap-mediator-producer/main.go
dmaap-mediator-producer/main_test.go [new file with mode: 0644]
dmaap-mediator-producer/mocks/KafkaConsumer.go [new file with mode: 0644]
dmaap-mediator-producer/mocks/KafkaFactory.go [new file with mode: 0644]
dmaap-mediator-producer/mocks/jobshandler/JobsHandler.go [moved from dmaap-mediator-producer/mocks/jobhandler/JobHandler.go with 66% similarity]
dmaap-mediator-producer/pom.xml [deleted file]
dmaap-mediator-producer/stub/ics/ics.go
docker-compose/.env
docker-compose/policy-service/docker-compose.yaml
docker-compose/sdnc/docker-compose.yml
docs/api-docs.rst
docs/conf.py
docs/developer-guide.rst
docs/installation-guide.rst
docs/overview.rst
helm-manager/pom.xml
information-coordinator-service/Dockerfile
information-coordinator-service/api/ics-api.json
information-coordinator-service/api/ics-api.yaml
information-coordinator-service/pom.xml
information-coordinator-service/src/main/java/org/oransc/ics/clients/AsyncRestClientFactory.java
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eCallbacks.java
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoProducers.java
information-coordinator-service/src/main/java/org/oransc/ics/tasks/ProducerSupervision.java
onap/oran
pom.xml
r-app-catalogue/Dockerfile
r-app-catalogue/api/rac-api.yaml
r-app-catalogue/pom.xml
r-app-catalogue/src/test/java/org/oransc/rappcatalogue/HttpsRequestTest.java
test/kafka-procon/.gitignore
test/usecases/odusliceassurance/goversion/main.go
test/usecases/odusliceassurance/goversion/pom.xml [deleted file]
test/usecases/odusliceassurance/goversion/stub/sdnr/sdnrstub.go
test/usecases/oruclosedlooprecovery/goversion/build_and_test.sh [deleted file]
test/usecases/oruclosedlooprecovery/goversion/internal/linkfailure/linkfailurehandler.go
test/usecases/oruclosedlooprecovery/goversion/internal/linkfailure/linkfailurehandler_test.go
test/usecases/oruclosedlooprecovery/goversion/o-ru-to-o-du-map.csv
test/usecases/oruclosedlooprecovery/goversion/pom.xml [deleted file]
test/usecases/oruclosedlooprecovery/goversion/stub/producer/producerstub.go
test/usecases/oruclosedlooprecovery/goversion/stub/sdnr/sdnrstub.go
test/usecases/oruclosedlooprecovery/scriptversion/app/Dockerfile
test/usecases/oruclosedlooprecovery/scriptversion/app/main.py
test/usecases/oruclosedlooprecovery/scriptversion/app/o-ru-to-o-du-map.txt
test/usecases/oruclosedlooprecovery/scriptversion/simulators/sdnr_simulator.py

index f64eebb..6f8387e 100644 (file)
@@ -34,9 +34,15 @@ ADD /config/application_configuration.json /opt/app/policy-agent/data/applicatio
 ADD /config/keystore.jks /opt/app/policy-agent/etc/cert/keystore.jks
 ADD /config/truststore.jks /opt/app/policy-agent/etc/cert/truststore.jks
 
-RUN chmod -R 777 /opt/app/policy-agent/config/
-RUN chmod -R 777 /opt/app/policy-agent/data/
+ARG user=nonrtric
+ARG group=nonrtric
 
-ADD target/${JAR} /opt/app/policy-agent/policy-agent.jar
-CMD ["java", "-jar", "/opt/app/policy-agent/policy-agent.jar"]
+RUN groupadd $user && \
+    useradd -r -g $group $user
+RUN chown -R $user:$group /opt/app/policy-agent
+RUN chown -R $user:$group /var/log/policy-agent
+
+USER ${user}
 
+ADD target/${JAR} /opt/app/policy-agent/policy-agent.jar
+CMD ["java", "-jar", "/opt/app/policy-agent/policy-agent.jar"]
\ No newline at end of file
index 8a6d584..870874d 100644 (file)
@@ -26,7 +26,7 @@
     <parent>
         <groupId>org.springframework.boot</groupId>
         <artifactId>spring-boot-starter-parent</artifactId>
-        <version>2.5.3</version>
+        <version>2.6.2</version>
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
     <properties>
         <java.version>11</java.version>
         <springfox.version>3.0.0</springfox.version>
-        <immutable.version>2.8.2</immutable.version>
-        <sdk.version>1.1.6</sdk.version>
-        <swagger.version>2.1.6</swagger.version>
+        <immutable.version>2.8.2</immutable.version>      
+        <swagger.version>2.1.12</swagger.version>
         <json.version>20190722</json.version>
         <maven-compiler-plugin.version>3.8.0</maven-compiler-plugin.version>
         <formatter-maven-plugin.version>2.12.2</formatter-maven-plugin.version>
         <spotless-maven-plugin.version>1.18.0</spotless-maven-plugin.version>
         <docker-maven-plugin>0.30.0</docker-maven-plugin>
-        <version.dmaap>1.1.11</version.dmaap>
         <javax.ws.rs-api.version>2.1.1</javax.ws.rs-api.version>
         <sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
         <jacoco-maven-plugin.version>0.8.5</jacoco-maven-plugin.version>
         <exec-maven-plugin.version>3.0.0</exec-maven-plugin.version>
     </properties>
     <dependencies>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.11.0</version>
+        </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
             <artifactId>guava</artifactId>
-            <version>30.0-jre</version>
+            <version>31.0.1-jre</version>
         </dependency>
         <dependency>
             <groupId>org.springdoc</groupId>
             <artifactId>springdoc-openapi-ui</artifactId>
-            <version>1.5.2</version>
+            <version>1.6.3</version>
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-configuration-processor</artifactId>
             <optional>true</optional>
         </dependency>
-        <dependency>
-            <groupId>org.onap.dcaegen2.services.sdk.rest.services</groupId>
-            <artifactId>cbs-client</artifactId>
-            <version>${sdk.version}</version>
-        </dependency>
         <dependency>
             <groupId>org.projectlombok</groupId>
             <artifactId>lombok</artifactId>
             <scope>provided</scope>
         </dependency>
-        <dependency>
-            <groupId>org.onap.dmaap.messagerouter.dmaapclient</groupId>
-            <artifactId>dmaapClient</artifactId>
-            <version>${version.dmaap}</version>
-        </dependency>
         <dependency>
             <groupId>javax.ws.rs</groupId>
             <artifactId>javax.ws.rs-api</artifactId>
index b2c0c30..f565e80 100644 (file)
@@ -30,14 +30,22 @@ WORKDIR /opt/app/dmaap-adaptor-service
 RUN mkdir -p /var/log/dmaap-adaptor-service
 RUN mkdir -p /opt/app/dmaap-adaptor-service/etc/cert/
 RUN mkdir -p /var/dmaap-adaptor-service
-RUN chmod -R 777 /var/dmaap-adaptor-service
 
 ADD /config/application.yaml /opt/app/dmaap-adaptor-service/config/application.yaml
 ADD /config/application_configuration.json /opt/app/dmaap-adaptor-service/data/application_configuration.json_example
 ADD /config/keystore.jks /opt/app/dmaap-adaptor-service/etc/cert/keystore.jks
 ADD /config/truststore.jks /opt/app/dmaap-adaptor-service/etc/cert/truststore.jks
 
-RUN chmod -R 777 /opt/app/dmaap-adaptor-service/config/
+ARG user=nonrtric
+ARG group=nonrtric
+
+RUN groupadd $user && \
+    useradd -r -g $group $user
+RUN chown -R $user:$group /opt/app/dmaap-adaptor-service
+RUN chown -R $user:$group /var/log/dmaap-adaptor-service
+RUN chown -R $user:$group /var/dmaap-adaptor-service
+
+USER ${user}
 
 ADD target/${JAR} /opt/app/dmaap-adaptor-service/dmaap-adaptor.jar
 CMD ["java", "-jar", "/opt/app/dmaap-adaptor-service/dmaap-adaptor.jar"]
index 04c4ab0..88fed46 100644 (file)
     "paths": {
         "/actuator/threaddump": {"get": {
             "summary": "Actuator web endpoint 'threaddump'",
-            "operationId": "handle_2_1_3",
+            "operationId": "threaddump_4",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/info": {"get": {
             "summary": "Actuator web endpoint 'info'",
-            "operationId": "handle_9",
+            "operationId": "info_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         },
         "/actuator/loggers": {"get": {
             "summary": "Actuator web endpoint 'loggers'",
-            "operationId": "handle_6",
+            "operationId": "loggers_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/health/**": {"get": {
             "summary": "Actuator web endpoint 'health-path'",
-            "operationId": "handle_12",
+            "operationId": "health-path_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/metrics/{requiredMetricName}": {"get": {
             "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
-            "operationId": "handle_5",
+            "operationId": "metrics-requiredMetricName_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/logfile": {"get": {
             "summary": "Actuator web endpoint 'logfile'",
-            "operationId": "handle_8",
+            "operationId": "logfile_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         "/actuator/loggers/{name}": {
             "post": {
                 "summary": "Actuator web endpoint 'loggers-name'",
-                "operationId": "handle_0",
+                "operationId": "loggers-name_3",
                 "responses": {"200": {
                     "description": "OK",
                     "content": {"*/*": {"schema": {"type": "object"}}}
             },
             "get": {
                 "summary": "Actuator web endpoint 'loggers-name'",
-                "operationId": "handle_7",
+                "operationId": "loggers-name_4",
                 "responses": {"200": {
                     "description": "OK",
                     "content": {"*/*": {"schema": {"type": "object"}}}
         },
         "/actuator/health": {"get": {
             "summary": "Actuator web endpoint 'health'",
-            "operationId": "handle_11",
+            "operationId": "health_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/metrics": {"get": {
             "summary": "Actuator web endpoint 'metrics'",
-            "operationId": "handle_4",
+            "operationId": "metrics_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/heapdump": {"get": {
             "summary": "Actuator web endpoint 'heapdump'",
-            "operationId": "handle_10",
+            "operationId": "heapdump_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         "title": "Generic Dmaap and Kafka Information Producer",
         "version": "1.0"
     },
-    "tags": [{
-        "name": "Actuator",
-        "description": "Monitor and interact",
-        "externalDocs": {
-            "description": "Spring Boot Actuator Web API Documentation",
-            "url": "https://docs.spring.io/spring-boot/docs/current/actuator-api/html/"
+    "tags": [
+        {"name": "Information Coordinator Service Simulator (exists only in test)"},
+        {"name": "Producer job control API"},
+        {"name": "Test Consumer Simulator (exists only in test)"},
+        {"name": "DMAAP Simulator (exists only in test)"},
+        {
+            "name": "Actuator",
+            "description": "Monitor and interact",
+            "externalDocs": {
+                "description": "Spring Boot Actuator Web API Documentation",
+                "url": "https://docs.spring.io/spring-boot/docs/current/actuator-api/html/"
+            }
         }
-    }]
+    ]
 }
\ No newline at end of file
index 1fb78fa..f6eb1f7 100644 (file)
@@ -10,6 +10,10 @@ info:
 servers:
 - url: /
 tags:
+- name: Information Coordinator Service Simulator (exists only in test)
+- name: Producer job control API
+- name: Test Consumer Simulator (exists only in test)
+- name: DMAAP Simulator (exists only in test)
 - name: Actuator
   description: Monitor and interact
   externalDocs:
@@ -21,7 +25,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'threaddump'
-      operationId: handle_2_1_3
+      operationId: threaddump_4
       responses:
         200:
           description: OK
@@ -34,7 +38,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'info'
-      operationId: handle_9
+      operationId: info_2
       responses:
         200:
           description: OK
@@ -136,7 +140,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers'
-      operationId: handle_6
+      operationId: loggers_2
       responses:
         200:
           description: OK
@@ -149,7 +153,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'health-path'
-      operationId: handle_12
+      operationId: health-path_2
       responses:
         200:
           description: OK
@@ -230,7 +234,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'metrics-requiredMetricName'
-      operationId: handle_5
+      operationId: metrics-requiredMetricName_2
       parameters:
       - name: requiredMetricName
         in: path
@@ -268,7 +272,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'logfile'
-      operationId: handle_8
+      operationId: logfile_2
       responses:
         200:
           description: OK
@@ -281,7 +285,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers-name'
-      operationId: handle_7
+      operationId: loggers-name_4
       parameters:
       - name: name
         in: path
@@ -301,7 +305,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers-name'
-      operationId: handle_0
+      operationId: loggers-name_3
       parameters:
       - name: name
         in: path
@@ -322,7 +326,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'health'
-      operationId: handle_11
+      operationId: health_2
       responses:
         200:
           description: OK
@@ -370,7 +374,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'metrics'
-      operationId: handle_4
+      operationId: metrics_2
       responses:
         200:
           description: OK
@@ -383,7 +387,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'heapdump'
-      operationId: handle_10
+      operationId: heapdump_2
       responses:
         200:
           description: OK
index 9c1dd44..b555912 100644 (file)
@@ -26,7 +26,7 @@
     <parent>
         <groupId>org.springframework.boot</groupId>
         <artifactId>spring-boot-starter-parent</artifactId>
-        <version>2.5.3</version>
+        <version>2.5.8</version>
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
@@ -56,7 +56,6 @@
         <spotless-maven-plugin.version>1.24.3</spotless-maven-plugin.version>
         <swagger-codegen-maven-plugin.version>3.0.11</swagger-codegen-maven-plugin.version>
         <docker-maven-plugin>0.30.0</docker-maven-plugin>
-        <javax.ws.rs-api.version>2.1.1</javax.ws.rs-api.version>
         <sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
         <jacoco-maven-plugin.version>0.8.5</jacoco-maven-plugin.version>
         <exec.skip>true</exec.skip>
@@ -65,7 +64,7 @@
         <dependency>
             <groupId>org.springdoc</groupId>
             <artifactId>springdoc-openapi-ui</artifactId>
-            <version>1.5.4</version>
+            <version>1.6.3</version>
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
@@ -93,7 +92,6 @@
             <artifactId>swagger-jaxrs2-servlet-initializer</artifactId>
             <version>${swagger.version}</version>
         </dependency>
-     
         <dependency>
             <groupId>org.immutables</groupId>
             <artifactId>value</artifactId>
         <dependency>
             <groupId>io.projectreactor.kafka</groupId>
             <artifactId>reactor-kafka</artifactId>
-            <version>1.3.7</version>
+            <version>1.3.9</version>
         </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
index 4809017..5233401 100644 (file)
@@ -100,7 +100,9 @@ public class KafkaTopicConsumers {
     public synchronized void restartNonRunningTopics() {
         for (String typeId : this.consumers.keySet()) {
             for (KafkaJobDataConsumer consumer : this.consumers.get(typeId)) {
-                restartTopic(consumer);
+                if (!consumer.isRunning()) {
+                    restartTopic(consumer);
+                }
             }
         }
     }
index 38e7807..f7e6e87 100644 (file)
@@ -6,16 +6,20 @@
       "type": "string"
     },
     "maxConcurrency": {
-      "type": "integer"
+      "type": "integer",
+      "minimum": 1
     },
     "bufferTimeout": {
       "type": "object",
       "properties": {
         "maxSize": {
-          "type": "integer"
+          "type": "integer",
+          "minimum": 1
         },
         "maxTimeMiliseconds": {
-          "type": "integer"
+          "type": "integer",
+          "minimum": 0,
+          "maximum": 160000
         }
       },
       "additionalProperties": false,
index 8c41423..6660175 100644 (file)
@@ -333,11 +333,11 @@ class ApplicationTest {
                 () -> assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(2));
     }
 
-    private void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains) {
+    public static void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains) {
         testErrorCode(request, expStatus, responseContains, true);
     }
 
-    private void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains,
+    public static void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains,
             boolean expectApplicationProblemJsonMediaType) {
         StepVerifier.create(request) //
                 .expectSubscription() //
@@ -346,7 +346,7 @@ class ApplicationTest {
                 .verify();
     }
 
-    private boolean checkWebClientError(Throwable throwable, HttpStatus expStatus, String responseContains,
+    private static boolean checkWebClientError(Throwable throwable, HttpStatus expStatus, String responseContains,
             boolean expectApplicationProblemJsonMediaType) {
         assertTrue(throwable instanceof WebClientResponseException);
         WebClientResponseException responseException = (WebClientResponseException) throwable;
index d1d7e91..9f0ef19 100644 (file)
@@ -49,9 +49,11 @@ import org.springframework.boot.test.context.TestConfiguration;
 import org.springframework.boot.web.embedded.tomcat.TomcatServletWebServerFactory;
 import org.springframework.boot.web.servlet.server.ServletWebServerFactory;
 import org.springframework.context.annotation.Bean;
+import org.springframework.http.HttpStatus;
 import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 
+
 @SuppressWarnings("java:S3577") // Rename class
 @ExtendWith(SpringExtension.class)
 @SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT)
@@ -232,7 +234,23 @@ class IntegrationWithIcs {
     }
 
     @Test
-    void testWholeChain() throws Exception {
+    void testKafkaJobParameterOutOfRange() {
+        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
+        final String TYPE_ID = "KafkaInformationType";
+
+        Job.Parameters param = new Job.Parameters("filter", new Job.BufferTimeout(123, 170 * 1000), 1);
+
+        ConsumerJobInfo jobInfo =
+                new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", consumerUri(), "");
+        String body = gson.toJson(jobInfo);
+
+        ApplicationTest.testErrorCode(restClient().put(jobUrl("KAFKA_JOB_ID"), body), HttpStatus.BAD_REQUEST,
+                "Json validation failure");
+
+    }
+
+    @Test
+    void testDmaapMessage() throws Exception {
         await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
 
         createInformationJobInIcs(DMAAP_TYPE_ID, DMAAP_JOB_ID, ".*DmaapResponse.*");
@@ -250,7 +268,6 @@ class IntegrationWithIcs {
         deleteInformationJobInIcs(DMAAP_JOB_ID);
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-
     }
 
 }
index c38af8a..5a48d61 100644 (file)
@@ -215,7 +215,7 @@ class IntegrationWithKafka {
 
         Map<String, Object> props = new HashMap<>();
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
-        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sample-producer");
+        props.put(ProducerConfig.CLIENT_ID_CONFIG, "sample-producerx");
         props.put(ProducerConfig.ACKS_CONFIG, "all");
         props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class);
         props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
@@ -236,6 +236,8 @@ class IntegrationWithKafka {
                 .doOnError(e -> logger.error("Send failed", e)) //
                 .blockLast();
 
+        sender.close();
+
     }
 
     private void verifiedReceivedByConsumer(String... strings) {
@@ -246,6 +248,29 @@ class IntegrationWithKafka {
         }
     }
 
+    @Test
+    void simpleCase() throws InterruptedException {
+        final String JOB_ID = "ID";
+
+        // Register producer, Register types
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        Thread.sleep(4000);
+        var dataToSend = Flux.just(senderRecord("Message"));
+        sendDataToStream(dataToSend);
+
+        verifiedReceivedByConsumer("Message");
+
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
+    }
+
     @Test
     void kafkaIntegrationTest() throws Exception {
         final String JOB_ID1 = "ID1";
@@ -256,12 +281,13 @@ class IntegrationWithKafka {
         assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs. One buffering and one with a filter
-        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 10, 20), JOB_ID1,
                 restClient());
         this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
+        Thread.sleep(2000);
         var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend);
 
index 1c7f45c..6d9b2b8 100644 (file)
@@ -30,7 +30,7 @@ RUN go build -o /dmaapmediatorproducer
 ##
 ## Deploy
 ##
-FROM gcr.io/distroless/base-debian10
+FROM gcr.io/distroless/base-debian11
 WORKDIR /
 ## Copy from "build" stage
 COPY --from=build /dmaapmediatorproducer .
index 7cb1919..6009a8f 100644 (file)
@@ -14,7 +14,13 @@ The producer takes a number of environment variables, described below, as config
 >- PRODUCER_KEY_PATH   Optional. The path to the key to the certificate to use for https.         Defaults to `security/producer.key`
 >- LOG_LEVEL           Optional. The log level, which can be `Error`, `Warn`, `Info` or `Debug`.  Defaults to `Info`.
 
-The file `configs/type_config.json` contains the configuration of job types that the producer will support.
+Any of the addresses used by this product can be configured to use https by specifying it as the scheme of the address URI. Clients configured to use https will not use server certificate verification. The communication towards the consumers will use https if their callback address URI uses that scheme. The producer's own callback will only listen on the scheme configured in the info producer host address.
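For illustration only (a sketch, not the project's actual client code), skipping server certificate verification in a Go HTTP client, as described above, typically looks like this:

```go
package example

import (
	"crypto/tls"
	"net/http"
)

// newInsecureHTTPSClient returns a client that accepts any server certificate,
// mirroring the behaviour described above for clients configured to use https.
func newInsecureHTTPSClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			// Disable verification of the server's certificate chain and host name.
			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		},
	}
}
```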
+
+The configured public key and certificate shall be PEM-encoded. A self-signed certificate and key are provided in the `security` folder of the project. These files should be replaced for production. To generate a self-signed key and certificate, use the example code below:
+
+    openssl req -new -x509 -sha256 -key server.key -out server.crt -days 3650
+
+The file `configs/type_config.json` contains the configuration of job types that the producer will support; see the example below.
 
     {
        "types":
@@ -22,36 +28,97 @@ The file `configs/type_config.json` contains the configuration of job types that
           {
             "id": The ID of the job type, e.g. "STD_Fault_Messages",
             "dmaapTopicUrl": The topic URL to poll from DMaaP Message Router, e.g. "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD_Fault_Messages"
+          },
+          {
+            "id": The ID of the job type, e.g. "Kafka_TestTopic",
+            "kafkaInputTopic": The Kafka topic to poll
           }
       ]
     }
 
-Any of the addresses used by this product can be configured to use https, by specifying it as the scheme of the address URI. Clients configured to use https will not use server certificate verification. The communication towards the consumers will use https if their callback address URI uses that scheme. The producer's own callback will only listen to the scheme configured in the scheme of the info producer host address.
+Each information type has the following properties:
+ - `id`: the information type identity, as exposed in the Information Coordinator Service data consumer API
+ - `dmaapTopicUrl`: the URL for fetching information from DMaaP
+ - `kafkaInputTopic`: the Kafka topic to get input from
 
-The configured public key and cerificate shall be PEM-encoded. A self signed certificate and key are provided in the `security` folder of the project. These files should be replaced for production. To generate a self signed key and certificate, use the example code below:
-
-    openssl req -new -x509 -sha256 -key server.key -out server.crt -days 3650
+Exactly one of `dmaapTopicUrl` and `kafkaInputTopic` must be provided for each type, not both.
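As a hypothetical sketch only (the struct and function below are assumptions for illustration, not the project's actual source), the rule that exactly one of the two topic fields must be set could be expressed in Go like this:

```go
package example

import "fmt"

// TypeDefinition mirrors one entry of configs/type_config.json; the JSON
// tags are taken from the keys shown above.
type TypeDefinition struct {
	Identity        string `json:"id"`
	DmaapTopicURL   string `json:"dmaapTopicUrl"`
	KafkaInputTopic string `json:"kafkaInputTopic"`
}

// validate enforces that exactly one of dmaapTopicUrl and kafkaInputTopic is set.
func validate(t TypeDefinition) error {
	hasDmaap := t.DmaapTopicURL != ""
	hasKafka := t.KafkaInputTopic != ""
	if hasDmaap == hasKafka { // both set, or neither set
		return fmt.Errorf("type %q must define exactly one of dmaapTopicUrl and kafkaInputTopic", t.Identity)
	}
	return nil
}
```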
 
 ## Functionality
 
 At startup the producer will register the configured job types in ICS and also register itself as a producer supporting these types. If ICS is unavailable, the producer will retry to connect indefinitely. The same goes for MR.
 
-Once the initial registration is done, the producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer until it is available again.
+Once the initial registration is done, the producer will constantly poll MR and/or Kafka for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer until it is available again.
 
 The producer provides a REST API that fulfills the ICS Data producer API, see [Data producer (callbacks)](<https://docs.o-ran-sc.org/projects/o-ran-sc-nonrtric/en/latest/ics-api.html#tag/Data-producer-(callbacks)>). The health check method returns the registration status of the producer in ICS as JSON. It also provides a method to control the log level of the producer. The available log levels are the same as the ones used in the configuration above.
 
     PUT https://mrproducer:8085/admin/log?level=<new level>
 
+The Swagger documentation of the producer's API is also available through the `/swagger` path.
+
+When an Information Job is created in the Information Coordinator Service Consumer API, it is possible to define a number of job-specific properties. For an information type that has a Kafka topic defined, the following JSON schema defines the properties that can be used:
+
+
+```json
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "bufferTimeout": {
+      "type": "object",
+      "properties": {
+        "maxSize": {
+          "type": "integer"
+        },
+        "maxTimeMiliseconds": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "maxSize",
+        "maxTimeMiliseconds"
+      ]
+    }
+  },
+  "additionalProperties": false
+}
+```
+- `bufferTimeout` can be used to reduce the number of REST calls to the consumer. If defined, a number of objects will be
+  buffered and sent in one REST call to the consumer.
+  The buffered objects will be put in a JSON array and quoted. Example:
+    Object1 and Object2 may be posted in one call --> ["Object1", "Object2"]
+  `bufferTimeout` is a JSON object with the following parameters:
+    - `maxSize`: the maximum number of buffered objects before posting
+    - `maxTimeMiliseconds`: the maximum delay time, in milliseconds, to buffer before posting
+  If no `bufferTimeout` is specified, each object will be posted as received, in a separate call (not quoted and put in a JSON array).
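To illustrate the quoting described above, here is a minimal Go sketch (an illustration only, not the producer's actual implementation): marshalling a slice of message strings yields exactly the quoted JSON array shown in the example.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Two buffered objects combined into one request body, each one quoted.
	buffered := []string{"Object1", "Object2"}
	body, err := json.Marshal(buffered)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // prints ["Object1","Object2"]
}
```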
+
+
+For an information type that only has a DMaaP topic, the following JSON schema is used:
+
+```json
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+  },
+  "additionalProperties": false
+}
+```
+
 ## Development
 
-To make it easy to test during development of the producer, two stubs are provided in the `stub` folder.
+To make it easy to test during development of the producer, three stubs are provided in the `stub` folder.
 
 One, under the `dmaap` folder, called `dmaap`, stubs MR and responds with an array containing one message, with `eventSeverity` alternating between `NORMAL` and `CRITICAL`. The default port is `3905`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
 >1. cd stub/dmaap
 >2. go build
 >3. ./dmaap [-port \<PORT>]
 
-One, under the `consumer` folder, called `consumer` that at startup will register a job of type `STD_Fault_Messages` in ICS, and then listen for REST calls and print the body of them. By default, it listens to the port `40935`, but his can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
+An ICS stub, under the `ics` folder, listens for registration calls from the producer and prints the data of each call it receives. By default, it listens to the port `8434`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
+>1. cd stub/ics
+>2. go build
+>3. ./ics [-port \<PORT>]
+
+One, under the `consumer` folder, called `consumer`, registers a job of type `STD_Fault_Messages` in ICS at startup, if ICS is available, and then listens for REST calls and prints their bodies. By default, it listens to the port `40935`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
 >1. cd stub/consumer
 >2. go build
 >3. ./consumer [-port \<PORT>]
diff --git a/dmaap-mediator-producer/api/docs.go b/dmaap-mediator-producer/api/docs.go
new file mode 100644 (file)
index 0000000..dbfc42b
--- /dev/null
@@ -0,0 +1,303 @@
+// Package api GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
+// This file was generated by swaggo/swag
+package api
+
+import (
+       "bytes"
+       "encoding/json"
+       "strings"
+       "text/template"
+
+       "github.com/swaggo/swag"
+)
+
+var doc = `{
+    "schemes": {{ marshal .Schemes }},
+    "swagger": "2.0",
+    "info": {
+        "description": "{{escape .Description}}",
+        "title": "{{.Title}}",
+        "contact": {},
+        "license": {
+            "name": "Apache 2.0",
+            "url": "http://www.apache.org/licenses/LICENSE-2.0.html"
+        },
+        "version": "{{.Version}}"
+    },
+    "host": "{{.Host}}",
+    "basePath": "{{.BasePath}}",
+    "paths": {
+        "/admin/log": {
+            "put": {
+                "description": "Set the log level of the producer.",
+                "tags": [
+                    "Admin"
+                ],
+                "summary": "Set log level",
+                "parameters": [
+                    {
+                        "enum": [
+                            "Error",
+                            "Warn",
+                            "Info",
+                            "Debug"
+                        ],
+                        "type": "string",
+                        "description": "string enums",
+                        "name": "level",
+                        "in": "query"
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    },
+                    "400": {
+                        "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
+                        "schema": {
+                            "$ref": "#/definitions/ErrorInfo"
+                        },
+                        "headers": {
+                            "Content-Type": {
+                                "type": "string",
+                                "description": "application/problem+json"
+                            }
+                        }
+                    }
+                }
+            }
+        },
+        "/health_check": {
+            "get": {
+                "description": "Get the status of the producer. Will show if the producer has registered in ICS.",
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Get status",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/"
+                        }
+                    }
+                }
+            }
+        },
+        "/info_job": {
+            "post": {
+                "description": "Callback for ICS to add an info job",
+                "consumes": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Add info job",
+                "parameters": [
+                    {
+                        "description": "Info job data",
+                        "name": "user",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/JobInfo"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    },
+                    "400": {
+                        "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
+                        "schema": {
+                            "$ref": "#/definitions/ErrorInfo"
+                        },
+                        "headers": {
+                            "Content-Type": {
+                                "type": "string",
+                                "description": "application/problem+json"
+                            }
+                        }
+                    }
+                }
+            }
+        },
+        "/info_job/{infoJobId}": {
+            "delete": {
+                "description": "Callback for ICS to delete an info job",
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Delete info job",
+                "parameters": [
+                    {
+                        "type": "string",
+                        "description": "Info job ID",
+                        "name": "infoJobId",
+                        "in": "path",
+                        "required": true
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    }
+                }
+            }
+        },
+        "/swagger": {
+            "get": {
+                "description": "Get the Swagger API documentation for the producer.",
+                "tags": [
+                    "Admin"
+                ],
+                "summary": "Get Swagger Documentation",
+                "responses": {
+                    "200": {
+                        "description": ""
+                    }
+                }
+            }
+        }
+    },
+    "definitions": {
+        "": {
+            "type": "object",
+            "properties": {
+                "registeredStatus": {
+                    "description": "The registration status of the producer in Information Coordinator Service. Either ` + "`" + `registered` + "`" + ` or ` + "`" + `not registered` + "`" + `",
+                    "type": "string",
+                    "example": "registered"
+                }
+            }
+        },
+        "BufferTimeout": {
+            "type": "object",
+            "properties": {
+                "maxSize": {
+                    "type": "integer"
+                },
+                "maxTimeMiliseconds": {
+                    "type": "integer"
+                }
+            }
+        },
+        "ErrorInfo": {
+            "type": "object",
+            "properties": {
+                "detail": {
+                    "description": "A human-readable explanation specific to this occurrence of the problem.",
+                    "type": "string",
+                    "example": "Info job type not found"
+                },
+                "instance": {
+                    "description": "A URI reference that identifies the specific occurrence of the problem.",
+                    "type": "string"
+                },
+                "status": {
+                    "description": "The HTTP status code generated by the origin server for this occurrence of the problem.",
+                    "type": "integer",
+                    "example": 400
+                },
+                "title": {
+                    "description": "A short, human-readable summary of the problem type.",
+                    "type": "string"
+                },
+                "type": {
+                    "description": "A URI reference that identifies the problem type.",
+                    "type": "string"
+                }
+            }
+        },
+        "JobInfo": {
+            "type": "object",
+            "properties": {
+                "info_job_data": {
+                    "$ref": "#/definitions/Parameters"
+                },
+                "info_job_identity": {
+                    "type": "string"
+                },
+                "info_type_identity": {
+                    "type": "string"
+                },
+                "last_updated": {
+                    "type": "string"
+                },
+                "owner": {
+                    "type": "string"
+                },
+                "target_uri": {
+                    "type": "string"
+                }
+            }
+        },
+        "Parameters": {
+            "type": "object",
+            "properties": {
+                "bufferTimeout": {
+                    "$ref": "#/definitions/BufferTimeout"
+                }
+            }
+        }
+    }
+}`
+
+type swaggerInfo struct {
+       Version     string
+       Host        string
+       BasePath    string
+       Schemes     []string
+       Title       string
+       Description string
+}
+
+// SwaggerInfo holds exported Swagger Info so clients can modify it
+var SwaggerInfo = swaggerInfo{
+       Version:     "1.1.0",
+       Host:        "",
+       BasePath:    "",
+       Schemes:     []string{},
+       Title:       "DMaaP Mediator Producer",
+       Description: "",
+}
+
+type s struct{}
+
+func (s *s) ReadDoc() string {
+       sInfo := SwaggerInfo
+       sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1)
+
+       t, err := template.New("swagger_info").Funcs(template.FuncMap{
+               "marshal": func(v interface{}) string {
+                       a, _ := json.Marshal(v)
+                       return string(a)
+               },
+               "escape": func(v interface{}) string {
+                       // escape tabs
+                       str := strings.Replace(v.(string), "\t", "\\t", -1)
+                       // replace " with \", and if that results in \\", replace that with \\\"
+                       str = strings.Replace(str, "\"", "\\\"", -1)
+                       return strings.Replace(str, "\\\\\"", "\\\\\\\"", -1)
+               },
+       }).Parse(doc)
+       if err != nil {
+               return doc
+       }
+
+       var tpl bytes.Buffer
+       if err := t.Execute(&tpl, sInfo); err != nil {
+               return doc
+       }
+
+       return tpl.String()
+}
+
+func init() {
+       swag.Register("swagger", &s{})
+}
diff --git a/dmaap-mediator-producer/api/swagger.json b/dmaap-mediator-producer/api/swagger.json
new file mode 100644 (file)
index 0000000..8910022
--- /dev/null
@@ -0,0 +1,232 @@
+{
+    "swagger": "2.0",
+    "info": {
+        "title": "DMaaP Mediator Producer",
+        "contact": {},
+        "license": {
+            "name": "Apache 2.0",
+            "url": "http://www.apache.org/licenses/LICENSE-2.0.html"
+        },
+        "version": "1.1.0"
+    },
+    "paths": {
+        "/admin/log": {
+            "put": {
+                "description": "Set the log level of the producer.",
+                "tags": [
+                    "Admin"
+                ],
+                "summary": "Set log level",
+                "parameters": [
+                    {
+                        "enum": [
+                            "Error",
+                            "Warn",
+                            "Info",
+                            "Debug"
+                        ],
+                        "type": "string",
+                        "description": "string enums",
+                        "name": "level",
+                        "in": "query"
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    },
+                    "400": {
+                        "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
+                        "schema": {
+                            "$ref": "#/definitions/ErrorInfo"
+                        },
+                        "headers": {
+                            "Content-Type": {
+                                "type": "string",
+                                "description": "application/problem+json"
+                            }
+                        }
+                    }
+                }
+            }
+        },
+        "/health_check": {
+            "get": {
+                "description": "Get the status of the producer. Will show if the producer has registered in ICS.",
+                "produces": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Get status",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "schema": {
+                            "$ref": "#/definitions/"
+                        }
+                    }
+                }
+            }
+        },
+        "/info_job": {
+            "post": {
+                "description": "Callback for ICS to add an info job",
+                "consumes": [
+                    "application/json"
+                ],
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Add info job",
+                "parameters": [
+                    {
+                        "description": "Info job data",
+                        "name": "user",
+                        "in": "body",
+                        "required": true,
+                        "schema": {
+                            "$ref": "#/definitions/JobInfo"
+                        }
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    },
+                    "400": {
+                        "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
+                        "schema": {
+                            "$ref": "#/definitions/ErrorInfo"
+                        },
+                        "headers": {
+                            "Content-Type": {
+                                "type": "string",
+                                "description": "application/problem+json"
+                            }
+                        }
+                    }
+                }
+            }
+        },
+        "/info_job/{infoJobId}": {
+            "delete": {
+                "description": "Callback for ICS to delete an info job",
+                "tags": [
+                    "Data producer (callbacks)"
+                ],
+                "summary": "Delete info job",
+                "parameters": [
+                    {
+                        "type": "string",
+                        "description": "Info job ID",
+                        "name": "infoJobId",
+                        "in": "path",
+                        "required": true
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": ""
+                    }
+                }
+            }
+        },
+        "/swagger": {
+            "get": {
+                "description": "Get the Swagger API documentation for the producer.",
+                "tags": [
+                    "Admin"
+                ],
+                "summary": "Get Swagger Documentation",
+                "responses": {
+                    "200": {
+                        "description": ""
+                    }
+                }
+            }
+        }
+    },
+    "definitions": {
+        "": {
+            "type": "object",
+            "properties": {
+                "registeredStatus": {
+                    "description": "The registration status of the producer in Information Coordinator Service. Either `registered` or `not registered`",
+                    "type": "string",
+                    "example": "registered"
+                }
+            }
+        },
+        "BufferTimeout": {
+            "type": "object",
+            "properties": {
+                "maxSize": {
+                    "type": "integer"
+                },
+                "maxTimeMiliseconds": {
+                    "type": "integer"
+                }
+            }
+        },
+        "ErrorInfo": {
+            "type": "object",
+            "properties": {
+                "detail": {
+                    "description": "A human-readable explanation specific to this occurrence of the problem.",
+                    "type": "string",
+                    "example": "Info job type not found"
+                },
+                "instance": {
+                    "description": "A URI reference that identifies the specific occurrence of the problem.",
+                    "type": "string"
+                },
+                "status": {
+                    "description": "The HTTP status code generated by the origin server for this occurrence of the problem.",
+                    "type": "integer",
+                    "example": 400
+                },
+                "title": {
+                    "description": "A short, human-readable summary of the problem type.",
+                    "type": "string"
+                },
+                "type": {
+                    "description": "A URI reference that identifies the problem type.",
+                    "type": "string"
+                }
+            }
+        },
+        "JobInfo": {
+            "type": "object",
+            "properties": {
+                "info_job_data": {
+                    "$ref": "#/definitions/Parameters"
+                },
+                "info_job_identity": {
+                    "type": "string"
+                },
+                "info_type_identity": {
+                    "type": "string"
+                },
+                "last_updated": {
+                    "type": "string"
+                },
+                "owner": {
+                    "type": "string"
+                },
+                "target_uri": {
+                    "type": "string"
+                }
+            }
+        },
+        "Parameters": {
+            "type": "object",
+            "properties": {
+                "bufferTimeout": {
+                    "$ref": "#/definitions/BufferTimeout"
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/dmaap-mediator-producer/api/swagger.yaml b/dmaap-mediator-producer/api/swagger.yaml
new file mode 100644 (file)
index 0000000..adf70a8
--- /dev/null
@@ -0,0 +1,159 @@
+definitions:
+  "":
+    properties:
+      registeredStatus:
+        description: The registration status of the producer in Information Coordinator
+          Service. Either `registered` or `not registered`
+        example: registered
+        type: string
+    type: object
+  BufferTimeout:
+    properties:
+      maxSize:
+        type: integer
+      maxTimeMiliseconds:
+        type: integer
+    type: object
+  ErrorInfo:
+    properties:
+      detail:
+        description: A human-readable explanation specific to this occurrence of the
+          problem.
+        example: Info job type not found
+        type: string
+      instance:
+        description: A URI reference that identifies the specific occurrence of the
+          problem.
+        type: string
+      status:
+        description: The HTTP status code generated by the origin server for this
+          occurrence of the problem.
+        example: 400
+        type: integer
+      title:
+        description: A short, human-readable summary of the problem type.
+        type: string
+      type:
+        description: A URI reference that identifies the problem type.
+        type: string
+    type: object
+  JobInfo:
+    properties:
+      info_job_data:
+        $ref: '#/definitions/Parameters'
+      info_job_identity:
+        type: string
+      info_type_identity:
+        type: string
+      last_updated:
+        type: string
+      owner:
+        type: string
+      target_uri:
+        type: string
+    type: object
+  Parameters:
+    properties:
+      bufferTimeout:
+        $ref: '#/definitions/BufferTimeout'
+    type: object
+info:
+  contact: {}
+  license:
+    name: Apache 2.0
+    url: http://www.apache.org/licenses/LICENSE-2.0.html
+  title: DMaaP Mediator Producer
+  version: 1.1.0
+paths:
+  /admin/log:
+    put:
+      description: Set the log level of the producer.
+      parameters:
+      - description: string enums
+        enum:
+        - Error
+        - Warn
+        - Info
+        - Debug
+        in: query
+        name: level
+        type: string
+      responses:
+        "200":
+          description: ""
+        "400":
+          description: Problem as defined in https://tools.ietf.org/html/rfc7807
+          headers:
+            Content-Type:
+              description: application/problem+json
+              type: string
+          schema:
+            $ref: '#/definitions/ErrorInfo'
+      summary: Set log level
+      tags:
+      - Admin
+  /health_check:
+    get:
+      description: Get the status of the producer. Will show if the producer has registered
+        in ICS.
+      produces:
+      - application/json
+      responses:
+        "200":
+          description: OK
+          schema:
+            $ref: '#/definitions/'
+      summary: Get status
+      tags:
+      - Data producer (callbacks)
+  /info_job:
+    post:
+      consumes:
+      - application/json
+      description: Callback for ICS to add an info job
+      parameters:
+      - description: Info job data
+        in: body
+        name: user
+        required: true
+        schema:
+          $ref: '#/definitions/JobInfo'
+      responses:
+        "200":
+          description: ""
+        "400":
+          description: Problem as defined in https://tools.ietf.org/html/rfc7807
+          headers:
+            Content-Type:
+              description: application/problem+json
+              type: string
+          schema:
+            $ref: '#/definitions/ErrorInfo'
+      summary: Add info job
+      tags:
+      - Data producer (callbacks)
+  /info_job/{infoJobId}:
+    delete:
+      description: Callback for ICS to delete an info job
+      parameters:
+      - description: Info job ID
+        in: path
+        name: infoJobId
+        required: true
+        type: string
+      responses:
+        "200":
+          description: ""
+      summary: Delete info job
+      tags:
+      - Data producer (callbacks)
+  /swagger:
+    get:
+      description: Get the Swagger API documentation for the producer.
+      responses:
+        "200":
+          description: ""
+      summary: Get Swagger Documentation
+      tags:
+      - Admin
+swagger: "2.0"
diff --git a/dmaap-mediator-producer/configs/typeSchemaDmaap.json b/dmaap-mediator-producer/configs/typeSchemaDmaap.json
new file mode 100644 (file)
index 0000000..4abee49
--- /dev/null
@@ -0,0 +1,7 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+  },
+  "additionalProperties": false
+}
diff --git a/dmaap-mediator-producer/configs/typeSchemaKafka.json b/dmaap-mediator-producer/configs/typeSchemaKafka.json
new file mode 100644 (file)
index 0000000..9c3980f
--- /dev/null
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "bufferTimeout": {
+      "type": "object",
+      "properties": {
+        "maxSize": {
+          "type": "integer"
+        },
+        "maxTimeMiliseconds": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "maxSize",
+        "maxTimeMiliseconds"
+      ]
+    }
+  },
+  "additionalProperties": false
+}
\ No newline at end of file
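
For Kafka types this schema is what ICS uses to validate the info_job_data supplied by consumers. Below is a minimal sketch of a matching payload, using local structs that simply mirror the Parameters/BufferTimeout types added in internal/jobs (illustrative only, not part of the change):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // Local mirrors of the job parameter types; only the JSON shape matters here.
    type bufferTimeout struct {
        MaxSize            int   `json:"maxSize"`
        MaxTimeMiliseconds int64 `json:"maxTimeMiliseconds"`
    }

    type parameters struct {
        BufferTimeout bufferTimeout `json:"bufferTimeout"`
    }

    func main() {
        p := parameters{BufferTimeout: bufferTimeout{MaxSize: 100, MaxTimeMiliseconds: 1000}}
        b, _ := json.Marshal(p)
        // Prints {"bufferTimeout":{"maxSize":100,"maxTimeMiliseconds":1000}}, which
        // validates against the schema above (both fields are required when
        // bufferTimeout is present).
        fmt.Println(string(b))
    }

Omitting bufferTimeout entirely is also allowed by the schema; such jobs are handled one message at a time (see isJobBuffered in jobs.go below).
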
index f75d0e4..1149669 100644 (file)
@@ -4,6 +4,10 @@
       {
         "id": "STD_Fault_Messages",
         "dmaapTopicUrl": "/events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD_Fault_Messages"
+      },
+      {
+        "id": "Kafka_TestTopic",
+        "kafkaInputTopic": "TestTopic"
       }
   ]
 }
\ No newline at end of file
diff --git a/dmaap-mediator-producer/container-tag.yaml b/dmaap-mediator-producer/container-tag.yaml
new file mode 100644 (file)
index 0000000..f84eeb1
--- /dev/null
@@ -0,0 +1,5 @@
+# The Jenkins job requires a tag to build the Docker image.
+# By default this file is in the docker build directory,
+# but the location can be configured in the JJB template.
+---
+tag: 1.1.0
similarity index 86%
rename from dmaap-mediator-producer/build_and_test.sh
rename to dmaap-mediator-producer/generate_swagger_docs.sh
index 397124d..8a13f30 100755 (executable)
@@ -1,7 +1,7 @@
 #!/bin/bash
 ##############################################################################
 #
-#   Copyright (C) 2021: Nordix Foundation
+#   Copyright (C) 2022: Nordix Foundation
 #
 #   Licensed under the Apache License, Version 2.0 (the "License");
 #   you may not use this file except in compliance with the License.
@@ -17,6 +17,6 @@
 #
 ##############################################################################
 
-go build
-
-go test ./...
+go get -u github.com/swaggo/swag/cmd/swag
+swag init --output api
+swag fmt
\ No newline at end of file
index eaaecf7..ea7b361 100644 (file)
@@ -3,17 +3,39 @@ module oransc.org/nonrtric/dmaapmediatorproducer
 go 1.17
 
 require (
+       github.com/confluentinc/confluent-kafka-go v1.8.2
        github.com/gorilla/mux v1.8.0
        github.com/hashicorp/go-retryablehttp v0.7.0
        github.com/sirupsen/logrus v1.8.1
        github.com/stretchr/testify v1.7.0
+       github.com/swaggo/http-swagger v1.1.2
+       github.com/swaggo/swag v1.7.8
 )
 
 require (
+       github.com/KyleBanks/depth v1.2.1 // indirect
+       github.com/PuerkitoBio/purell v1.1.1 // indirect
+       github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
+       github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
        github.com/davecgh/go-spew v1.1.1 // indirect
+       github.com/ghodss/yaml v1.0.0 // indirect
+       github.com/go-openapi/jsonpointer v0.19.5 // indirect
+       github.com/go-openapi/jsonreference v0.19.6 // indirect
+       github.com/go-openapi/spec v0.20.4 // indirect
+       github.com/go-openapi/swag v0.19.15 // indirect
        github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
+       github.com/josharian/intern v1.0.0 // indirect
+       github.com/mailru/easyjson v0.7.7 // indirect
        github.com/pmezard/go-difflib v1.0.0 // indirect
+       github.com/russross/blackfriday/v2 v2.1.0 // indirect
+       github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
        github.com/stretchr/objx v0.1.0 // indirect
-       golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 // indirect
-       gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
+       github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 // indirect
+       github.com/urfave/cli/v2 v2.3.0 // indirect
+       golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect
+       golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e // indirect
+       golang.org/x/text v0.3.7 // indirect
+       golang.org/x/tools v0.1.7 // indirect
+       gopkg.in/yaml.v2 v2.4.0 // indirect
+       gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 // indirect
 )
index 4b3557b..f7a6405 100644 (file)
@@ -1,6 +1,38 @@
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
+github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
+github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
+github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
+github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/confluentinc/confluent-kafka-go v1.8.2 h1:PBdbvYpyOdFLehj8j+9ba7FL4c4Moxn79gy9cYKxG5E=
+github.com/confluentinc/confluent-kafka-go v1.8.2/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
+github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
+github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU=
+github.com/cpuguy83/go-md2man/v2 v2.0.1/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/jsonreference v0.19.4/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
+github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
+github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
+github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
+github.com/go-openapi/spec v0.19.14/go.mod h1:gwrgJS15eCUgjLpMjBJmbZezCsw88LmgeEip0M63doA=
+github.com/go-openapi/spec v0.20.0/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU=
+github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
+github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.19.11/go.mod h1:Uc0gKkdR+ojzsEpjh39QChyu92vPgIr72POcgHMAgSY=
+github.com/go-openapi/swag v0.19.12/go.mod h1:eFdyEBkTdoAf/9RXBvj4cr1nH7GD8Kzo5HTt47gr72M=
+github.com/go-openapi/swag v0.19.15 h1:D2NRCBzS9/pEY3gP9Nl8aDqGUcPFrwG2p+CNFrLyrCM=
+github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
 github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
 github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
 github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
@@ -9,18 +41,97 @@ github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxC
 github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
 github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4=
 github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
+github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
+github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
+github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
 github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=
 github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
 github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/swaggo/files v0.0.0-20190704085106-630677cd5c14/go.mod h1:gxQT6pBGRuIGunNf/+tSOB5OHvguWi8Tbt82WOkf35E=
+github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 h1:+iNTcqQJy0OZ5jk6a5NLib47eqXK8uYcPX+O4+cBpEM=
+github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w=
+github.com/swaggo/http-swagger v1.1.2 h1:ikcSD+EUOx+2oNZ2N6u8IYa8ScOsAvE7Jh+E1dW6i94=
+github.com/swaggo/http-swagger v1.1.2/go.mod h1:mX5nhypDmoSt4iw2mc5aKXxRFvp1CLLcCiog2B9M+Ro=
+github.com/swaggo/swag v1.7.0/go.mod h1:BdPIL73gvS9NBsdi7M1JOxLvlbfvNRaBP8m6WT6Aajo=
+github.com/swaggo/swag v1.7.8 h1:w249t0l/kc/DKMGlS0fppNJQxKyJ8heNaUWB6nsH3zc=
+github.com/swaggo/swag v1.7.8/go.mod h1:gZ+TJ2w/Ve1RwQsA2IRoSOTidHz6DX+PIG8GWvbnoLU=
+github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
+github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
+golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
+golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
+golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20201207224615-747e23833adb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
+golang.org/x/net v0.0.0-20210421230115-4e50805a0758/go.mod h1:72T/g9IO56b78aLF+1Kcs5dz7/ng1VjMUvfKvpfy+jM=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d h1:20cMwl2fHAzkJMEA+8J4JgqBQcQGzbisXo31MIeenXI=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
 golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210420072515-93ed5bcd2bfe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e h1:WUoyKPm6nCo1BnNUvPGnFG3T5DUVem42yDJZZ4CNxMA=
+golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.0.0-20201120155355-20be4ac4bd6e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.0.0-20201208062317-e652b2f42cc7/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
+golang.org/x/tools v0.1.7 h1:6j8CgantCy3yc8JGBqkDLMKWqZ0RDU2g1HVgacojGWQ=
+golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 h1:tQIYjPdBoyREyB9XMu+nnTclpTYkz2zFM+lzLJFO4gQ=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
index e03c40a..7582e9c 100644 (file)
@@ -24,6 +24,7 @@ import (
        "encoding/json"
        "fmt"
        "os"
+       "path/filepath"
        "strconv"
 
        log "github.com/sirupsen/logrus"
@@ -35,6 +36,7 @@ type Config struct {
        InfoProducerPort       int
        InfoCoordinatorAddress string
        DMaaPMRAddress         string
+       KafkaBootstrapServers  string
        ProducerCertPath       string
        ProducerKeyPath        string
 }
@@ -45,6 +47,7 @@ func New() *Config {
                InfoProducerPort:       getEnvAsInt("INFO_PRODUCER_PORT", 8085),
                InfoCoordinatorAddress: getEnv("INFO_COORD_ADDR", "https://informationservice:8434"),
                DMaaPMRAddress:         getEnv("DMAAP_MR_ADDR", "https://message-router.onap:3905"),
+               KafkaBootstrapServers:  getEnv("KAFKA_BOOTSTRAP_SERVERS", "localhost:9092"),
                ProducerCertPath:       getEnv("PRODUCER_CERT_PATH", "security/producer.crt"),
                ProducerKeyPath:        getEnv("PRODUCER_KEY_PATH", "security/producer.key"),
                LogLevel:               getLogLevel(),
@@ -83,8 +86,8 @@ func getLogLevel() log.Level {
        }
 }
 
-func GetJobTypesFromConfiguration(configFile string) ([]TypeDefinition, error) {
-       typeDefsByte, err := os.ReadFile(configFile)
+func GetJobTypesFromConfiguration(configFolder string) ([]TypeDefinition, error) {
+       typeDefsByte, err := os.ReadFile(filepath.Join(configFolder, "type_config.json"))
        if err != nil {
                return nil, err
        }
@@ -96,5 +99,35 @@ func GetJobTypesFromConfiguration(configFile string) ([]TypeDefinition, error) {
                return nil, err
        }
 
+       kafkaTypeSchema, err := getTypeSchema(filepath.Join(configFolder, "typeSchemaKafka.json"))
+       if err != nil {
+               return nil, err
+       }
+
+       dMaaPTypeSchema, err := getTypeSchema(filepath.Join(configFolder, "typeSchemaDmaap.json"))
+       if err != nil {
+               return nil, err
+       }
+
+       for i, typeDef := range typeDefs.Types {
+               if typeDef.IsKafkaType() {
+                       typeDefs.Types[i].TypeSchema = kafkaTypeSchema
+               } else {
+                       typeDefs.Types[i].TypeSchema = dMaaPTypeSchema
+               }
+       }
        return typeDefs.Types, nil
 }
+
+func getTypeSchema(schemaFile string) (interface{}, error) {
+       typeDefsByte, err := os.ReadFile(schemaFile)
+       if err != nil {
+               return nil, err
+       }
+       var schema interface{}
+       err = json.Unmarshal(typeDefsByte, &schema)
+       if err != nil {
+               return nil, err
+       }
+       return schema, nil
+}
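
GetJobTypesFromConfiguration now takes the folder holding type_config.json and the two schema files instead of a single file path. A minimal usage sketch, assuming the files sit in a local "configs" folder (folder name and logging are illustrative):

    package main

    import (
        log "github.com/sirupsen/logrus"

        "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
    )

    func main() {
        cfg := config.New()
        types, err := config.GetJobTypesFromConfiguration("configs")
        if err != nil {
            log.Fatalf("Cannot load type configuration: %v", err)
        }
        for _, t := range types {
            if t.IsKafkaType() {
                log.Infof("Kafka type %v reads topic %v from %v", t.Identity, t.KafkaInputTopic, cfg.KafkaBootstrapServers)
            } else {
                log.Infof("DMaaP type %v polls %v%v", t.Identity, cfg.DMaaPMRAddress, t.DMaaPTopicURL)
            }
        }
    }
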
index faf5900..0e081a8 100644 (file)
@@ -22,6 +22,7 @@ package config
 
 import (
        "bytes"
+       "encoding/json"
        "os"
        "path/filepath"
        "testing"
@@ -37,6 +38,7 @@ func TestNew_envVarsSetConfigContainSetValues(t *testing.T) {
        os.Setenv("INFO_PRODUCER_PORT", "8095")
        os.Setenv("INFO_COORD_ADDR", "infoCoordAddr")
        os.Setenv("DMAAP_MR_ADDR", "mrHost:3908")
+       os.Setenv("KAFKA_BOOTSTRAP_SERVERS", "localhost:9093")
        os.Setenv("PRODUCER_CERT_PATH", "cert")
        os.Setenv("PRODUCER_KEY_PATH", "key")
        t.Cleanup(func() {
@@ -48,6 +50,7 @@ func TestNew_envVarsSetConfigContainSetValues(t *testing.T) {
                InfoProducerPort:       8095,
                InfoCoordinatorAddress: "infoCoordAddr",
                DMaaPMRAddress:         "mrHost:3908",
+               KafkaBootstrapServers:  "localhost:9093",
                ProducerCertPath:       "cert",
                ProducerKeyPath:        "key",
        }
@@ -72,6 +75,7 @@ func TestNew_faultyIntValueSetConfigContainDefaultValueAndWarnInLog(t *testing.T
                InfoProducerPort:       8085,
                InfoCoordinatorAddress: "https://informationservice:8434",
                DMaaPMRAddress:         "https://message-router.onap:3905",
+               KafkaBootstrapServers:  "localhost:9092",
                ProducerCertPath:       "security/producer.crt",
                ProducerKeyPath:        "security/producer.key",
        }
@@ -98,6 +102,7 @@ func TestNew_envFaultyLogLevelConfigContainDefaultValues(t *testing.T) {
                InfoProducerPort:       8085,
                InfoCoordinatorAddress: "https://informationservice:8434",
                DMaaPMRAddress:         "https://message-router.onap:3905",
+               KafkaBootstrapServers:  "localhost:9092",
                ProducerCertPath:       "security/producer.crt",
                ProducerKeyPath:        "security/producer.key",
        }
@@ -109,29 +114,61 @@ func TestNew_envFaultyLogLevelConfigContainDefaultValues(t *testing.T) {
        assertions.Contains(logString, "Invalid log level: wrong. Log level will be Info!")
 }
 
-const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1"}]}`
-
-func TestGetTypesFromConfiguration_fileOkShouldReturnSliceOfTypeDefinitions(t *testing.T) {
+func TestGetJobTypesFromConfiguration_fileOkShouldReturnSliceOfTypeDefinitions(t *testing.T) {
        assertions := require.New(t)
+       typesDir := CreateTypeConfigFiles(t)
+       t.Cleanup(func() {
+               os.RemoveAll(typesDir)
+       })
+
+       var typeSchemaObj interface{}
+       json.Unmarshal([]byte(typeSchemaFileContent), &typeSchemaObj)
+
+       types, err := GetJobTypesFromConfiguration(typesDir)
+
+       wantedDMaaPType := TypeDefinition{
+               Identity:      "type1",
+               DMaaPTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+               TypeSchema:    typeSchemaObj,
+       }
+       wantedKafkaType := TypeDefinition{
+               Identity:        "type2",
+               KafkaInputTopic: "TestTopic",
+               TypeSchema:      typeSchemaObj,
+       }
+       wantedTypes := []TypeDefinition{wantedDMaaPType, wantedKafkaType}
+       assertions.EqualValues(wantedTypes, types)
+       assertions.Nil(err)
+}
+
+const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1"}, {"id": "type2", "kafkaInputTopic": "TestTopic"}]}`
+const typeSchemaFileContent = `{
+       "$schema": "http://json-schema.org/draft-04/schema#",
+       "type": "object",
+       "properties": {
+         "filter": {
+                "type": "string"
+          }
+       },
+       "additionalProperties": false
+  }`
+
+func CreateTypeConfigFiles(t *testing.T) string {
        typesDir, err := os.MkdirTemp("", "configs")
        if err != nil {
                t.Errorf("Unable to create temporary directory for types due to: %v", err)
        }
        fname := filepath.Join(typesDir, "type_config.json")
-       t.Cleanup(func() {
-               os.RemoveAll(typesDir)
-       })
        if err = os.WriteFile(fname, []byte(typeDefinition), 0666); err != nil {
                t.Errorf("Unable to create temporary config file for types due to: %v", err)
        }
-
-       types, err := GetJobTypesFromConfiguration(fname)
-
-       wantedType := TypeDefinition{
-               Id:            "type1",
-               DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+       fname = filepath.Join(typesDir, "typeSchemaDmaap.json")
+       if err = os.WriteFile(fname, []byte(typeSchemaFileContent), 0666); err != nil {
+               t.Errorf("Unable to create temporary schema file for DMaaP type due to: %v", err)
        }
-       wantedTypes := []TypeDefinition{wantedType}
-       assertions.EqualValues(wantedTypes, types)
-       assertions.Nil(err)
+       fname = filepath.Join(typesDir, "typeSchemaKafka.json")
+       if err = os.WriteFile(fname, []byte(typeSchemaFileContent), 0666); err != nil {
+               t.Errorf("Unable to create temporary schema file for Kafka type due to: %v", err)
+       }
+       return typesDir
 }
index 83ed43f..1dd0ad1 100644 (file)
@@ -32,11 +32,20 @@ import (
 
 const registerTypePath = "/data-producer/v1/info-types/"
 const registerProducerPath = "/data-producer/v1/info-producers/"
-const typeSchema = `{"type": "object","properties": {},"additionalProperties": false}`
 
 type TypeDefinition struct {
-       Id            string `json:"id"`
-       DmaapTopicURL string `json:"dmaapTopicUrl"`
+       Identity        string `json:"id"`
+       DMaaPTopicURL   string `json:"dmaapTopicUrl"`
+       KafkaInputTopic string `json:"kafkaInputTopic"`
+       TypeSchema      interface{}
+}
+
+func (td TypeDefinition) IsKafkaType() bool {
+       return td.KafkaInputTopic != ""
+}
+
+func (td TypeDefinition) IsDMaaPType() bool {
+       return td.DMaaPTopicURL != ""
 }
 
 type ProducerRegistrationInfo struct {
@@ -64,8 +73,9 @@ func NewRegistratorImpl(infoCoordAddr string, client restclient.HTTPClient) *Reg
 
 func (r RegistratorImpl) RegisterTypes(jobTypes []TypeDefinition) error {
        for _, jobType := range jobTypes {
-               body := fmt.Sprintf(`{"info_job_data_schema": %v}`, typeSchema)
-               if error := restclient.Put(r.infoCoordinatorAddress+registerTypePath+url.PathEscape(jobType.Id), []byte(body), r.httpClient); error != nil {
+               s, _ := json.Marshal(jobType.TypeSchema)
+               body := fmt.Sprintf(`{"info_job_data_schema": %v}`, string(s))
+               if error := restclient.Put(r.infoCoordinatorAddress+registerTypePath+url.PathEscape(jobType.Identity), []byte(body), r.httpClient); error != nil {
                        return error
                }
                log.Debugf("Registered type: %v", jobType)
index 324aed0..b2f10cc 100644 (file)
@@ -21,6 +21,7 @@
 package config
 
 import (
+       "encoding/json"
        "io/ioutil"
        "net/http"
        "testing"
@@ -39,8 +40,17 @@ func TestRegisterTypes(t *testing.T) {
                StatusCode: http.StatusCreated,
        }, nil)
 
+       schemaString := `{
+               "type": "object",
+               "properties": {},
+               "additionalProperties": false
+               }`
+       var schemaObj interface{}
+       json.Unmarshal([]byte(schemaString), &schemaObj)
+
        type1 := TypeDefinition{
-               Id: "Type1",
+               Identity:   "Type1",
+               TypeSchema: schemaObj,
        }
        types := []TypeDefinition{type1}
 
@@ -59,7 +69,7 @@ func TestRegisterTypes(t *testing.T) {
        assertions.Equal("/data-producer/v1/info-types/Type1", actualRequest.URL.Path)
        assertions.Equal("application/json", actualRequest.Header.Get("Content-Type"))
        body, _ := ioutil.ReadAll(actualRequest.Body)
-       expectedBody := []byte(`{"info_job_data_schema": {"type": "object","properties": {},"additionalProperties": false}}`)
+       expectedBody := []byte(`{"info_job_data_schema": {"additionalProperties":false,"properties":{},"type":"object"}}`)
        assertions.Equal(expectedBody, body)
        clientMock.AssertNumberOfCalls(t, "Do", 1)
 }
index 867894f..86bfe05 100644 (file)
@@ -22,28 +22,36 @@ package jobs
 
 import (
        "fmt"
+       "strings"
        "sync"
        "time"
 
+       "github.com/confluentinc/confluent-kafka-go/kafka"
        log "github.com/sirupsen/logrus"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/restclient"
 )
 
 type TypeData struct {
-       TypeId        string `json:"id"`
-       DMaaPTopicURL string `json:"dmaapTopicUrl"`
-       jobsHandler   *jobsHandler
+       Identity    string `json:"id"`
+       jobsHandler *jobsHandler
 }
 
+type sourceType string
+
+const dMaaPSource = sourceType("dmaap")
+const kafkaSource = sourceType("kafka")
+
 type JobInfo struct {
-       Owner            string      `json:"owner"`
-       LastUpdated      string      `json:"last_updated"`
-       InfoJobIdentity  string      `json:"info_job_identity"`
-       TargetUri        string      `json:"target_uri"`
-       InfoJobData      interface{} `json:"info_job_data"`
-       InfoTypeIdentity string      `json:"info_type_identity"`
-}
+       Owner            string     `json:"owner"`
+       LastUpdated      string     `json:"last_updated"`
+       InfoJobIdentity  string     `json:"info_job_identity"`
+       TargetUri        string     `json:"target_uri"`
+       InfoJobData      Parameters `json:"info_job_data"`
+       InfoTypeIdentity string     `json:"info_type_identity"`
+       sourceType       sourceType
+} // @name JobInfo
 
 type JobTypesManager interface {
        LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition
@@ -59,14 +67,16 @@ type JobsManagerImpl struct {
        allTypes         map[string]TypeData
        pollClient       restclient.HTTPClient
        mrAddress        string
+       kafkaFactory     kafkaclient.KafkaFactory
        distributeClient restclient.HTTPClient
 }
 
-func NewJobsManagerImpl(pollClient restclient.HTTPClient, mrAddr string, distributeClient restclient.HTTPClient) *JobsManagerImpl {
+func NewJobsManagerImpl(pollClient restclient.HTTPClient, mrAddr string, kafkaFactory kafkaclient.KafkaFactory, distributeClient restclient.HTTPClient) *JobsManagerImpl {
        return &JobsManagerImpl{
                allTypes:         make(map[string]TypeData),
                pollClient:       pollClient,
                mrAddress:        mrAddr,
+               kafkaFactory:     kafkaFactory,
                distributeClient: distributeClient,
        }
 }
@@ -74,6 +84,7 @@ func NewJobsManagerImpl(pollClient restclient.HTTPClient, mrAddr string, distrib
 func (jm *JobsManagerImpl) AddJobFromRESTCall(ji JobInfo) error {
        if err := jm.validateJobInfo(ji); err == nil {
                typeData := jm.allTypes[ji.InfoTypeIdentity]
+               ji.sourceType = typeData.jobsHandler.sourceType
                typeData.jobsHandler.addJobCh <- ji
                log.Debug("Added job: ", ji)
                return nil
@@ -84,7 +95,7 @@ func (jm *JobsManagerImpl) AddJobFromRESTCall(ji JobInfo) error {
 
 func (jm *JobsManagerImpl) DeleteJobFromRESTCall(jobId string) {
        for _, typeData := range jm.allTypes {
-               log.Debugf("Deleting job %v from type %v", jobId, typeData.TypeId)
+               log.Debugf("Deleting job %v from type %v", jobId, typeData.Identity)
                typeData.jobsHandler.deleteJobCh <- jobId
        }
        log.Debug("Deleted job: ", jobId)
@@ -106,10 +117,12 @@ func (jm *JobsManagerImpl) validateJobInfo(ji JobInfo) error {
 
 func (jm *JobsManagerImpl) LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition {
        for _, typeDef := range types {
-               jm.allTypes[typeDef.Id] = TypeData{
-                       TypeId:        typeDef.Id,
-                       DMaaPTopicURL: typeDef.DmaapTopicURL,
-                       jobsHandler:   newJobsHandler(typeDef.Id, typeDef.DmaapTopicURL, jm.pollClient, jm.distributeClient),
+               if typeDef.DMaaPTopicURL == "" && typeDef.KafkaInputTopic == "" {
+                       log.Fatal("DMaaPTopicURL or KafkaInputTopic must be defined for type: ", typeDef.Identity)
+               }
+               jm.allTypes[typeDef.Identity] = TypeData{
+                       Identity:    typeDef.Identity,
+                       jobsHandler: newJobsHandler(typeDef, jm.mrAddress, jm.kafkaFactory, jm.pollClient, jm.distributeClient),
                }
        }
        return types
@@ -126,7 +139,7 @@ func (jm *JobsManagerImpl) GetSupportedTypes() []string {
 func (jm *JobsManagerImpl) StartJobsForAllTypes() {
        for _, jobType := range jm.allTypes {
 
-               go jobType.jobsHandler.startPollingAndDistribution(jm.mrAddress)
+               go jobType.jobsHandler.startPollingAndDistribution()
 
        }
 }
@@ -134,30 +147,35 @@ func (jm *JobsManagerImpl) StartJobsForAllTypes() {
 type jobsHandler struct {
        mu               sync.Mutex
        typeId           string
-       topicUrl         string
+       sourceType       sourceType
+       pollingAgent     pollingAgent
        jobs             map[string]job
        addJobCh         chan JobInfo
        deleteJobCh      chan string
-       pollClient       restclient.HTTPClient
        distributeClient restclient.HTTPClient
 }
 
-func newJobsHandler(typeId string, topicURL string, pollClient restclient.HTTPClient, distributeClient restclient.HTTPClient) *jobsHandler {
+func newJobsHandler(typeDef config.TypeDefinition, mRAddress string, kafkaFactory kafkaclient.KafkaFactory, pollClient restclient.HTTPClient, distributeClient restclient.HTTPClient) *jobsHandler {
+       pollingAgent := createPollingAgent(typeDef, mRAddress, pollClient, kafkaFactory, typeDef.KafkaInputTopic)
+       sourceType := kafkaSource
+       if typeDef.DMaaPTopicURL != "" {
+               sourceType = dMaaPSource
+       }
        return &jobsHandler{
-               typeId:           typeId,
-               topicUrl:         topicURL,
+               typeId:           typeDef.Identity,
+               sourceType:       sourceType,
+               pollingAgent:     pollingAgent,
                jobs:             make(map[string]job),
                addJobCh:         make(chan JobInfo),
                deleteJobCh:      make(chan string),
-               pollClient:       pollClient,
                distributeClient: distributeClient,
        }
 }
 
-func (jh *jobsHandler) startPollingAndDistribution(mRAddress string) {
+func (jh *jobsHandler) startPollingAndDistribution() {
        go func() {
                for {
-                       jh.pollAndDistributeMessages(mRAddress)
+                       jh.pollAndDistributeMessages()
                }
        }()
 
@@ -168,19 +186,20 @@ func (jh *jobsHandler) startPollingAndDistribution(mRAddress string) {
        }()
 }
 
-func (jh *jobsHandler) pollAndDistributeMessages(mRAddress string) {
+func (jh *jobsHandler) pollAndDistributeMessages() {
        log.Debugf("Processing jobs for type: %v", jh.typeId)
-       messagesBody, error := restclient.Get(mRAddress+jh.topicUrl, jh.pollClient)
+       messagesBody, error := jh.pollingAgent.pollMessages()
        if error != nil {
-               log.Warn("Error getting data from MR. Cause: ", error)
-               time.Sleep(time.Minute) // Must wait before trying to call MR again
+               log.Warn("Error getting data from source. Cause: ", error)
+               time.Sleep(time.Minute) // Must wait before trying to call data source again
+               return
        }
-       log.Debug("Received messages: ", string(messagesBody))
        jh.distributeMessages(messagesBody)
 }
 
 func (jh *jobsHandler) distributeMessages(messages []byte) {
-       if len(messages) > 2 {
+       if string(messages) != "[]" && len(messages) > 0 { // MR returns an empty array if there are no messages.
+               log.Debug("Distributing messages: ", string(messages))
                jh.mu.Lock()
                defer jh.mu.Unlock()
                for _, job := range jh.jobs {
@@ -234,6 +253,61 @@ func (jh *jobsHandler) deleteJob(deletedJob string) {
        jh.mu.Unlock()
 }
 
+type pollingAgent interface {
+       pollMessages() ([]byte, error)
+}
+
+func createPollingAgent(typeDef config.TypeDefinition, mRAddress string, pollClient restclient.HTTPClient, kafkaFactory kafkaclient.KafkaFactory, topicID string) pollingAgent {
+       if typeDef.DMaaPTopicURL != "" {
+               return dMaaPPollingAgent{
+                       messageRouterURL: mRAddress + typeDef.DMaaPTopicURL,
+                       pollClient:       pollClient,
+               }
+       } else {
+               return newKafkaPollingAgent(kafkaFactory, typeDef.KafkaInputTopic)
+       }
+}
+
+type dMaaPPollingAgent struct {
+       messageRouterURL string
+       pollClient       restclient.HTTPClient
+}
+
+func (pa dMaaPPollingAgent) pollMessages() ([]byte, error) {
+       return restclient.Get(pa.messageRouterURL, pa.pollClient)
+}
+
+type kafkaPollingAgent struct {
+       kafkaClient kafkaclient.KafkaClient
+}
+
+func newKafkaPollingAgent(kafkaFactory kafkaclient.KafkaFactory, topicID string) kafkaPollingAgent {
+       c, err := kafkaclient.NewKafkaClient(kafkaFactory, topicID)
+       if err != nil {
+               log.Fatalf("Cannot create Kafka client for topic: %v, error details: %v\n", topicID, err)
+       }
+       return kafkaPollingAgent{
+               kafkaClient: c,
+       }
+}
+
+func (pa kafkaPollingAgent) pollMessages() ([]byte, error) {
+       msg, err := pa.kafkaClient.ReadMessage()
+       if err == nil {
+               return msg, nil
+       } else {
+               if isKafkaTimedOutError(err) {
+                       return []byte(""), nil
+               }
+               return nil, err
+       }
+}
+
+func isKafkaTimedOutError(err error) bool {
+       kafkaErr, ok := err.(kafka.Error)
+       return ok && kafkaErr.Code() == kafka.ErrTimedOut
+}
+
 type job struct {
        jobInfo         JobInfo
        client          restclient.HTTPClient
@@ -242,6 +316,7 @@ type job struct {
 }
 
 func newJob(j JobInfo, c restclient.HTTPClient) job {
+
        return job{
                jobInfo:         j,
                client:          c,
@@ -250,7 +325,24 @@ func newJob(j JobInfo, c restclient.HTTPClient) job {
        }
 }
 
+type Parameters struct {
+       BufferTimeout BufferTimeout `json:"bufferTimeout"`
+} // @name Parameters
+
+type BufferTimeout struct {
+       MaxSize            int   `json:"maxSize"`
+       MaxTimeMiliseconds int64 `json:"maxTimeMiliseconds"`
+} // @name BufferTimeout
+
 func (j *job) start() {
+       if j.isJobBuffered() {
+               j.startReadingMessagesBuffered()
+       } else {
+               j.startReadingSingleMessages()
+       }
+}
+
+func (j *job) startReadingSingleMessages() {
 out:
        for {
                select {
@@ -263,10 +355,107 @@ out:
        }
 }
 
+func (j *job) startReadingMessagesBuffered() {
+out:
+       for {
+               select {
+               case <-j.controlChannel:
+                       log.Debug("Stop distribution for job: ", j.jobInfo.InfoJobIdentity)
+                       break out
+               default:
+                       msgs := j.read(j.jobInfo.InfoJobData.BufferTimeout)
+                       if len(msgs) > 0 {
+                               j.sendMessagesToConsumer(msgs)
+                       }
+               }
+       }
+}
+
+func (j *job) read(bufferParams BufferTimeout) []byte {
+       wg := sync.WaitGroup{}
+       wg.Add(bufferParams.MaxSize)
+       rawMsgs := make([][]byte, 0, bufferParams.MaxSize)
+       c := make(chan struct{})
+       go func() {
+               i := 0
+       out:
+               for {
+                       select {
+                       case <-c:
+                               break out
+                       case msg := <-j.messagesChannel:
+                               rawMsgs = append(rawMsgs, msg)
+                               i++
+                               wg.Done()
+                               if i == bufferParams.MaxSize {
+                                       break out
+                               }
+                       }
+               }
+       }()
+       j.waitTimeout(&wg, time.Duration(bufferParams.MaxTimeMiliseconds)*time.Millisecond)
+       close(c)
+       return getAsJSONArray(rawMsgs)
+}
+
+func getAsJSONArray(rawMsgs [][]byte) []byte {
+       if len(rawMsgs) == 0 {
+               return []byte("")
+       }
+       joined := ""
+       for i := 0; i < len(rawMsgs); i++ {
+               joined = joined + makeIntoString(rawMsgs[i])
+               joined = addSeparatorIfNeeded(joined, i, len(rawMsgs))
+       }
+       return []byte(wrapInJSONArray(joined))
+}
+
+func makeIntoString(rawMsg []byte) string {
+       return `"` + strings.ReplaceAll(string(rawMsg), "\"", "\\\"") + `"`
+}
+
+func addSeparatorIfNeeded(joined string, position, length int) string {
+       if position < length-1 {
+               joined = joined + ","
+       }
+       return joined
+}
+
+func wrapInJSONArray(joined string) string {
+       return "[" + joined + "]"
+}
+
+func (j *job) waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
+       c := make(chan struct{})
+       go func() {
+               defer close(c)
+               wg.Wait()
+       }()
+       select {
+       case <-c:
+               return false // completed normally
+       case <-time.After(timeout):
+               return true // timed out
+       }
+}
+
 func (j *job) sendMessagesToConsumer(messages []byte) {
        log.Debug("Processing job: ", j.jobInfo.InfoJobIdentity)
-       if postErr := restclient.Post(j.jobInfo.TargetUri, messages, j.client); postErr != nil {
+       contentType := restclient.ContentTypeJSON
+       if j.isJobKafka() && !j.isJobBuffered() {
+               contentType = restclient.ContentTypePlain
+       }
+       if postErr := restclient.Post(j.jobInfo.TargetUri, messages, contentType, j.client); postErr != nil {
                log.Warnf("Error posting data for job: %v. Cause: %v", j.jobInfo, postErr)
+               return
        }
        log.Debugf("Messages for job: %v distributed to consumer: %v", j.jobInfo.InfoJobIdentity, j.jobInfo.Owner)
 }
+
+func (j *job) isJobBuffered() bool {
+       return j.jobInfo.InfoJobData.BufferTimeout.MaxSize > 0 && j.jobInfo.InfoJobData.BufferTimeout.MaxTimeMiliseconds > 0
+}
+
+func (j *job) isJobKafka() bool {
+       return j.jobInfo.sourceType == kafkaSource
+}
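
Putting the jobs changes together: types are loaded, one jobsHandler per type polls its source (DMaaP or Kafka), and jobs added via the REST callback are distributed to their target URIs, buffered or one message at a time depending on bufferTimeout. A rough wiring sketch, assuming a zero-value KafkaFactoryImpl is usable as in the tests and that *http.Client can stand in for restclient.HTTPClient:

    package main

    import (
        "net/http"

        log "github.com/sirupsen/logrus"

        "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
    )

    func main() {
        cfg := config.New()
        types, err := config.GetJobTypesFromConfiguration("configs")
        if err != nil {
            log.Fatalf("Cannot load type configuration: %v", err)
        }

        // The real wiring may need to pass cfg.KafkaBootstrapServers into the factory.
        jobsManager := jobs.NewJobsManagerImpl(&http.Client{}, cfg.DMaaPMRAddress,
            kafkaclient.KafkaFactoryImpl{}, &http.Client{})
        jobsManager.LoadTypesFromConfiguration(types)
        jobsManager.StartJobsForAllTypes()

        // A job as it would arrive through the /info_job callback.
        job := jobs.JobInfo{
            InfoJobIdentity:  "job1",
            InfoTypeIdentity: "STD_Fault_Messages", // type id from configs/type_config.json
            TargetUri:        "http://consumer:8088/messages",
        }
        if err := jobsManager.AddJobFromRESTCall(job); err != nil {
            log.Warnf("Could not add job: %v", err)
        }

        select {} // keep the polling and distribution goroutines running
    }
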
index 30b4ffd..7d02104 100644 (file)
@@ -22,52 +22,62 @@ package jobs
 
 import (
        "bytes"
+       "fmt"
        "io/ioutil"
        "net/http"
+       "strconv"
        "sync"
        "testing"
        "time"
 
+       "github.com/confluentinc/confluent-kafka-go/kafka"
+       "github.com/stretchr/testify/mock"
        "github.com/stretchr/testify/require"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
+       "oransc.org/nonrtric/dmaapmediatorproducer/mocks"
 )
 
-const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1"}]}`
-
-func TestJobsManagerGetTypes_filesOkShouldReturnSliceOfTypesAndProvideSupportedTypes(t *testing.T) {
+func TestJobsManagerLoadTypesFromConfiguration_shouldReturnSliceOfTypesAndProvideSupportedTypes(t *testing.T) {
        assertions := require.New(t)
 
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
 
-       wantedType := config.TypeDefinition{
-               Id:            "type1",
-               DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+       wantedDMaaPType := config.TypeDefinition{
+               Identity:      "type1",
+               DMaaPTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+       }
+       wantedKafkaType := config.TypeDefinition{
+               Identity:        "type2",
+               KafkaInputTopic: "topic",
        }
-       wantedTypes := []config.TypeDefinition{wantedType}
+       wantedTypes := []config.TypeDefinition{wantedDMaaPType, wantedKafkaType}
 
        types := managerUnderTest.LoadTypesFromConfiguration(wantedTypes)
 
        assertions.EqualValues(wantedTypes, types)
 
        supportedTypes := managerUnderTest.GetSupportedTypes()
-       assertions.EqualValues([]string{"type1"}, supportedTypes)
+       assertions.ElementsMatch([]string{"type1", "type2"}, supportedTypes)
+       assertions.Equal(dMaaPSource, managerUnderTest.allTypes["type1"].jobsHandler.sourceType)
+       assertions.Equal(kafkaSource, managerUnderTest.allTypes["type2"].jobsHandler.sourceType)
 }
 
 func TestJobsManagerAddJobWhenTypeIsSupported_shouldAddJobToChannel(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
        wantedJob := JobInfo{
                Owner:            "owner",
                LastUpdated:      "now",
                InfoJobIdentity:  "job1",
                TargetUri:        "target",
-               InfoJobData:      "{}",
+               InfoJobData:      Parameters{},
                InfoTypeIdentity: "type1",
        }
        jobsHandler := jobsHandler{
                addJobCh: make(chan JobInfo)}
        managerUnderTest.allTypes["type1"] = TypeData{
-               TypeId:      "type1",
+               Identity:    "type1",
                jobsHandler: &jobsHandler,
        }
 
@@ -83,7 +93,7 @@ func TestJobsManagerAddJobWhenTypeIsSupported_shouldAddJobToChannel(t *testing.T
 
 func TestJobsManagerAddJobWhenTypeIsNotSupported_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
        jobInfo := JobInfo{
                InfoTypeIdentity: "type1",
        }
@@ -95,9 +105,9 @@ func TestJobsManagerAddJobWhenTypeIsNotSupported_shouldReturnError(t *testing.T)
 
 func TestJobsManagerAddJobWhenJobIdMissing_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
        managerUnderTest.allTypes["type1"] = TypeData{
-               TypeId: "type1",
+               Identity: "type1",
        }
 
        jobInfo := JobInfo{
@@ -105,14 +115,14 @@ func TestJobsManagerAddJobWhenJobIdMissing_shouldReturnError(t *testing.T) {
        }
        err := managerUnderTest.AddJobFromRESTCall(jobInfo)
        assertions.NotNil(err)
-       assertions.Equal("missing required job identity: {    <nil> type1}", err.Error())
+       assertions.Equal("missing required job identity: {    {{0 0}} type1 }", err.Error())
 }
 
 func TestJobsManagerAddJobWhenTargetUriMissing_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
        managerUnderTest.allTypes["type1"] = TypeData{
-               TypeId: "type1",
+               Identity: "type1",
        }
 
        jobInfo := JobInfo{
@@ -121,16 +131,16 @@ func TestJobsManagerAddJobWhenTargetUriMissing_shouldReturnError(t *testing.T) {
        }
        err := managerUnderTest.AddJobFromRESTCall(jobInfo)
        assertions.NotNil(err)
-       assertions.Equal("missing required target URI: {  job1  <nil> type1}", err.Error())
+       assertions.Equal("missing required target URI: {  job1  {{0 0}} type1 }", err.Error())
 }
 
 func TestJobsManagerDeleteJob_shouldSendDeleteToChannel(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", kafkaclient.KafkaFactoryImpl{}, nil)
        jobsHandler := jobsHandler{
                deleteJobCh: make(chan string)}
        managerUnderTest.allTypes["type1"] = TypeData{
-               TypeId:      "type1",
+               Identity:    "type1",
                jobsHandler: &jobsHandler,
        }
 
@@ -139,21 +149,21 @@ func TestJobsManagerDeleteJob_shouldSendDeleteToChannel(t *testing.T) {
        assertions.Equal("job2", <-jobsHandler.deleteJobCh)
 }
 
-func TestAddJobToJobsManager_shouldStartPollAndDistributeMessages(t *testing.T) {
+func TestStartJobsManagerAddDMaaPJob_shouldStartPollAndDistributeMessages(t *testing.T) {
        assertions := require.New(t)
 
        called := false
-       messages := `[{"message": {"data": "data"}}]`
+       dMaaPMessages := `[{"message": {"data": "dmaap"}}]`
        pollClientMock := NewTestClient(func(req *http.Request) *http.Response {
                if req.URL.String() == "http://mrAddr/topicUrl" {
                        assertions.Equal(req.Method, "GET")
                        body := "[]"
                        if !called {
                                called = true
-                               body = messages
+                               body = dMaaPMessages
                        }
                        return &http.Response{
-                               StatusCode: 200,
+                               StatusCode: http.StatusOK,
                                Body:       ioutil.NopCloser(bytes.NewReader([]byte(body))),
                                Header:     make(http.Header), // Must be set to non-nil value or it panics
                        }
@@ -165,9 +175,9 @@ func TestAddJobToJobsManager_shouldStartPollAndDistributeMessages(t *testing.T)
 
        wg := sync.WaitGroup{}
        distributeClientMock := NewTestClient(func(req *http.Request) *http.Response {
-               if req.URL.String() == "http://consumerHost/target" {
+               if req.URL.String() == "http://consumerHost/dmaaptarget" {
                        assertions.Equal(req.Method, "POST")
-                       assertions.Equal(messages, getBodyAsString(req, t))
+                       assertions.Equal(dMaaPMessages, getBodyAsString(req, t))
                        assertions.Equal("application/json", req.Header.Get("Content-Type"))
                        wg.Done()
                        return &http.Response{
@@ -180,25 +190,88 @@ func TestAddJobToJobsManager_shouldStartPollAndDistributeMessages(t *testing.T)
                t.Fail()
                return nil
        })
-       jobsHandler := newJobsHandler("type1", "/topicUrl", pollClientMock, distributeClientMock)
-
-       jobsManager := NewJobsManagerImpl(pollClientMock, "http://mrAddr", distributeClientMock)
-       jobsManager.allTypes["type1"] = TypeData{
+       dMaaPTypeDef := config.TypeDefinition{
+               Identity:      "type1",
                DMaaPTopicURL: "/topicUrl",
-               TypeId:        "type1",
-               jobsHandler:   jobsHandler,
        }
+       dMaaPJobsHandler := newJobsHandler(dMaaPTypeDef, "http://mrAddr", nil, pollClientMock, distributeClientMock)
 
+       jobsManager := NewJobsManagerImpl(pollClientMock, "http://mrAddr", kafkaclient.KafkaFactoryImpl{}, distributeClientMock)
+       jobsManager.allTypes["type1"] = TypeData{
+               Identity:    "type1",
+               jobsHandler: dMaaPJobsHandler,
+       }
        jobsManager.StartJobsForAllTypes()
 
-       jobInfo := JobInfo{
+       dMaaPJobInfo := JobInfo{
                InfoTypeIdentity: "type1",
                InfoJobIdentity:  "job1",
-               TargetUri:        "http://consumerHost/target",
+               TargetUri:        "http://consumerHost/dmaaptarget",
+       }
+
+       wg.Add(1) // Wait till the distribution has happened
+       err := jobsManager.AddJobFromRESTCall(dMaaPJobInfo)
+       assertions.Nil(err)
+
+       if waitTimeout(&wg, 2*time.Second) {
+               t.Error("Not all calls to server were made")
+               t.Fail()
+       }
+}
+
+func TestStartJobsManagerAddKafkaJob_shouldStartPollAndDistributeMessages(t *testing.T) {
+       assertions := require.New(t)
+
+       kafkaMessages := `1`
+       wg := sync.WaitGroup{}
+       distributeClientMock := NewTestClient(func(req *http.Request) *http.Response {
+               if req.URL.String() == "http://consumerHost/kafkatarget" {
+                       assertions.Equal(req.Method, "POST")
+                       assertions.Equal(kafkaMessages, getBodyAsString(req, t))
+                       assertions.Equal("text/plain", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               }
+               t.Error("Wrong call to client: ", req)
+               t.Fail()
+               return nil
+       })
+
+       kafkaTypeDef := config.TypeDefinition{
+               Identity:        "type2",
+               KafkaInputTopic: "topic",
+       }
+       kafkaFactoryMock := mocks.KafkaFactory{}
+       kafkaConsumerMock := mocks.KafkaConsumer{}
+       kafkaConsumerMock.On("Commit").Return([]kafka.TopicPartition{}, error(nil))
+       kafkaConsumerMock.On("Subscribe", mock.Anything).Return(error(nil))
+       kafkaConsumerMock.On("ReadMessage", mock.Anything).Return(&kafka.Message{
+               Value: []byte(kafkaMessages),
+       }, error(nil)).Once()
+       kafkaConsumerMock.On("ReadMessage", mock.Anything).Return(nil, fmt.Errorf("Just to stop"))
+       kafkaFactoryMock.On("NewKafkaConsumer", mock.Anything).Return(kafkaConsumerMock, nil)
+       kafkaJobsHandler := newJobsHandler(kafkaTypeDef, "", kafkaFactoryMock, nil, distributeClientMock)
+
+       jobsManager := NewJobsManagerImpl(nil, "", kafkaFactoryMock, distributeClientMock)
+       jobsManager.allTypes["type2"] = TypeData{
+               Identity:    "type2",
+               jobsHandler: kafkaJobsHandler,
+       }
+
+       jobsManager.StartJobsForAllTypes()
+
+       kafkaJobInfo := JobInfo{
+               InfoTypeIdentity: "type2",
+               InfoJobIdentity:  "job2",
+               TargetUri:        "http://consumerHost/kafkatarget",
        }
 
        wg.Add(1) // Wait till the distribution has happened
-       err := jobsManager.AddJobFromRESTCall(jobInfo)
+       err := jobsManager.AddJobFromRESTCall(kafkaJobInfo)
        assertions.Nil(err)
 
        if waitTimeout(&wg, 2*time.Second) {
@@ -210,7 +283,11 @@ func TestAddJobToJobsManager_shouldStartPollAndDistributeMessages(t *testing.T)
 func TestJobsHandlerDeleteJob_shouldDeleteJobFromJobsMap(t *testing.T) {
        jobToDelete := newJob(JobInfo{}, nil)
        go jobToDelete.start()
-       jobsHandler := newJobsHandler("type1", "/topicUrl", nil, nil)
+       typeDef := config.TypeDefinition{
+               Identity:      "type1",
+               DMaaPTopicURL: "/topicUrl",
+       }
+       jobsHandler := newJobsHandler(typeDef, "http://mrAddr", kafkaclient.KafkaFactoryImpl{}, nil, nil)
        jobsHandler.jobs["job1"] = jobToDelete
 
        go jobsHandler.monitorManagementChannels()
@@ -233,7 +310,11 @@ func TestJobsHandlerEmptyJobMessageBufferWhenItIsFull(t *testing.T) {
                InfoJobIdentity: "job",
        }, nil)
 
-       jobsHandler := newJobsHandler("type1", "/topicUrl", nil, nil)
+       typeDef := config.TypeDefinition{
+               Identity:      "type1",
+               DMaaPTopicURL: "/topicUrl",
+       }
+       jobsHandler := newJobsHandler(typeDef, "http://mrAddr", kafkaclient.KafkaFactoryImpl{}, nil, nil)
        jobsHandler.jobs["job1"] = job
 
        fillMessagesBuffer(job.messagesChannel)
@@ -243,6 +324,147 @@ func TestJobsHandlerEmptyJobMessageBufferWhenItIsFull(t *testing.T) {
        require.New(t).Len(job.messagesChannel, 0)
 }
 
+func TestKafkaPollingAgentTimedOut_shouldResultInEmptyMessages(t *testing.T) {
+       assertions := require.New(t)
+
+       kafkaFactoryMock := mocks.KafkaFactory{}
+       kafkaConsumerMock := mocks.KafkaConsumer{}
+       kafkaConsumerMock.On("Commit").Return([]kafka.TopicPartition{}, error(nil))
+       kafkaConsumerMock.On("Subscribe", mock.Anything).Return(error(nil))
+       kafkaConsumerMock.On("ReadMessage", mock.Anything).Return(nil, kafka.NewError(kafka.ErrTimedOut, "", false))
+       kafkaFactoryMock.On("NewKafkaConsumer", mock.Anything).Return(kafkaConsumerMock, nil)
+
+       pollingAgentUnderTest := newKafkaPollingAgent(kafkaFactoryMock, "")
+       messages, err := pollingAgentUnderTest.pollMessages()
+
+       assertions.Equal([]byte(""), messages)
+       assertions.Nil(err)
+}
+
+func TestJobWithoutParameters_shouldSendOneMessageAtATime(t *testing.T) {
+       assertions := require.New(t)
+
+       wg := sync.WaitGroup{}
+       messageNo := 1
+       distributeClientMock := NewTestClient(func(req *http.Request) *http.Response {
+               if req.URL.String() == "http://consumerHost/target" {
+                       assertions.Equal(req.Method, "POST")
+                       assertions.Equal(fmt.Sprint("message", messageNo), getBodyAsString(req, t))
+                       messageNo++
+                       assertions.Equal("text/plain", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               }
+               t.Error("Wrong call to client: ", req)
+               t.Fail()
+               return nil
+       })
+
+       jobUnderTest := newJob(JobInfo{
+               sourceType: kafkaSource,
+               TargetUri:  "http://consumerHost/target",
+       }, distributeClientMock)
+
+       wg.Add(2)
+       go jobUnderTest.start()
+
+       jobUnderTest.messagesChannel <- []byte("message1")
+       jobUnderTest.messagesChannel <- []byte("message2")
+
+       if waitTimeout(&wg, 2*time.Second) {
+               t.Error("Not all calls to server were made")
+               t.Fail()
+       }
+}
+
+func TestJobWithBufferedParameters_shouldSendMessagesTogether(t *testing.T) {
+       assertions := require.New(t)
+
+       wg := sync.WaitGroup{}
+       distributeClientMock := NewTestClient(func(req *http.Request) *http.Response {
+               if req.URL.String() == "http://consumerHost/target" {
+                       assertions.Equal(req.Method, "POST")
+                       assertions.Equal(`["{\"data\": 1}","{\"data\": 2}","ABCDEFGH"]`, getBodyAsString(req, t))
+                       assertions.Equal("application/json", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               }
+               t.Error("Wrong call to client: ", req)
+               t.Fail()
+               return nil
+       })
+
+       jobUnderTest := newJob(JobInfo{
+               TargetUri: "http://consumerHost/target",
+               InfoJobData: Parameters{
+                       BufferTimeout: BufferTimeout{
+                               MaxSize:            5,
+                               MaxTimeMiliseconds: 200,
+                       },
+               },
+       }, distributeClientMock)
+
+       wg.Add(1)
+       go jobUnderTest.start()
+
+       go func() {
+               jobUnderTest.messagesChannel <- []byte(`{"data": 1}`)
+               jobUnderTest.messagesChannel <- []byte(`{"data": 2}`)
+               jobUnderTest.messagesChannel <- []byte("ABCDEFGH")
+       }()
+
+       if waitTimeout(&wg, 2*time.Second) {
+               t.Error("Not all calls to server were made")
+               t.Fail()
+       }
+}
+
+func TestJobReadMoreThanBufferSizeMessages_shouldOnlyReturnMaxSizeNoOfMessages(t *testing.T) {
+       assertions := require.New(t)
+
+       jobUnderTest := newJob(JobInfo{}, nil)
+
+       go func() {
+               for i := 0; i < 4; i++ {
+                       jobUnderTest.messagesChannel <- []byte(strconv.Itoa(i))
+               }
+       }()
+
+       msgs := jobUnderTest.read(BufferTimeout{
+               MaxSize:            2,
+               MaxTimeMiliseconds: 200,
+       })
+
+       assertions.Equal([]byte("[\"0\",\"1\"]"), msgs)
+}
+func TestJobReadBufferedWhenTimeout_shouldOnlyReturnMessagesSentBeforeTimeout(t *testing.T) {
+       assertions := require.New(t)
+
+       jobUnderTest := newJob(JobInfo{}, nil)
+
+       go func() {
+               for i := 0; i < 4; i++ {
+                       time.Sleep(10 * time.Millisecond)
+                       jobUnderTest.messagesChannel <- []byte(strconv.Itoa(i))
+               }
+       }()
+
+       msgs := jobUnderTest.read(BufferTimeout{
+               MaxSize:            2,
+               MaxTimeMiliseconds: 30,
+       })
+
+       assertions.Equal([]byte("[\"0\",\"1\"]"), msgs)
+}
+
 func fillMessagesBuffer(mc chan []byte) {
        for i := 0; i < cap(mc); i++ {
                mc <- []byte("msg")
diff --git a/dmaap-mediator-producer/internal/kafkaclient/kafkaclient.go b/dmaap-mediator-producer/internal/kafkaclient/kafkaclient.go
new file mode 100644 (file)
index 0000000..16abcb4
--- /dev/null
@@ -0,0 +1,94 @@
+// -
+//   ========================LICENSE_START=================================
+//   O-RAN-SC
+//   %%
+//   Copyright (C) 2021: Nordix Foundation
+//   %%
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//        http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+//   ========================LICENSE_END===================================
+//
+
+package kafkaclient
+
+import (
+       "time"
+
+       "github.com/confluentinc/confluent-kafka-go/kafka"
+)
+
+type KafkaFactory interface {
+       NewKafkaConsumer(topicID string) (KafkaConsumer, error)
+}
+
+type KafkaFactoryImpl struct {
+       BootstrapServer string
+}
+
+func (kf KafkaFactoryImpl) NewKafkaConsumer(topicID string) (KafkaConsumer, error) {
+       consumer, err := kafka.NewConsumer(&kafka.ConfigMap{
+               "bootstrap.servers": kf.BootstrapServer,
+               "group.id":          "dmaap-mediator-producer",
+               "auto.offset.reset": "earliest",
+       })
+       if err != nil {
+               return nil, err
+       }
+       return KafkaConsumerImpl{consumer: consumer}, nil
+}
+
+func NewKafkaClient(factory KafkaFactory, topicID string) (KafkaClient, error) {
+       consumer, err := factory.NewKafkaConsumer(topicID)
+       if err != nil {
+               return KafkaClient{}, err
+       }
+       consumer.Commit()
+       err = consumer.Subscribe(topicID)
+       if err != nil {
+               return KafkaClient{}, err
+       }
+       return KafkaClient{consumer: consumer}, nil
+}
+
+type KafkaClient struct {
+       consumer KafkaConsumer
+}
+
+func (kc KafkaClient) ReadMessage() ([]byte, error) {
+       msg, err := kc.consumer.ReadMessage(time.Second)
+       if err != nil {
+               return nil, err
+       }
+       return msg.Value, nil
+}
+
+type KafkaConsumer interface {
+       Commit() ([]kafka.TopicPartition, error)
+       Subscribe(topic string) (err error)
+       ReadMessage(timeout time.Duration) (*kafka.Message, error)
+}
+
+type KafkaConsumerImpl struct {
+       consumer *kafka.Consumer
+}
+
+func (kc KafkaConsumerImpl) Commit() ([]kafka.TopicPartition, error) {
+       return kc.consumer.Commit()
+}
+
+func (kc KafkaConsumerImpl) Subscribe(topic string) error {
+       return kc.consumer.Subscribe(topic, nil)
+}
+
+func (kc KafkaConsumerImpl) ReadMessage(timeout time.Duration) (*kafka.Message, error) {
+       return kc.consumer.ReadMessage(timeout)
+}
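
As a rough illustration only: the factory, client, and consumer types above are intended to be wired together the way the new Kafka polling agent in the jobs package uses them. A minimal sketch under assumed values (the bootstrap server "localhost:9092" and the topic "exampleTopic" are placeholders, and error handling is trimmed):

    package main

    import (
            "fmt"
            "log"

            "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
    )

    func main() {
            // Placeholder bootstrap server; the producer reads the real value from its configuration.
            factory := kafkaclient.KafkaFactoryImpl{BootstrapServer: "localhost:9092"}
            client, err := kafkaclient.NewKafkaClient(factory, "exampleTopic")
            if err != nil {
                    log.Fatalf("unable to create Kafka client: %v", err)
            }
            for {
                    // Each call polls for up to one second and returns the raw message value.
                    msg, err := client.ReadMessage()
                    if err == nil {
                            fmt.Println(string(msg))
                    }
            }
    }
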
index 9a827e7..a7582c2 100644 (file)
@@ -34,6 +34,9 @@ import (
        log "github.com/sirupsen/logrus"
 )
 
+const ContentTypeJSON = "application/json"
+const ContentTypePlain = "text/plain"
+
 // HTTPClient interface
 type HTTPClient interface {
        Get(url string) (*http.Response, error)
@@ -68,16 +71,16 @@ func Get(url string, client HTTPClient) ([]byte, error) {
 }
 
 func Put(url string, body []byte, client HTTPClient) error {
-       return do(http.MethodPut, url, body, client)
+       return do(http.MethodPut, url, body, ContentTypeJSON, client)
 }
 
-func Post(url string, body []byte, client HTTPClient) error {
-       return do(http.MethodPost, url, body, client)
+func Post(url string, body []byte, contentType string, client HTTPClient) error {
+       return do(http.MethodPost, url, body, contentType, client)
 }
 
-func do(method string, url string, body []byte, client HTTPClient) error {
+func do(method string, url string, body []byte, contentType string, client HTTPClient) error {
        if req, reqErr := http.NewRequest(method, url, bytes.NewBuffer(body)); reqErr == nil {
-               req.Header.Set("Content-Type", "application/json")
+               req.Header.Set("Content-Type", contentType)
                if response, respErr := client.Do(req); respErr == nil {
                        if isResponseSuccess(response.StatusCode) {
                                return nil
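
With the signature change above, callers now choose the payload's media type explicitly instead of always getting "application/json". A minimal usage sketch (the consumer URL is a placeholder); an *http.Client satisfies the HTTPClient interface, as the tests in this change also rely on, so it can be passed directly:

    package main

    import (
            "log"
            "net/http"

            "oransc.org/nonrtric/dmaapmediatorproducer/internal/restclient"
    )

    func main() {
            client := &http.Client{}
            // A single message is forwarded as plain text ...
            if err := restclient.Post("http://consumer.example/target", []byte("message1"), restclient.ContentTypePlain, client); err != nil {
                    log.Printf("post failed: %v", err)
            }
            // ... while buffered messages are sent together as a JSON array.
            if err := restclient.Post("http://consumer.example/target", []byte(`["m1","m2"]`), restclient.ContentTypeJSON, client); err != nil {
                    log.Printf("post failed: %v", err)
            }
    }

This matches the distribution tests above, which expect "text/plain" for unbuffered jobs and "application/json" for jobs with buffer parameters.
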
index 20c26dd..90db6ae 100644 (file)
@@ -142,7 +142,7 @@ func TestPostOk(t *testing.T) {
                StatusCode: http.StatusOK,
        }, nil)
 
-       if err := Post("http://localhost:9990", []byte("body"), &clientMock); err != nil {
+       if err := Post("http://localhost:9990", []byte("body"), "application/json", &clientMock); err != nil {
                t.Errorf("Put() error = %v, did not want error", err)
        }
        var actualRequest *http.Request
@@ -202,7 +202,7 @@ func Test_doErrorCases(t *testing.T) {
                                StatusCode: tt.args.mockReturnStatus,
                                Body:       ioutil.NopCloser(bytes.NewReader(tt.args.mockReturnBody)),
                        }, tt.args.mockReturnError)
-                       err := do("PUT", tt.args.url, nil, &clientMock)
+                       err := do("PUT", tt.args.url, nil, "", &clientMock)
                        assertions.Equal(tt.wantErr, err, tt.name)
                })
        }
index 02f3a98..46bc2a2 100644 (file)
@@ -38,6 +38,19 @@ const deleteJobPath = AddJobPath + "/{" + jobIdToken + "}"
 const logLevelToken = "level"
 const logAdminPath = "/admin/log"
 
+type ErrorInfo struct {
+       // A URI reference that identifies the problem type.
+       Type string `json:"type" swaggertype:"string"`
+       // A short, human-readable summary of the problem type.
+       Title string `json:"title" swaggertype:"string"`
+       // The HTTP status code generated by the origin server for this occurrence of the problem.
+       Status int `json:"status" swaggertype:"integer" example:"400"`
+       // A human-readable explanation specific to this occurrence of the problem.
+       Detail string `json:"detail" swaggertype:"string" example:"Info job type not found"`
+       // A URI reference that identifies the specific occurrence of the problem.
+       Instance string `json:"instance" swaggertype:"string"`
+} // @name ErrorInfo
+
 type ProducerCallbackHandler struct {
        jobsManager jobs.JobsManager
 }
@@ -60,22 +73,38 @@ func NewRouter(jm jobs.JobsManager, hcf func(http.ResponseWriter, *http.Request)
        return r
 }
 
+// @Summary      Add info job
+// @Description  Callback for ICS to add an info job
+// @Tags         Data producer (callbacks)
+// @Accept       json
+// @Param        user  body  jobs.JobInfo  true  "Info job data"
+// @Success      200
+// @Failure      400  {object}  ErrorInfo     "Problem as defined in https://tools.ietf.org/html/rfc7807"
+// @Header       400  {string}  Content-Type  "application/problem+json"
+// @Router       /info_job [post]
 func (h *ProducerCallbackHandler) addInfoJobHandler(w http.ResponseWriter, r *http.Request) {
        b, readErr := ioutil.ReadAll(r.Body)
        if readErr != nil {
-               http.Error(w, fmt.Sprintf("Unable to read body due to: %v", readErr), http.StatusBadRequest)
+               returnError(fmt.Sprintf("Unable to read body due to: %v", readErr), w)
                return
        }
        jobInfo := jobs.JobInfo{}
        if unmarshalErr := json.Unmarshal(b, &jobInfo); unmarshalErr != nil {
-               http.Error(w, fmt.Sprintf("Invalid json body. Cause: %v", unmarshalErr), http.StatusBadRequest)
+               returnError(fmt.Sprintf("Invalid json body. Cause: %v", unmarshalErr), w)
                return
        }
        if err := h.jobsManager.AddJobFromRESTCall(jobInfo); err != nil {
-               http.Error(w, fmt.Sprintf("Invalid job info. Cause: %v", err), http.StatusBadRequest)
+               returnError(fmt.Sprintf("Invalid job info. Cause: %v", err), w)
+               return
        }
 }
 
+// @Summary      Delete info job
+// @Description  Callback for ICS to delete an info job
+// @Tags         Data producer (callbacks)
+// @Param        infoJobId  path  string  true  "Info job ID"
+// @Success      200
+// @Router       /info_job/{infoJobId} [delete]
 func (h *ProducerCallbackHandler) deleteInfoJobHandler(w http.ResponseWriter, r *http.Request) {
        vars := mux.Vars(r)
        id, ok := vars[jobIdToken]
@@ -87,13 +116,21 @@ func (h *ProducerCallbackHandler) deleteInfoJobHandler(w http.ResponseWriter, r
        h.jobsManager.DeleteJobFromRESTCall(id)
 }
 
+// @Summary      Set log level
+// @Description  Set the log level of the producer.
+// @Tags         Admin
+// @Param        level  query  string  false  "string enums"  Enums(Error, Warn, Info, Debug)
+// @Success      200
+// @Failure      400  {object}  ErrorInfo     "Problem as defined in https://tools.ietf.org/html/rfc7807"
+// @Header       400  {string}  Content-Type  "application/problem+json"
+// @Router       /admin/log [put]
 func (h *ProducerCallbackHandler) setLogLevel(w http.ResponseWriter, r *http.Request) {
        query := r.URL.Query()
        logLevelStr := query.Get(logLevelToken)
        if loglevel, err := log.ParseLevel(logLevelStr); err == nil {
                log.SetLevel(loglevel)
        } else {
-               http.Error(w, fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), http.StatusBadRequest)
+               returnError(fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), w)
                return
        }
 }
@@ -109,3 +146,13 @@ type methodNotAllowedHandler struct{}
 func (h *methodNotAllowedHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
        http.Error(w, "Method is not supported.", http.StatusMethodNotAllowed)
 }
+
+func returnError(msg string, w http.ResponseWriter) {
+       errInfo := ErrorInfo{
+               Status: http.StatusBadRequest,
+               Detail: msg,
+       }
+       w.Header().Add("Content-Type", "application/problem+json")
+       w.WriteHeader(http.StatusBadRequest)
+       json.NewEncoder(w).Encode(errInfo)
+}
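
The handlers above now answer errors with an RFC 7807 problem body ("application/problem+json") instead of a plain-text message; returnError only fills in the status and detail fields. A hedged sketch of how a caller of the /info_job callback might decode such a response (the producer address is a placeholder):

    package main

    import (
            "bytes"
            "encoding/json"
            "fmt"
            "net/http"
    )

    // problemDetail mirrors the fields of ErrorInfo that the producer populates.
    type problemDetail struct {
            Status int    `json:"status"`
            Detail string `json:"detail"`
    }

    func main() {
            // Deliberately invalid body to trigger a 400 response (illustrative address).
            resp, err := http.Post("http://producer.example/info_job", "application/json", bytes.NewBufferString("not json"))
            if err != nil {
                    fmt.Println("request failed:", err)
                    return
            }
            defer resp.Body.Close()
            if resp.StatusCode == http.StatusBadRequest {
                    var problem problemDetail
                    if decodeErr := json.NewDecoder(resp.Body).Decode(&problem); decodeErr == nil {
                            fmt.Printf("problem+json: status=%d detail=%q\n", problem.Status, problem.Detail)
                    }
            }
    }
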
index 6248c22..dbe503d 100644 (file)
@@ -34,7 +34,7 @@ import (
        "github.com/stretchr/testify/mock"
        "github.com/stretchr/testify/require"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
-       "oransc.org/nonrtric/dmaapmediatorproducer/mocks/jobhandler"
+       "oransc.org/nonrtric/dmaapmediatorproducer/mocks/jobshandler"
 )
 
 func TestNewRouter(t *testing.T) {
@@ -88,7 +88,7 @@ func TestNewRouter(t *testing.T) {
        assertions.Equal("/admin/log", path)
 }
 
-func TestAddInfoJobHandler(t *testing.T) {
+func TestAddInfoJobToJobsHandler(t *testing.T) {
        assertions := require.New(t)
 
        type args struct {
@@ -96,27 +96,27 @@ func TestAddInfoJobHandler(t *testing.T) {
                mockReturn error
        }
        tests := []struct {
-               name         string
-               args         args
-               wantedStatus int
-               wantedBody   string
+               name            string
+               args            args
+               wantedStatus    int
+               wantedErrorInfo *ErrorInfo
        }{
                {
-                       name: "AddInfoJobHandler with correct job, should return OK",
+                       name: "AddInfoJobToJobsHandler with correct job, should return OK",
                        args: args{
                                job: jobs.JobInfo{
                                        Owner:            "owner",
                                        LastUpdated:      "now",
                                        InfoJobIdentity:  "jobId",
                                        TargetUri:        "target",
-                                       InfoJobData:      "{}",
+                                       InfoJobData:      jobs.Parameters{},
                                        InfoTypeIdentity: "type",
                                },
                        },
                        wantedStatus: http.StatusOK,
                },
                {
-                       name: "AddInfoJobHandler with incorrect job info, should return BadRequest",
+                       name: "AddInfoJobToJobsHandler with incorrect job info, should return BadRequest",
                        args: args{
                                job: jobs.JobInfo{
                                        Owner: "bad",
@@ -124,15 +124,18 @@ func TestAddInfoJobHandler(t *testing.T) {
                                mockReturn: errors.New("error"),
                        },
                        wantedStatus: http.StatusBadRequest,
-                       wantedBody:   "Invalid job info. Cause: error",
+                       wantedErrorInfo: &ErrorInfo{
+                               Status: http.StatusBadRequest,
+                               Detail: "Invalid job info. Cause: error",
+                       },
                },
        }
        for _, tt := range tests {
                t.Run(tt.name, func(t *testing.T) {
-                       jobHandlerMock := jobhandler.JobHandler{}
-                       jobHandlerMock.On("AddJobFromRESTCall", tt.args.job).Return(tt.args.mockReturn)
+                       jobsHandlerMock := jobshandler.JobsHandler{}
+                       jobsHandlerMock.On("AddJobFromRESTCall", tt.args.job).Return(tt.args.mockReturn)
 
-                       callbackHandlerUnderTest := NewProducerCallbackHandler(&jobHandlerMock)
+                       callbackHandlerUnderTest := NewProducerCallbackHandler(&jobsHandlerMock)
 
                        handler := http.HandlerFunc(callbackHandlerUnderTest.addInfoJobHandler)
                        responseRecorder := httptest.NewRecorder()
@@ -141,18 +144,27 @@ func TestAddInfoJobHandler(t *testing.T) {
                        handler.ServeHTTP(responseRecorder, r)
 
                        assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
-                       assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
-                       jobHandlerMock.AssertCalled(t, "AddJobFromRESTCall", tt.args.job)
+                       if tt.wantedErrorInfo != nil {
+                               var actualErrInfo ErrorInfo
+                               err := json.Unmarshal(getBody(responseRecorder, t), &actualErrInfo)
+                               if err != nil {
+                                       t.Error("Unable to unmarshal error body", err)
+                                       t.Fail()
+                               }
+                               assertions.Equal(*tt.wantedErrorInfo, actualErrInfo, tt.name)
+                               assertions.Equal("application/problem+json", responseRecorder.Result().Header.Get("Content-Type"))
+                       }
+                       jobsHandlerMock.AssertCalled(t, "AddJobFromRESTCall", tt.args.job)
                })
        }
 }
 
 func TestDeleteJob(t *testing.T) {
        assertions := require.New(t)
-       jobHandlerMock := jobhandler.JobHandler{}
-       jobHandlerMock.On("DeleteJobFromRESTCall", mock.Anything).Return(nil)
+       jobsHandlerMock := jobshandler.JobsHandler{}
+       jobsHandlerMock.On("DeleteJobFromRESTCall", mock.Anything).Return(nil)
 
-       callbackHandlerUnderTest := NewProducerCallbackHandler(&jobHandlerMock)
+       callbackHandlerUnderTest := NewProducerCallbackHandler(&jobsHandlerMock)
 
        responseRecorder := httptest.NewRecorder()
        r := mux.SetURLVars(newRequest(http.MethodDelete, "/jobs/", nil, t), map[string]string{"infoJobId": "job1"})
@@ -162,7 +174,7 @@ func TestDeleteJob(t *testing.T) {
 
        assertions.Equal("", responseRecorder.Body.String())
 
-       jobHandlerMock.AssertCalled(t, "DeleteJobFromRESTCall", "job1")
+       jobsHandlerMock.AssertCalled(t, "DeleteJobFromRESTCall", "job1")
 }
 
 func TestSetLogLevel(t *testing.T) {
@@ -172,10 +184,10 @@ func TestSetLogLevel(t *testing.T) {
                logLevel string
        }
        tests := []struct {
-               name         string
-               args         args
-               wantedStatus int
-               wantedBody   string
+               name            string
+               args            args
+               wantedStatus    int
+               wantedErrorInfo *ErrorInfo
        }{
                {
                        name: "Set to valid log level, should return OK",
@@ -190,7 +202,10 @@ func TestSetLogLevel(t *testing.T) {
                                logLevel: "bad",
                        },
                        wantedStatus: http.StatusBadRequest,
-                       wantedBody:   "Invalid log level: bad",
+                       wantedErrorInfo: &ErrorInfo{
+                               Detail: "Invalid log level: bad. Log level will not be changed!",
+                               Status: http.StatusBadRequest,
+                       },
                },
        }
        for _, tt := range tests {
@@ -204,7 +219,16 @@ func TestSetLogLevel(t *testing.T) {
                        handler.ServeHTTP(responseRecorder, r)
 
                        assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
-                       assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
+                       if tt.wantedErrorInfo != nil {
+                               var actualErrInfo ErrorInfo
+                               err := json.Unmarshal(getBody(responseRecorder, t), &actualErrInfo)
+                               if err != nil {
+                                       t.Error("Unable to unmarshal error body", err)
+                                       t.Fail()
+                               }
+                               assertions.Equal(*tt.wantedErrorInfo, actualErrInfo, tt.name)
+                               assertions.Equal("application/problem+json", responseRecorder.Result().Header.Get("Content-Type"))
+                       }
                })
        }
 }
@@ -222,3 +246,12 @@ func newRequest(method string, url string, jobInfo *jobs.JobInfo, t *testing.T)
                return nil
        }
 }
+
+func getBody(responseRecorder *httptest.ResponseRecorder, t *testing.T) []byte {
+       buf := new(bytes.Buffer)
+       if _, err := buf.ReadFrom(responseRecorder.Body); err != nil {
+               t.Error("Unable to read error body", err)
+               t.Fail()
+       }
+       return buf.Bytes()
+}
index 1aabdda..65a84a2 100644 (file)
@@ -22,15 +22,21 @@ package main
 
 import (
        "crypto/tls"
+       "encoding/json"
        "fmt"
        "net/http"
        "time"
 
+       "github.com/gorilla/mux"
        log "github.com/sirupsen/logrus"
+       _ "oransc.org/nonrtric/dmaapmediatorproducer/api"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/restclient"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/server"
+
+       httpSwagger "github.com/swaggo/http-swagger"
 )
 
 var configuration *config.Config
@@ -40,6 +46,12 @@ func init() {
        configuration = config.New()
 }
 
+// @title    DMaaP Mediator Producer
+// @version  1.1.0
+
+// @license.name  Apache 2.0
+// @license.url   http://www.apache.org/licenses/LICENSE-2.0.html
+
 func main() {
        log.SetLevel(configuration.LogLevel)
        log.Debug("Initializing DMaaP Mediator Producer")
@@ -55,9 +67,12 @@ func main() {
        } else {
                log.Fatalf("Stopping producer due to error: %v", err)
        }
+
        retryClient := restclient.CreateRetryClient(cert)
+       kafkaFactory := kafkaclient.KafkaFactoryImpl{BootstrapServer: configuration.KafkaBootstrapServers}
+       distributionClient := restclient.CreateClientWithoutRetry(cert, 10*time.Second)
 
-       jobsManager := jobs.NewJobsManagerImpl(retryClient, configuration.DMaaPMRAddress, restclient.CreateClientWithoutRetry(cert, 10*time.Second))
+       jobsManager := jobs.NewJobsManagerImpl(retryClient, configuration.DMaaPMRAddress, kafkaFactory, distributionClient)
        go startCallbackServer(jobsManager, callbackAddress)
 
        if err := registerTypesAndProducer(jobsManager, configuration.InfoCoordinatorAddress, callbackAddress, retryClient); err != nil {
@@ -78,22 +93,25 @@ func validateConfiguration(configuration *config.Config) error {
        if configuration.ProducerCertPath == "" || configuration.ProducerKeyPath == "" {
                return fmt.Errorf("missing PRODUCER_CERT and/or PRODUCER_KEY")
        }
+       if configuration.DMaaPMRAddress == "" && configuration.KafkaBootstrapServers == "" {
+               return fmt.Errorf("at least one of DMAAP_MR_ADDR or KAFKA_BOOTSTRAP_SERVERS must be provided")
+       }
        return nil
 }
-func registerTypesAndProducer(jobTypesHandler jobs.JobTypesManager, infoCoordinatorAddress string, callbackAddress string, client restclient.HTTPClient) error {
+func registerTypesAndProducer(jobTypesManager jobs.JobTypesManager, infoCoordinatorAddress string, callbackAddress string, client restclient.HTTPClient) error {
        registrator := config.NewRegistratorImpl(infoCoordinatorAddress, client)
-       configTypes, err := config.GetJobTypesFromConfiguration("configs/type_config.json")
+       configTypes, err := config.GetJobTypesFromConfiguration("configs")
        if err != nil {
                return fmt.Errorf("unable to register all types due to: %v", err)
        }
-       regErr := registrator.RegisterTypes(jobTypesHandler.LoadTypesFromConfiguration(configTypes))
+       regErr := registrator.RegisterTypes(jobTypesManager.LoadTypesFromConfiguration(configTypes))
        if regErr != nil {
                return fmt.Errorf("unable to register all types due to: %v", regErr)
        }
 
        producer := config.ProducerRegistrationInfo{
                InfoProducerSupervisionCallbackUrl: callbackAddress + server.HealthCheckPath,
-               SupportedInfoTypes:                 jobTypesHandler.GetSupportedTypes(),
+               SupportedInfoTypes:                 jobTypesManager.GetSupportedTypes(),
                InfoJobCallbackUrl:                 callbackAddress + server.AddJobPath,
        }
        if err := registrator.RegisterProducer("DMaaP_Mediator_Producer", &producer); err != nil {
@@ -105,6 +123,7 @@ func registerTypesAndProducer(jobTypesHandler jobs.JobTypesManager, infoCoordina
 func startCallbackServer(jobsManager jobs.JobsManager, callbackAddress string) {
        log.Debugf("Starting callback server at port %v", configuration.InfoProducerPort)
        r := server.NewRouter(jobsManager, statusHandler)
+       addSwaggerHandler(r)
        if restclient.IsUrlSecure(callbackAddress) {
                log.Fatalf("Server stopped: %v", http.ListenAndServeTLS(fmt.Sprintf(":%v", configuration.InfoProducerPort), configuration.ProducerCertPath, configuration.ProducerKeyPath, r))
        } else {
@@ -112,12 +131,34 @@ func startCallbackServer(jobsManager jobs.JobsManager, callbackAddress string) {
        }
 }
 
+type ProducerStatus struct {
+       // The registration status of the producer in Information Coordinator Service. Either `registered` or `not registered`
+       RegisteredStatus string `json:"registeredStatus" swaggertype:"string" example:"registered"`
+} // @name  ProducerStatus
+
+// @Summary      Get status
+// @Description  Get the status of the producer. Will show if the producer has registered in ICS.
+// @Tags         Data producer (callbacks)
+// @Produce      json
+// @Success      200  {object}  ProducerStatus
+// @Router       /health_check [get]
 func statusHandler(w http.ResponseWriter, r *http.Request) {
-       registeredStatus := "not registered"
+       status := ProducerStatus{
+               RegisteredStatus: "not registered",
+       }
        if registered {
-               registeredStatus = "registered"
+               status.RegisteredStatus = "registered"
        }
-       fmt.Fprintf(w, `{"status": "%v"}`, registeredStatus)
+       json.NewEncoder(w).Encode(status)
+}
+
+// @Summary      Get Swagger Documentation
+// @Description  Get the Swagger API documentation for the producer.
+// @Tags         Admin
+// @Success      200
+// @Router       /swagger [get]
+func addSwaggerHandler(r *mux.Router) {
+       r.PathPrefix("/swagger").Handler(httpSwagger.WrapHandler)
 }
 
 func keepProducerAlive() {
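
Since statusHandler now returns a JSON object rather than the old hand-built string, a supervising client can decode it directly. A minimal sketch, assuming the producer's callback server is reachable on a placeholder address and port (the real port comes from configuration.InfoProducerPort):

    package main

    import (
            "encoding/json"
            "fmt"
            "net/http"
    )

    // producerStatus mirrors the ProducerStatus body written by statusHandler above.
    type producerStatus struct {
            RegisteredStatus string `json:"registeredStatus"`
    }

    func main() {
            // Placeholder address and port for illustration only.
            resp, err := http.Get("http://localhost:8085/health_check")
            if err != nil {
                    fmt.Println("health check failed:", err)
                    return
            }
            defer resp.Body.Close()
            var status producerStatus
            if err := json.NewDecoder(resp.Body).Decode(&status); err == nil {
                    fmt.Println("producer is", status.RegisteredStatus) // "registered" or "not registered"
            }
    }
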
diff --git a/dmaap-mediator-producer/main_test.go b/dmaap-mediator-producer/main_test.go
new file mode 100644 (file)
index 0000000..19851be
--- /dev/null
@@ -0,0 +1,205 @@
+// -
+//   ========================LICENSE_START=================================
+//   O-RAN-SC
+//   %%
+//   Copyright (C) 2022: Nordix Foundation
+//   %%
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//        http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+//   ========================LICENSE_END===================================
+//
+
+package main
+
+import (
+       "bytes"
+       "fmt"
+       "io/ioutil"
+       "net/http"
+       "os/exec"
+       "sync"
+       "testing"
+       "time"
+
+       "github.com/stretchr/testify/require"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
+)
+
+// This is not a real test, just a way to get the Swagger documentation generated automatically.
+// Hence there are no assertions in this test.
+func TestGenerateSwaggerDocs(t *testing.T) {
+       cmd := exec.Command("./generate_swagger_docs.sh")
+
+       err := cmd.Run()
+       if err != nil {
+               fmt.Println("Error generating Swagger:", err)
+       }
+}
+
+func TestValidateConfiguration(t *testing.T) {
+       assertions := require.New(t)
+
+       validConfig := config.Config{
+               InfoProducerHost:      "host",
+               DMaaPMRAddress:        "address",
+               KafkaBootstrapServers: "servers",
+               ProducerCertPath:      "path",
+               ProducerKeyPath:       "path",
+       }
+       assertions.Nil(validateConfiguration(&validConfig))
+
+       missingProducerHost := config.Config{
+               DMaaPMRAddress:        "address",
+               KafkaBootstrapServers: "servers",
+               ProducerCertPath:      "path",
+               ProducerKeyPath:       "path",
+       }
+       assertions.Contains(validateConfiguration(&missingProducerHost).Error(), "INFO_PRODUCER_HOST")
+
+       missingCert := config.Config{
+               InfoProducerHost:      "host",
+               DMaaPMRAddress:        "address",
+               KafkaBootstrapServers: "servers",
+               ProducerKeyPath:       "path",
+       }
+       assertions.Contains(validateConfiguration(&missingCert).Error(), "PRODUCER_CERT")
+
+       missingCertKey := config.Config{
+               InfoProducerHost:      "host",
+               DMaaPMRAddress:        "address",
+               KafkaBootstrapServers: "servers",
+               ProducerCertPath:      "path",
+       }
+       assertions.Contains(validateConfiguration(&missingCertKey).Error(), "PRODUCER_KEY")
+
+       missingMRAddress := config.Config{
+               InfoProducerHost:      "host",
+               KafkaBootstrapServers: "servers",
+               ProducerCertPath:      "path",
+               ProducerKeyPath:       "path",
+       }
+       assertions.Nil(validateConfiguration(&missingMRAddress))
+
+       missingKafkaServers := config.Config{
+               InfoProducerHost: "host",
+               DMaaPMRAddress:   "address",
+               ProducerCertPath: "path",
+               ProducerKeyPath:  "path",
+       }
+       assertions.Nil(validateConfiguration(&missingKafkaServers))
+
+       missingMRAddressAndKafkaServers := config.Config{
+               InfoProducerHost: "host",
+               ProducerCertPath: "path",
+               ProducerKeyPath:  "path",
+       }
+       assertions.Contains(validateConfiguration(&missingMRAddressAndKafkaServers).Error(), "DMAAP_MR_ADDR")
+       assertions.Contains(validateConfiguration(&missingMRAddressAndKafkaServers).Error(), "KAFKA_BOOTSTRAP_SERVERS")
+}
+
+func TestRegisterTypesAndProducer(t *testing.T) {
+       assertions := require.New(t)
+
+       wg := sync.WaitGroup{}
+       clientMock := NewTestClient(func(req *http.Request) *http.Response {
+               if req.URL.String() == configuration.InfoCoordinatorAddress+"/data-producer/v1/info-types/STD_Fault_Messages" {
+                       assertions.Equal(req.Method, "PUT")
+                       body := getBodyAsString(req, t)
+                       assertions.Contains(body, "info_job_data_schema")
+                       assertions.Equal("application/json", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               } else if req.URL.String() == configuration.InfoCoordinatorAddress+"/data-producer/v1/info-types/Kafka_TestTopic" {
+                       assertions.Equal(req.Method, "PUT")
+                       body := getBodyAsString(req, t)
+                       assertions.Contains(body, "info_job_data_schema")
+                       assertions.Equal("application/json", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               } else if req.URL.String() == configuration.InfoCoordinatorAddress+"/data-producer/v1/info-producers/DMaaP_Mediator_Producer" {
+                       assertions.Equal(req.Method, "PUT")
+                       body := getBodyAsString(req, t)
+                       assertions.Contains(body, "callbackAddress/health_check")
+                       assertions.Contains(body, "callbackAddress/info_job")
+                       assertions.Contains(body, "Kafka_TestTopic")
+                       assertions.Contains(body, "STD_Fault_Messages")
+                       assertions.Equal("application/json", req.Header.Get("Content-Type"))
+                       wg.Done()
+                       return &http.Response{
+                               StatusCode: 200,
+                               Body:       ioutil.NopCloser(bytes.NewBufferString(`OK`)),
+                               Header:     make(http.Header), // Must be set to non-nil value or it panics
+                       }
+               }
+               t.Error("Wrong call to client: ", req)
+               t.Fail()
+               return nil
+       })
+       jobsManager := jobs.NewJobsManagerImpl(clientMock, configuration.DMaaPMRAddress, kafkaclient.KafkaFactoryImpl{}, nil)
+
+       wg.Add(3)
+       err := registerTypesAndProducer(jobsManager, configuration.InfoCoordinatorAddress, "callbackAddress", clientMock)
+
+       assertions.Nil(err)
+
+       if waitTimeout(&wg, 2*time.Second) {
+               t.Error("Not all calls to server were made")
+               t.Fail()
+       }
+}
+
+type RoundTripFunc func(req *http.Request) *http.Response
+
+func (f RoundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
+       return f(req), nil
+}
+
+// NewTestClient returns *http.Client with Transport replaced to avoid making real calls.
+func NewTestClient(fn RoundTripFunc) *http.Client {
+       return &http.Client{
+               Transport: RoundTripFunc(fn),
+       }
+}
+
+func getBodyAsString(req *http.Request, t *testing.T) string {
+       buf := new(bytes.Buffer)
+       if _, err := buf.ReadFrom(req.Body); err != nil {
+               t.Fail()
+       }
+       return buf.String()
+}
+
+// waitTimeout waits for the waitgroup for the specified max timeout.
+// Returns true if waiting timed out.
+func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
+       c := make(chan struct{})
+       go func() {
+               defer close(c)
+               wg.Wait()
+       }()
+       select {
+       case <-c:
+               return false // completed normally
+       case <-time.After(timeout):
+               return true // timed out
+       }
+}
diff --git a/dmaap-mediator-producer/mocks/KafkaConsumer.go b/dmaap-mediator-producer/mocks/KafkaConsumer.go
new file mode 100644 (file)
index 0000000..8ae0893
--- /dev/null
@@ -0,0 +1,76 @@
+// Code generated by mockery v1.0.0. DO NOT EDIT.
+
+package mocks
+
+import (
+       kafka "github.com/confluentinc/confluent-kafka-go/kafka"
+
+       mock "github.com/stretchr/testify/mock"
+
+       time "time"
+)
+
+// KafkaConsumer is an autogenerated mock type for the KafkaConsumer type
+type KafkaConsumer struct {
+       mock.Mock
+}
+
+// Commit provides a mock function with given fields:
+func (_m KafkaConsumer) Commit() ([]kafka.TopicPartition, error) {
+       ret := _m.Called()
+
+       var r0 []kafka.TopicPartition
+       if rf, ok := ret.Get(0).(func() []kafka.TopicPartition); ok {
+               r0 = rf()
+       } else {
+               if ret.Get(0) != nil {
+                       r0 = ret.Get(0).([]kafka.TopicPartition)
+               }
+       }
+
+       var r1 error
+       if rf, ok := ret.Get(1).(func() error); ok {
+               r1 = rf()
+       } else {
+               r1 = ret.Error(1)
+       }
+
+       return r0, r1
+}
+
+// ReadMessage provides a mock function with given fields: timeout
+func (_m KafkaConsumer) ReadMessage(timeout time.Duration) (*kafka.Message, error) {
+       ret := _m.Called(timeout)
+
+       var r0 *kafka.Message
+       if rf, ok := ret.Get(0).(func(time.Duration) *kafka.Message); ok {
+               r0 = rf(timeout)
+       } else {
+               if ret.Get(0) != nil {
+                       r0 = ret.Get(0).(*kafka.Message)
+               }
+       }
+
+       var r1 error
+       if rf, ok := ret.Get(1).(func(time.Duration) error); ok {
+               r1 = rf(timeout)
+       } else {
+               r1 = ret.Error(1)
+       }
+
+       return r0, r1
+}
+
+// Subscribe provides a mock function with given fields: topic
+func (_m KafkaConsumer) Subscribe(topic string) error {
+       ret := _m.Called(topic)
+
+       var r0 error
+       if rf, ok := ret.Get(0).(func(string) error); ok {
+               r0 = rf(topic)
+       } else {
+               r0 = ret.Error(0)
+       }
+
+       return r0
+}
diff --git a/dmaap-mediator-producer/mocks/KafkaFactory.go b/dmaap-mediator-producer/mocks/KafkaFactory.go
new file mode 100644 (file)
index 0000000..f05457a
--- /dev/null
@@ -0,0 +1,36 @@
+// Code generated by mockery v1.0.0. DO NOT EDIT.
+
+package mocks
+
+import (
+       mock "github.com/stretchr/testify/mock"
+       "oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
+)
+
+// KafkaFactory is an autogenerated mock type for the KafkaFactory type
+type KafkaFactory struct {
+       mock.Mock
+}
+
+// NewKafkaConsumer provides a mock function with given fields: topicID
+func (_m KafkaFactory) NewKafkaConsumer(topicID string) (kafkaclient.KafkaConsumer, error) {
+       ret := _m.Called(topicID)
+
+       var r0 kafkaclient.KafkaConsumer
+       if rf, ok := ret.Get(0).(func(string) kafkaclient.KafkaConsumer); ok {
+               r0 = rf(topicID)
+       } else {
+               if ret.Get(0) != nil {
+                       r0 = ret.Get(0).(kafkaclient.KafkaConsumer)
+               }
+       }
+
+       var r1 error
+       if rf, ok := ret.Get(1).(func(string) error); ok {
+               r1 = rf(topicID)
+       } else {
+               r1 = ret.Error(1)
+       }
+
+       return r0, r1
+}
@@ -1,19 +1,19 @@
 // Code generated by mockery v2.9.3. DO NOT EDIT.
 
-package jobhandler
+package jobshandler
 
 import (
        mock "github.com/stretchr/testify/mock"
        jobs "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
 )
 
-// JobHandler is an autogenerated mock type for the JobHandler type
-type JobHandler struct {
+// JobsHandler is an autogenerated mock type for the JobsHandler type
+type JobsHandler struct {
        mock.Mock
 }
 
 // AddJob provides a mock function with given fields: _a0
-func (_m *JobHandler) AddJobFromRESTCall(_a0 jobs.JobInfo) error {
+func (_m *JobsHandler) AddJobFromRESTCall(_a0 jobs.JobInfo) error {
        ret := _m.Called(_a0)
 
        var r0 error
@@ -27,6 +27,6 @@ func (_m *JobHandler) AddJobFromRESTCall(_a0 jobs.JobInfo) error {
 }
 
 // DeleteJob provides a mock function with given fields: jobId
-func (_m *JobHandler) DeleteJobFromRESTCall(jobId string) {
+func (_m *JobsHandler) DeleteJobFromRESTCall(jobId string) {
        _m.Called(jobId)
 }
diff --git a/dmaap-mediator-producer/pom.xml b/dmaap-mediator-producer/pom.xml
deleted file mode 100644 (file)
index eb30b6b..0000000
+++ /dev/null
@@ -1,112 +0,0 @@
-<!--
-  ============LICENSE_START=======================================================
-   Copyright (C) 2021 Nordix Foundation.
-  ================================================================================
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-
-  SPDX-License-Identifier: Apache-2.0
-  ============LICENSE_END=========================================================
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>oransc.org</groupId>
-    <artifactId>dmaapmediatorproducer</artifactId>
-    <version>1.1.0</version>
-    <properties>
-        <docker-maven-plugin.version>0.30.0</docker-maven-plugin.version>
-    </properties>
-
-     <build>
-        <plugins>
-            <plugin>
-              <artifactId>exec-maven-plugin</artifactId>
-              <groupId>org.codehaus.mojo</groupId>
-              <executions>
-                  <execution>
-                      <id>Build Go binary</id>
-                      <phase>generate-sources</phase>
-                      <goals>
-                          <goal>exec</goal>
-                      </goals>
-                      <configuration>
-                          <executable>${basedir}/build_and_test.sh</executable>
-                      </configuration>
-                  </execution>
-              </executions>
-          </plugin>
-            <plugin>
-                <groupId>io.fabric8</groupId>
-                <artifactId>docker-maven-plugin</artifactId>
-                <version>${docker-maven-plugin.version}</version>
-                <inherited>false</inherited>
-                <executions>
-                    <execution>
-                        <id>generate-nonrtric-dmaap-mediator-producer-image</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>build</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
-                                    <build>
-                                        <cleanup>try</cleanup>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                    <execution>
-                        <id>push-nonrtric-dmaap-mediator-producer-image</id>
-                        <goals>
-                            <goal>build</goal>
-                            <goal>push</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
-                                    <build>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                            <tag>latest</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-      </plugins>
-    </build>
-</project>
index 0818d5e..87457c2 100644 (file)
@@ -23,6 +23,7 @@ package main
 import (
        "flag"
        "fmt"
+       "io/ioutil"
        "net/http"
 
        "github.com/gorilla/mux"
@@ -43,7 +44,7 @@ func handleTypeRegistration(w http.ResponseWriter, r *http.Request) {
        vars := mux.Vars(r)
        id, ok := vars["typeId"]
        if ok {
-               fmt.Println("Registered type ", id)
+               fmt.Printf("Registered type %v with schema: %v\n", id, readBody(r))
        }
 }
 
@@ -51,6 +52,14 @@ func handleProducerRegistration(w http.ResponseWriter, r *http.Request) {
        vars := mux.Vars(r)
        id, ok := vars["producerId"]
        if ok {
-               fmt.Println("Registered producer ", id)
+               fmt.Printf("Registered producer %v with data: %v\n", id, readBody(r))
        }
 }
+
+func readBody(r *http.Request) string {
+       b, readErr := ioutil.ReadAll(r.Body)
+       if readErr != nil {
+               return fmt.Sprintf("Unable to read body due to: %v", readErr)
+       }
+       return string(b)
+}
index b984df9..66db95f 100644 (file)
 #
 
 #PMS
-PMS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-policy-agent"
-PMS_IMAGE_TAG="2.2.0"
+PMS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-a1-policy-management-service"
+PMS_IMAGE_TAG="2.3.0"
 
 #A1_SIM
 A1_SIM_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/a1-simulator"
-A1_SIM_IMAGE_TAG="2.1.0"
+A1_SIM_IMAGE_TAG="2.2.0"
 
 #RAPP
 RAPP_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-r-app-catalogue"
-RAPP_IMAGE_TAG="1.0.0"
+RAPP_IMAGE_TAG="1.0.1"
 
 #CONTROL_PANEL
 CONTROL_PANEL_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-controlpanel"
-CONTROL_PANEL_IMAGE_TAG="2.2.0"
+CONTROL_PANEL_IMAGE_TAG="2.3.0"
 
 #GATEWAY
 NONRTRIC_GATEWAY_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-gateway"
@@ -37,7 +37,7 @@ NONRTRIC_GATEWAY_IMAGE_TAG="1.0.0"
 
 #ICS
 ICS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-information-coordinator-service"
-ICS_IMAGE_TAG="1.1.0"
+ICS_IMAGE_TAG="1.2.0"
 
 #CONSUMER
 CONSUMER_IMAGE_BASE="eexit/mirror-http-server"
@@ -48,8 +48,8 @@ ORU_APP_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-o-ru-closed-loop
 ORU_APP_IMAGE_TAG="1.0.0"
 
 #DB
-DB_IMAGE_BASE="mysql/mysql-server"
-DB_IMAGE_TAG="5.6"
+DB_IMAGE_BASE="mariadb"
+DB_IMAGE_TAG="10.5"
 
 #A1CONTROLLER
 A1CONTROLLER_IMAGE_BASE="nexus3.onap.org:10002/onap/sdnc-image"
index 2dfc38c..79e6a6e 100644 (file)
@@ -33,7 +33,7 @@ services:
       - 8433:8433
     volumes:
       - ./policy-service/config/application-policyagent.yaml:/opt/app/policy-agent/config/application.yaml:ro
-      - ./policy-service/config/application_configuration.json:/opt/app/policy-agent/data/application_configuration.json:ro
+      - ./policy-service/config/application_configuration.json:/opt/app/policy-agent/data/application_configuration.json:rw
     # For using own certs instead of the default ones (built into the container),
     # place them in config/ directory, update the application-policyagent.yaml file, and uncomment the following lines
     #  - ./policy-service/config/keystore-policyagent.jks:/opt/app/policy-agent/etc/cert/keystore.jks:ro
index 3661db9..69d8ac9 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   db:
-    image: mysql/mysql-server:5.6
+    image: "${DB_IMAGE_BASE}:${DB_IMAGE_TAG}"
     container_name: sdnc-db
     networks:
       - default
index 8ab5b45..29207b8 100644 (file)
@@ -46,7 +46,7 @@ The API is also described in Swagger-JSON and YAML:
 DMaaP Adaptor
 =============
 
-The DMaaP Adaptor provides support for push delivery of any data received from DMaap or Kafka.
+The DMaaP Adaptor provides support for push delivery of any data received from DMaaP or Kafka.
 
 See `DMaaP Adaptor API <./dmaap-adaptor-api.html>`_ for full details of the API.
 
@@ -59,6 +59,22 @@ The API is also described in Swagger-JSON and YAML:
 
    "DMaaP Adaptor API", ":download:`link <../dmaap-adaptor-java/api/api.json>`", ":download:`link <../dmaap-adaptor-java/api/api.yaml>`"
 
+DMaaP Mediator Producer
+=======================
+
+The DMaaP Mediator Producer provides support for push delivery of any data received from DMaaP or Kafka.
+
+See `DMaaP Mediator Producer API <./dmaap-mediator-producer-api.html>`_ for full details of the API.
+
+The API is also described in Swagger-JSON and YAML:
+
+
+.. csv-table::
+   :header: "API name", "|swagger-icon|", "|yaml-icon|"
+   :widths: 10,5, 5
+
+   "DMaaP Mediator Producer API", ":download:`link <../dmaap-mediator-producer/api/swagger.json>`", ":download:`link <../dmaap-mediator-producer/api/swagger.yaml>`"
+
 Non-RT-RIC App Catalogue (Initial)
 ==================================
 
index c5e504d..4ee6998 100644 (file)
@@ -11,6 +11,7 @@ linkcheck_ignore = [
     './rac-api.html', #Generated file that doesn't exist at link check.
     './ics-api.html', #Generated file that doesn't exist at link check.
-    './dmaap-adaptor-api.html' #Generated file that doesn't exist at link check.
+    './dmaap-adaptor-api.html', #Generated file that doesn't exist at link check.
+    './dmaap-mediator-producer-api.html' #Generated file that doesn't exist at link check.
 ]
 
 extensions = ['sphinxcontrib.redoc', 'sphinx.ext.intersphinx',]
@@ -32,6 +33,11 @@ redoc = [
                 'name': 'DMaaP Adaptor API',
                 'page': 'dmaap-adaptor-api',
                 'spec': '../dmaap-adaptor-java/api/api.json',
+            },
+            {
+                'name': 'DMaaP Mediator Producer API',
+                'page': 'dmaap-mediator-producer-api',
+                'spec': '../dmaap-mediator-producer/api/swagger.json',
                 'embed': True,
             }
         ]
index 0d950cd..a3c3422 100644 (file)
@@ -7,12 +7,12 @@ Developer Guide
 
 This document provides a quickstart for developers of the Non-RT RIC parts.
 
-Additional developer guides are available on the `O-RAN SC NONRTRIC Developer wiki <https://wiki.o-ran-sc.org/display/RICNR/Release+E>`_
+Additional developer guides are available on the `O-RAN SC NONRTRIC Developer wiki <https://wiki.o-ran-sc.org/display/RICNR/Release+E>`_.
 
 A1 Policy Management Service & SDNC/A1 Controller & A1 Adapter
 --------------------------------------------------------------
 
-The A1 Policy Management Service is implemented in ONAP. For documentation see `ONAP CCSDK documentation <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_
+The A1 Policy Management Service is implemented in ONAP. For documentation, see the `ONAP CCSDK documentation <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_
 and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
 Information Coordinator Service
@@ -65,36 +65,42 @@ See the README.md file in the *r-app-catalogue* directory in the Gerrit repo for
 
 DMaaP Adaptor Service
 ---------------------
-This is run in the same way as the Information Coordinator Service
+
+This Java implementation is run in the same way as the Information Coordinator Service.
 
 The following properties in the application.yaml file have to be modified:
 * server.ssl.key-store=./config/keystore.jks
 * app.webclient.trust-store=./config/truststore.jks
 * app.configuration-filepath=./src/test/resources/test_application_configuration.json
 
+DMaaP Mediator Producer
+-----------------------
+
+To build and run this Go implementation, see the README.md file under the folder "dmaap-mediator-producer" in the "nonrtric" repo.
+
 O-DU & O-RU fronthaul recovery
 ------------------------------
 
-See the page in Wiki: `O-RU Fronthaul Recovery usecase <https://wiki.o-ran-sc.org/display/RICNR/O-RU+Fronthaul+Recovery+usecase>`_
+See the page in Wiki: `O-RU Fronthaul Recovery usecase <https://wiki.o-ran-sc.org/display/RICNR/O-RU+Fronthaul+Recovery+usecase>`_.
 
 O-DU Slicing use cases
 ----------------------
 
-See the page in Wiki: `O-DU Slice Assurance usecase <https://wiki.o-ran-sc.org/display/RICNR/O-DU+Slice+Assurance+usecase>`_
+See the page in Wiki: `O-DU Slice Assurance usecase <https://wiki.o-ran-sc.org/display/RICNR/O-DU+Slice+Assurance+usecase>`_.
 
 Helm Manager
 ------------
 
-See the page in Wiki: `Release E <https://wiki.o-ran-sc.org/display/RICNR/Release+E>`_
+See the page in Wiki: `Release E <https://wiki.o-ran-sc.org/display/RICNR/Release+E>`_.
 
 Kubernetes deployment
 =====================
 
-Non-RT RIC can be also deployed in a Kubernetes cluster, `it/dep repository <https://gerrit.o-ran-sc.org/r/admin/repos/it/dep>`_
+Non-RT RIC can also be deployed in a Kubernetes cluster; the `it/dep repository <https://gerrit.o-ran-sc.org/r/admin/repos/it/dep>`_
 hosts deployment and integration artifacts. Instructions and helm charts to deploy the Non-RT-RIC functions in the
 OSC NONRTRIC integrated test environment can be found in the *./nonrtric* directory.
 
-For more information on installation of NonRT-RIC in Kubernetes, see `Deploy NONRTRIC in Kubernetes <https://wiki.o-ran-sc.org/display/RICNR/Deploy+NONRTRIC+in+Kubernetes>`_
+For more information on installation of Non-RT RIC in Kubernetes, see `Deploy NONRTRIC in Kubernetes <https://wiki.o-ran-sc.org/display/RICNR/Deploy+NONRTRIC+in+Kubernetes>`_.
 
 For more information see `Integration and Testing documentation on the O-RAN-SC wiki <https://docs.o-ran-sc.org/projects/o-ran-sc-it-dep/en/latest/index.html>`_.
 
index 17e3492..1837152 100644 (file)
@@ -25,8 +25,8 @@ command to start the components:
            -f policy-service/docker-compose.yaml
            -f ics/docker-compose.yaml
 
-The example above is just an example to start some of the components. 
-For more information on running and configuring the functions can be found in the README file in the "`docker-compose <https://gerrit.o-ran-sc.org/r/gitweb?p=nonrtric.git;a=tree;f=docker-compose>`__" folder, and on the `wiki page <https://wiki.o-ran-sc.org/display/RICNR/Release+E+-+Run>`_
+The example above shows just one way to start some of the components.
+More information on running and configuring the functions can be found in the README file in the "`docker-compose <https://gerrit.o-ran-sc.org/r/gitweb?p=nonrtric.git;a=tree;f=docker-compose>`__" folder and on the `wiki page <https://wiki.o-ran-sc.org/display/RICNR/Release+E+-+Run+in+Docker>`_.
 
 Install with Helm
 +++++++++++++++++
index 8126776..19194f4 100644 (file)
@@ -8,13 +8,13 @@
 Summary
 -------
 
-The Non-RealTime RIC (RAN Intelligent Controller) is an Orchestration and Automation function described by the O-RAN Alliance for non-real-time intelligent management of RAN (Radio Access Network) functions. 
+The Non-RealTime RIC (RAN Intelligent Controller) is an Orchestration and Automation function described by the O-RAN Alliance for non-real-time intelligent management of RAN (Radio Access Network) functions.
 
-The primary goal of the Non-RealTime RIC is to support non-real-time radio resource management, higher layer procedure optimization, policy optimization in RAN, and providing guidance, parameters, policies and AI/ML models to support the operation of near-RealTime RIC functions in the RAN to achieve higher-level non-real-time objectives. 
+The primary goal of the Non-RealTime RIC is to support non-real-time radio resource management, higher layer procedure optimization, policy optimization in RAN, and providing guidance, parameters, policies and AI/ML models to support the operation of near-RealTime RIC functions in the RAN to achieve higher-level non-real-time objectives.
 
-Non-RealTime RIC functions include service and policy management, RAN analytics and model-training for the near-RealTime RICs. 
+Non-RealTime RIC functions include service and policy management, RAN analytics and model-training for the near-RealTime RICs.
 The Non-RealTime RIC platform hosts and coordinates rApps (Non-RT RIC applications) to perform Non-RealTime RIC tasks.
-The Non-RealTime RIC also hosts the new R1 interface (between rApps and SMO/Non-RealTime-RIC services)
+The Non-RealTime RIC also hosts the new R1 interface (between rApps and SMO/Non-RealTime-RIC services).
 
 The O-RAN-SC (OSC) NONRTRIC project provides concepts, architecture and reference implementations as defined and described by the `O-RAN Alliance <https://www.o-ran.org>`_ architecture.
 The OSC NONRTRIC implementation communicates with near-RealTime RIC elements in the RAN via the A1 interface. Using the A1 interface the NONRTRIC will facilitate the provision of policies for individual UEs or groups of UEs; monitor and provide basic feedback on policy state from near-RealTime RICs; provide enrichment information as required by near-RealTime RICs; and facilitate ML model training, distribution and inference in cooperation with the near-RealTime RICs.
@@ -49,7 +49,7 @@ The source code for "E" Release is in the `NONRTRIC <https://gerrit.o-ran-sc.org
 Non-RT-RIC Control Panel / NONRTRIC Dashboard
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Graphical user interface
+Graphical user interface.
 
 * View and Manage A1 policies in the RAN (near-RT-RICs)
 * Graphical A1 policy creation/editing is model-driven, based on policy type's JSON schema
@@ -64,7 +64,7 @@ Implementation:
 
 Please refer the developer guide and the `Wiki <https://wiki.o-ran-sc.org/display/RICNR/>`_ to set up in your local environment.
 
-More details available at the `NONRTRIC-Portal documentation site <https://docs.o-ran-sc.org/projects/o-ran-sc-portal-nonrtric-controlpanel>`_
+More details are available at the `NONRTRIC-Portal documentation site <https://docs.o-ran-sc.org/projects/o-ran-sc-portal-nonrtric-controlpanel>`_.
 
 Information Coordination Service
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -80,140 +80,138 @@ Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs (A1 Enrichment I
   + Information Consumers
   + Information Jobs
 
-* Information Query API (e.g. per producer, per consumer, per types)
-* Query status of Information jobs
-* After Information-type/Producer/Consumer/Job is successfully registered delivery/flow can happen directly between Information Producers and Information Consumers
+* Information Query API (e.g. per producer, per consumer, per type).
+* Query status of Information jobs.
+* After an Information Type/Producer/Consumer/Job is successfully registered, delivery/flow can happen directly between Information Producers and Information Consumers.
 * The Information Coordinator Service natively supports the O-RAN A1 Enrichment Information (A1-EI) interface, supporting coordination A1-EI Jobs where information (A1-EI)flow from the SMO/Non-RT-RIC/rApps to near-RT-RICs over the A1 interface.
 
 Implementation:
 
-* Implemented as a Java Spring Boot application
+* Implemented as a Java Spring Boot application.
 
 A1 Policy Management Service (from ONAP CCSDK)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 A1 Controller Service above A1 Controller/Adaptor that provides:
 
-* Unified REST & DMaaP NBI APIs for managing A1 Policies in all near-RT-RICs
+* Unified REST & DMaaP NBI APIs for managing A1 Policies in all near-RT-RICs.
 
-  + Query A1 Policy Types in near-RT-RICs
-  + Create/Query/Update/Delete A1 Policy Instances in near-RT-RICs
-  + Query Status for A1 Policy Instances
+  + Query A1 Policy Types in near-RT-RICs.
+  + Create/Query/Update/Delete A1 Policy Instances in near-RT-RICs.
+  + Query Status for A1 Policy Instances.
 
-* Maintains (persistent) cache of RAN's A1 Policy information
+* Maintains (persistent) cache of RAN's A1 Policy information.
 
-  * Support RAN-wide view of A1 Policy information
-  * Streamline A1 traffic
-  * Enable (optional) re-synchronization after inconsistencies / near-RT-RIC restarts
-  * Supports a large number of near-RT-RICs (& multi-version support)
+  * Support RAN-wide view of A1 Policy information.
+  * Streamline A1 traffic.
+  * Enable (optional) re-synchronization after inconsistencies / near-RT-RIC restarts.
+  * Supports a large number of near-RT-RICs (& multi-version support).
 
-* Converged ONAP & O-RAN-SC A1 Adapter/Controller functions in ONAP SDNC/CCSDK (Optionally deploy without A1 Adaptor to connect direct to near-RT-RICs)
-* Support for different Southbound connectors per near-RT-RIC - e.g. different A1 versions, different near-RT-RIC version, different A1 adapter/controllers supports different or proprietary A1 controllers/EMSs
+* Converged ONAP & O-RAN-SC A1 Adapter/Controller functions in ONAP SDNC/CCSDK (optionally deploy without the A1 Adaptor to connect directly to near-RT-RICs).
+* Support for different Southbound connectors per near-RT-RIC, e.g. different A1 versions, different near-RT-RIC versions, or different A1 adapters/controllers supporting different or proprietary A1 controllers/EMSs.
 
-See also: `A1 Policy Management Service in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_ 
+See also: `A1 Policy Management Service in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
 Implementation:
 
-* Implemented as a Java Spring Boot application
+* Implemented as a Java Spring Boot application.
 
 A1/SDNC Controller & A1 Adapter (Controller plugin)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Mediation point for A1 interface termination in SMO/NONRTRIC
+Mediation point for A1 interface termination in SMO/NONRTRIC.
 
-* Implemented as CCSDK OSGI Feature/Bundles
-* A1 REST southbound
-* RESTCONF Northbound
-* NETCONF YANG > RESTCONF adapter
-* SLI Mapping logic supported
-* Can be included in an any controller based on ONAP CCSDK
+* Implemented as CCSDK OSGI Feature/Bundles.
+* A1 REST southbound.
+* RESTCONF Northbound.
+* NETCONF YANG > RESTCONF adapter.
+* SLI Mapping logic supported.
+* Can be included in any controller based on ONAP CCSDK.
 
-See also: `A1 Adapter/Controller Functions in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_ 
+See also: `A1 Adapter/Controller Functions in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
 A1 Interface / Near-RT-RIC Simulator
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Stateful A1 test stub.
 
-* Used to create multiple stateful A1 providers (simulated near-rt-rics)
-* Supports A1-Policy and A1-Enrichment Information
-* Swagger-based northbound interface, so easy to change the A1 profile exposed (e.g. A1 version, A1 Policy Types, A1-E1 consumers, etc)
-* All A1-AP versions supported
+* Used to create multiple stateful A1 providers (simulated near-rt-rics).
+* Supports A1-Policy and A1-Enrichment Information.
+* Swagger-based northbound interface, making it easy to change the A1 profile exposed (e.g. A1 version, A1 Policy Types, A1-E1 consumers, etc.).
+* All A1-AP versions supported.
 
 Implementation:
 
-* Implemented as a Python application
-* Repo: *sim/a1-interface*
+* Implemented as a Python application.
+* Repo: *sim/a1-interface*.
 
 More details available at the `A1 Simulator documentation site <https://docs.o-ran-sc.org/projects/o-ran-sc-sim-a1-interface>`_
 
 Non-RT-RIC (Spring Cloud) Service Gateway
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Support Apps to use A1 Services 
+Support Apps to use A1 Services.
 
-* `Spring Cloud Gateway <https://cloud.spring.io/spring-cloud-gateway>`_ provides the library to build a basic API gateway
-* Exposes A1 Policy Management Service & Information Coordinator Service. 
+* `Spring Cloud Gateway <https://cloud.spring.io/spring-cloud-gateway>`_ provides the library to build a basic API gateway.
+* Exposes A1 Policy Management Service & Information Coordinator Service.
 * Additional predicates can be added in code or preferably in the Gateway yaml configuration.
 
 Implementation:
 
-* Implemented as a Java Spring Cloud application
-* Repo: *portal/nonrtric-controlpanel*
+* Implemented as a Java Spring Cloud application.
+* Repo: *portal/nonrtric-controlpanel*.
 
 
 Non-RT-RIC (Kong) Service Exposure Prototyping
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Support Apps to use NONRTRIC, SMO and other App interfaces
-A building block for coming releases as the R1 Interface concept matures 
+Support Apps to use NONRTRIC, SMO and other App interfaces.
+A building block for coming releases as the R1 Interface concept matures.
 
-* Support dynamic registration and exposure of service interfaces to Non-RT-RIC applications (& NONRTRIC Control panel)
-* Extends a static gateway function specifically for NONRTRIC Control panel (described above)
-* Initial version based on `Kong API Gateway <https://docs.konghq.com/gateway-oss>`_ function
-* Initial exposure candidates include A1 (NONRTRIC) services & O1 (OAM/SMO) services
+* Support dynamic registration and exposure of service interfaces to Non-RT-RIC applications (& NONRTRIC Control panel).
+* Extends a static gateway function specifically for NONRTRIC Control panel (described above).
+* Initial version based on `Kong API Gateway <https://docs.konghq.com/gateway-oss>`_ function.
+* Initial exposure candidates include A1 (NONRTRIC) services & O1 (OAM/SMO) services.
 
-NONRTRIC Kubernetes deployment - including Kong configurations can be found in the OSC `it/dep <https://gerrit.o-ran-sc.org/r/gitweb?p=it/dep.git;a=tree;f=nonrtric/helm/nonrtric>`_ Gerrit repo. 
+The NONRTRIC Kubernetes deployment, including the Kong configurations, can be found in the OSC `it/dep <https://gerrit.o-ran-sc.org/r/gitweb?p=it/dep.git;a=tree;f=nonrtric/helm/nonrtric>`_ Gerrit repo.
 
 DMaaP/Kafka Information Producer Adapters
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Configurable mediators to take information from DMaaP (& Kafka) and present it as a coordinated Information Producer 
-
-These mediators/adapters are generic information producers, which registers themselves as an information producers of defined information types (in Information Coordination Service).
-The information types are defined in a configuration file. 
-Information jobs defined using Information Coordination Service (ICS) then allow information consumers to retrieve data from DMaaP MR or Kafka topics (accessing the ICS API).
+Configurable mediators to take information from DMaaP and Kafka and present it as a coordinated Information Producer.
 
-Two alternative implementations to allow Information Consumers to consume DMaaP or Kafka events as coordinated Information Jobs.
+These mediators/adapters are generic information producers, which register themselves as information producers of defined information types in Information Coordination Service (ICS).
+The information types are defined in a configuration file.
+Information jobs defined using ICS then allow information consumers to retrieve data from DMaaP MR or Kafka topics (accessing the ICS API).
 
-Implementations:
+There are two alternative implementations to allow Information Consumers to consume DMaaP or Kafka events as coordinated Information Jobs.
 
-1. A version implemented in Java (Spring) - Supporting DMaaP and Kafka mediation
-2. A version implemented in Go - Supporting DMaaP mediation 
+1. A version implemented in Java Spring (DMaaP Adaptor Service).
+2. A version implemented in Go (DMaaP Mediator Producer).
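
A rough, hypothetical sketch may make the flow above easier to picture: the Go snippet below shows what creating such an information job against the ICS data-consumer API could look like. The endpoint path, port, job identifier and field names are assumptions for illustration only and should be checked against the ICS API specification (ics-api.yaml) referenced in these docs::

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "net/http"
    )

    // infoJob mirrors the assumed request body of the ICS data-consumer API.
    type infoJob struct {
        InfoTypeID    string                 `json:"info_type_id"`
        JobOwner      string                 `json:"job_owner"`
        JobDefinition map[string]interface{} `json:"job_definition"`
        JobResultURI  string                 `json:"job_result_uri"`
    }

    func main() {
        job := infoJob{
            InfoTypeID:    "ExampleInformationType",            // assumed type id, as defined by the producer's type configuration
            JobOwner:      "example-consumer",
            JobDefinition: map[string]interface{}{},
            JobResultURI:  "http://example-consumer:8080/data", // where the mediated DMaaP/Kafka data should be pushed
        }
        body, _ := json.Marshal(job)

        // PUT the job to ICS; host, port and path are assumptions.
        req, _ := http.NewRequest(http.MethodPut,
            "http://ics:8083/data-consumer/v1/info-jobs/example-job", bytes.NewReader(body))
        req.Header.Set("Content-Type", "application/json")

        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            fmt.Println("request failed:", err)
            return
        }
        defer resp.Body.Close()
        fmt.Println("ICS responded with status:", resp.Status)
    }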
 
 Initial Non-RT-RIC App Catalogue
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Register for Non-RT-RIC Apps.
 
-* Non-RT-RIC Apps can be registered / queried
-* Limited functionality/integration for now
-* *More work required in coming releases as the rApp concept matures*
+* Non-RT-RIC Apps can be registered / queried.
+* Limited functionality/integration for now.
+* *More work required in coming releases as the rApp concept matures*.
 
 Initial K8S Helm Chart LCM Manager
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
-Onboard, start, stop, and modify Non-RT-RIC App µServices as Helm Charts
-*A building block for coming releases as the R-APP concept matures*
+Onboard, start, stop, and modify Non-RT-RIC App µServices as Helm Charts.
+*A building block for coming releases as the R-APP concept matures*.
 
-* Interfaces that accepts Non-RT-RIC App µServices Helm Charts
-* Support basic LCM operations
-* Onboard, Start, Stop, Modify, Monitor
-* Initial version co-developed with v. similar functions in ONAP
-* *Limited functionality/integration for now*
+* Interfaces that accept Non-RT-RIC App µServices Helm Charts.
+* Support basic LCM operations.
+* Onboard, Start, Stop, Modify, Monitor.
+* Initial version co-developed with very similar functions in ONAP.
+* *Limited functionality/integration for now*.
 
 Test Framework
 ~~~~~~~~~~~~~~
 
-A full test environment with extensive test cases/scripts can be found in the ``test`` directory in the *nonrtric* source code
+A full test environment with extensive test cases/scripts can be found in the ``test`` directory in the *nonrtric* source code.
 
 Use Cases
 ~~~~~~~~~
@@ -221,9 +219,9 @@ Use Cases
 "Helloworld" O-RU Fronthaul Recovery use case
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-A very simplified closed-loop rApp use case to re-establish front-haul connections between O-DUs and O-RUs if they fail. Not intended to to be 'real-world'
+A very simplified closed-loop rApp use case to re-establish front-haul connections between O-DUs and O-RUs if they fail. Not intended to be 'real-world'.
 
 "Helloworld" O-DU Slice Assurance use case
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-A very simplified closed-loop rApp use case to re-prioritize a RAN slice's radio resource allocation priority if sufficient throughput cannot be maintained. Not intended to to be 'real-world'
+A very simplified closed-loop rApp use case to re-prioritize a RAN slice's radio resource allocation priority if sufficient throughput cannot be maintained. Not intended to be 'real-world'.
index 607a6ad..7188c01 100644 (file)
@@ -26,7 +26,7 @@
     <parent>
         <groupId>org.springframework.boot</groupId>
         <artifactId>spring-boot-starter-parent</artifactId>
-        <version>2.3.8.RELEASE</version>
+        <version>2.6.2</version>
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
@@ -53,9 +53,9 @@
     </properties>
     <dependencies>
         <dependency>
-        <groupId>org.onap.policy.clamp.participant</groupId>
-        <artifactId>policy-clamp-participant-impl-kubernetes</artifactId>
-        <version>${policy-clamp-participant-impl-kubernetes.version}</version>
+            <groupId>org.onap.policy.clamp.participant</groupId>
+            <artifactId>policy-clamp-participant-impl-kubernetes</artifactId>
+            <version>${policy-clamp-participant-impl-kubernetes.version}</version>
         </dependency>
     </dependencies>
     <build>
                 <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-dependency-plugin</artifactId>
                 <executions>
-                <execution>
-                    <id>copy</id>
-                    <phase>package</phase>
-                    <goals>
-                    <goal>copy</goal>
-                    </goals>
-                    <configuration>
-                    <artifactItems>
-                        <artifactItem>
-                        <groupId>org.onap.policy.clamp.participant</groupId>
-                        <artifactId>policy-clamp-participant-impl-kubernetes</artifactId>
-                        <version>${policy-clamp-participant-impl-kubernetes.version}</version>
-                        <type>jar</type>
-                        <overWrite>true</overWrite>
-                        <outputDirectory>${basedir}/target</outputDirectory>
-                        <destFileName>app.jar</destFileName>
-                        </artifactItem>
-                    </artifactItems>
-                    </configuration>
-                </execution>
+                    <execution>
+                        <id>copy</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>copy</goal>
+                        </goals>
+                        <configuration>
+                            <artifactItems>
+                                <artifactItem>
+                                    <groupId>org.onap.policy.clamp.participant</groupId>
+                                    <artifactId>policy-clamp-participant-impl-kubernetes</artifactId>
+                                    <version>${policy-clamp-participant-impl-kubernetes.version}</version>
+                                    <type>jar</type>
+                                    <overWrite>true</overWrite>
+                                    <outputDirectory>${basedir}/target</outputDirectory>
+                                    <destFileName>app.jar</destFileName>
+                                </artifactItem>
+                            </artifactItems>
+                        </configuration>
+                    </execution>
                 </executions>
             </plugin>
             <plugin>
         <system>JIRA</system>
         <url>https://jira.o-ran-sc.org/</url>
     </issueManagement>
-</project>
+</project>
\ No newline at end of file
index e9d179d..cc8813e 100644 (file)
@@ -25,7 +25,6 @@ WORKDIR /opt/app/information-coordinator-service
 RUN mkdir -p /var/log/information-coordinator-service
 RUN mkdir -p /opt/app/information-coordinator-service/etc/cert/
 RUN mkdir -p /var/information-coordinator-service
-RUN chmod -R 777 /var/information-coordinator-service
 
 EXPOSE 8083 8434
 
@@ -34,8 +33,16 @@ ADD target/${JAR} /opt/app/information-coordinator-service/information-coordinat
 ADD /config/keystore.jks /opt/app/information-coordinator-service/etc/cert/keystore.jks
 ADD /config/truststore.jks /opt/app/information-coordinator-service/etc/cert/truststore.jks
 
+ARG user=nonrtric
+ARG group=nonrtric
 
-RUN chmod -R 777 /opt/app/information-coordinator-service/config/
+RUN groupadd $group && \
+    useradd -r -g $group $user
+RUN chown -R $user:$group /opt/app/information-coordinator-service
+RUN chown -R $user:$group /var/log/information-coordinator-service
+RUN chown -R $user:$group /var/information-coordinator-service
+
+USER ${user}
 
 CMD ["java", "-jar", "/opt/app/information-coordinator-service/information-coordinator-service.jar"]
 
index 2fe3474..69ad473 100644 (file)
         }},
         "/actuator/threaddump": {"get": {
             "summary": "Actuator web endpoint 'threaddump'",
-            "operationId": "handle_2_1_3",
+            "operationId": "threaddump_4",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         },
         "/actuator/loggers": {"get": {
             "summary": "Actuator web endpoint 'loggers'",
-            "operationId": "handle_6",
+            "operationId": "loggers_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/health/**": {"get": {
             "summary": "Actuator web endpoint 'health-path'",
-            "operationId": "handle_12",
+            "operationId": "health-path_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/metrics/{requiredMetricName}": {"get": {
             "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
-            "operationId": "handle_5",
+            "operationId": "metrics-requiredMetricName_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         "/actuator/loggers/{name}": {
             "post": {
                 "summary": "Actuator web endpoint 'loggers-name'",
-                "operationId": "handle_0",
+                "operationId": "loggers-name_3",
                 "responses": {"200": {
                     "description": "OK",
                     "content": {"*/*": {"schema": {"type": "object"}}}
             },
             "get": {
                 "summary": "Actuator web endpoint 'loggers-name'",
-                "operationId": "handle_7",
+                "operationId": "loggers-name_4",
                 "responses": {"200": {
                     "description": "OK",
                     "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/metrics": {"get": {
             "summary": "Actuator web endpoint 'metrics'",
-            "operationId": "handle_4",
+            "operationId": "metrics_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/info": {"get": {
             "summary": "Actuator web endpoint 'info'",
-            "operationId": "handle_9",
+            "operationId": "info_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         },
         "/actuator/logfile": {"get": {
             "summary": "Actuator web endpoint 'logfile'",
-            "operationId": "handle_8",
+            "operationId": "logfile_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/health": {"get": {
             "summary": "Actuator web endpoint 'health'",
-            "operationId": "handle_11",
+            "operationId": "health_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         }},
         "/actuator/heapdump": {"get": {
             "summary": "Actuator web endpoint 'heapdump'",
-            "operationId": "handle_10",
+            "operationId": "heapdump_2",
             "responses": {"200": {
                 "description": "OK",
                 "content": {"*/*": {"schema": {"type": "object"}}}
         "version": "1.0"
     },
     "tags": [
+        {"name": "A1-EI (callbacks)"},
+        {
+            "name": "Data producer (callbacks)",
+            "description": "API implemented by data producers"
+        },
+        {"name": "Data consumer"},
+        {"name": "Data consumer (callbacks)"},
         {
             "name": "A1-EI (registration)",
             "description": "Data consumer EI job registration"
             "name": "A1-EI (callbacks)",
             "description": "Data consumer EI job status callbacks"
         },
-        {
-            "name": "Data producer (callbacks)",
-            "description": "API implemented by data producers"
-        },
+        {"name": "Service status"},
+        {"name": "A1-EI (registration)"},
+        {"name": "Data producer (registration)"},
+        {"name": "Data producer (callbacks)"},
         {
             "name": "Data producer (registration)",
             "description": "API for data producers"
         },
-        {
-            "name": "Service status",
-            "description": "API for monitoring of the service"
-        },
         {
             "name": "Data consumer",
             "description": "API for data consumers"
         },
+        {
+            "name": "Service status",
+            "description": "API for monitoring of the service"
+        },
         {
             "name": "Actuator",
             "description": "Monitor and interact",
index 03de106..dafb0ff 100644 (file)
@@ -30,18 +30,25 @@ info:
 servers:
 - url: /
 tags:
+- name: A1-EI (callbacks)
+- name: Data producer (callbacks)
+  description: API implemented by data producers
+- name: Data consumer
+- name: Data consumer (callbacks)
 - name: A1-EI (registration)
   description: Data consumer EI job registration
 - name: A1-EI (callbacks)
   description: Data consumer EI job status callbacks
+- name: Service status
+- name: A1-EI (registration)
+- name: Data producer (registration)
 - name: Data producer (callbacks)
-  description: API implemented by data producers
 - name: Data producer (registration)
   description: API for data producers
-- name: Service status
-  description: API for monitoring of the service
 - name: Data consumer
   description: API for data consumers
+- name: Service status
+  description: API for monitoring of the service
 - name: Actuator
   description: Monitor and interact
   externalDocs:
@@ -118,7 +125,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'threaddump'
-      operationId: handle_2_1_3
+      operationId: threaddump_4
       responses:
         200:
           description: OK
@@ -348,7 +355,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers'
-      operationId: handle_6
+      operationId: loggers_2
       responses:
         200:
           description: OK
@@ -361,7 +368,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'health-path'
-      operationId: handle_12
+      operationId: health-path_2
       responses:
         200:
           description: OK
@@ -389,7 +396,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'metrics-requiredMetricName'
-      operationId: handle_5
+      operationId: metrics-requiredMetricName_2
       parameters:
       - name: requiredMetricName
         in: path
@@ -466,7 +473,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers-name'
-      operationId: handle_7
+      operationId: loggers-name_4
       parameters:
       - name: name
         in: path
@@ -486,7 +493,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'loggers-name'
-      operationId: handle_0
+      operationId: loggers-name_3
       parameters:
       - name: name
         in: path
@@ -617,7 +624,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'metrics'
-      operationId: handle_4
+      operationId: metrics_2
       responses:
         200:
           description: OK
@@ -630,7 +637,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'info'
-      operationId: handle_9
+      operationId: info_2
       responses:
         200:
           description: OK
@@ -923,7 +930,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'logfile'
-      operationId: handle_8
+      operationId: logfile_2
       responses:
         200:
           description: OK
@@ -1107,7 +1114,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'health'
-      operationId: handle_11
+      operationId: health_2
       responses:
         200:
           description: OK
@@ -1211,7 +1218,7 @@ paths:
       tags:
       - Actuator
       summary: Actuator web endpoint 'heapdump'
-      operationId: handle_10
+      operationId: heapdump_2
       responses:
         200:
           description: OK
index d66992d..eb5d662 100644 (file)
@@ -26,7 +26,7 @@
     <parent>
         <groupId>org.springframework.boot</groupId>
         <artifactId>spring-boot-starter-parent</artifactId>
-        <version>2.5.3</version>
+        <version>2.6.2</version>
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
@@ -56,8 +56,6 @@
         <spotless-maven-plugin.version>1.24.3</spotless-maven-plugin.version>
         <swagger-codegen-maven-plugin.version>3.0.11</swagger-codegen-maven-plugin.version>
         <docker-maven-plugin>0.30.0</docker-maven-plugin>
-        <version.dmaap>1.1.11</version.dmaap>
-        <javax.ws.rs-api.version>2.1.1</javax.ws.rs-api.version>
         <sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
         <jacoco-maven-plugin.version>0.8.5</jacoco-maven-plugin.version>
         <exec.skip>true</exec.skip>
@@ -66,7 +64,7 @@
         <dependency>
             <groupId>org.springdoc</groupId>
             <artifactId>springdoc-openapi-ui</artifactId>
-            <version>1.5.4</version>
+            <version>1.6.3</version>
         </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
                     <java>
                         <removeUnusedImports />
                         <importOrder>
-                            <order>com,java,javax,org</order>
+                            <order>com,java,org</order>
                         </importOrder>
                     </java>
                 </configuration>
         <system>JIRA</system>
         <url>https://jira.o-ran-sc.org/</url>
     </issueManagement>
-</project>
+</project>
\ No newline at end of file
index cfac5cf..0b47733 100644 (file)
@@ -38,7 +38,6 @@ import java.security.cert.X509Certificate;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Collectors;
-
 import javax.net.ssl.KeyManagerFactory;
 
 import org.oransc.ics.configuration.WebClientConfig;
index d0fda94..b369261 100644 (file)
@@ -53,26 +53,24 @@ public class A1eCallbacks {
 
     private final AsyncRestClient restClient;
     private final InfoJobs eiJobs;
-    private final InfoProducers eiProducers;
 
     @Autowired
-    public A1eCallbacks(ApplicationConfig config, InfoJobs eiJobs, InfoProducers eiProducers) {
+    public A1eCallbacks(ApplicationConfig config, InfoJobs eiJobs) {
         AsyncRestClientFactory restClientFactory = new AsyncRestClientFactory(config.getWebClientConfig());
         this.restClient = restClientFactory.createRestClientUseHttpProxy("");
         this.eiJobs = eiJobs;
-        this.eiProducers = eiProducers;
     }
 
-    public Flux<String> notifyJobStatus(Collection<InfoType> eiTypes) {
+    public Flux<String> notifyJobStatus(Collection<InfoType> eiTypes, InfoProducers eiProducers) {
         return Flux.fromIterable(eiTypes) //
             .flatMap(eiType -> Flux.fromIterable(this.eiJobs.getJobsForType(eiType))) //
             .filter(eiJob -> !eiJob.getJobStatusUrl().isEmpty()) //
-            .filter(eiJob -> this.eiProducers.isJobEnabled(eiJob) != eiJob.isLastStatusReportedEnabled())
-            .flatMap(this::noifyStatusToJobOwner);
+            .filter(eiJob -> eiProducers.isJobEnabled(eiJob) != eiJob.isLastStatusReportedEnabled())
+            .flatMap(eiJob -> noifyStatusToJobOwner(eiJob, eiProducers));
     }
 
-    private Mono<String> noifyStatusToJobOwner(InfoJob job) {
-        boolean isJobEnabled = this.eiProducers.isJobEnabled(job);
+    private Mono<String> noifyStatusToJobOwner(InfoJob job, InfoProducers eiProducers) {
+        boolean isJobEnabled = eiProducers.isJobEnabled(job);
         A1eEiJobStatus status = isJobEnabled ? new A1eEiJobStatus(A1eEiJobStatus.EiJobStatusValues.ENABLED)
             : new A1eEiJobStatus(A1eEiJobStatus.EiJobStatusValues.DISABLED);
         String body = gson.toJson(status);
index 9d98eaa..19b2698 100644 (file)
@@ -91,9 +91,9 @@ public class InfoProducers {
 
         producerCallbacks.startInfoJobs(producer, this.infoJobs) //
             .collectList() //
-            .flatMapMany(list -> consumerCallbacks.notifyJobStatus(producer.getInfoTypes())) //
+            .flatMapMany(list -> consumerCallbacks.notifyJobStatus(producer.getInfoTypes(), this)) //
             .collectList() //
-            .flatMapMany(list -> consumerCallbacks.notifyJobStatus(previousTypes)) //
+            .flatMapMany(list -> consumerCallbacks.notifyJobStatus(previousTypes, this)) //
             .subscribe();
 
         return producer;
@@ -136,7 +136,7 @@ public class InfoProducers {
                 this.logger.error("Bug, no producer found");
             }
         }
-        this.consumerCallbacks.notifyJobStatus(producer.getInfoTypes()) //
+        this.consumerCallbacks.notifyJobStatus(producer.getInfoTypes(), this) //
             .subscribe();
     }
 
index 690f47c..36ca28e 100644 (file)
@@ -89,7 +89,7 @@ public class ProducerSupervision {
             .filter(infoJob -> !producer.isJobEnabled(infoJob)) //
             .flatMap(infoJob -> producerCallbacks.startInfoJob(producer, infoJob, Retry.max(1)), MAX_CONCURRENCY) //
             .collectList() //
-            .flatMapMany(startedJobs -> consumerCallbacks.notifyJobStatus(producer.getInfoTypes())) //
+            .flatMapMany(startedJobs -> consumerCallbacks.notifyJobStatus(producer.getInfoTypes(), infoProducers)) //
             .collectList();
     }
 
index 6e31874..0f8b205 160000 (submodule)
--- a/onap/oran
+++ b/onap/oran
@@ -1 +1 @@
-Subproject commit 6e31874958b44f45c5dd78aef5c783916b16c6ee
+Subproject commit 0f8b20544745afaf9c7b38140b9516667d9c4752
diff --git a/pom.xml b/pom.xml
index 39c316e..1a7d888 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -38,9 +38,6 @@
         <module>r-app-catalogue</module>
         <module>helm-manager</module>
         <module>dmaap-adaptor-java</module>
-        <module>dmaap-mediator-producer</module>
-        <module>test/usecases/oruclosedlooprecovery/goversion</module>
-        <module>test/usecases/odusliceassurance/goversion</module>
     </modules>
     <build>
         <plugins>
index cd2efc9..ed4be95 100644 (file)
@@ -31,8 +31,15 @@ ADD /config/application.yaml /opt/app/r-app-catalogue/config/application.yaml
 ADD /config/r-app-catalogue-keystore.jks /opt/app/r-app-catalogue/etc/cert/keystore.jks
 ADD target/${JAR} /opt/app/r-app-catalogue/r-app-catalogue.jar
 
+ARG user=nonrtric
+ARG group=nonrtric
 
-RUN chmod -R 777 /opt/app/r-app-catalogue/config/
+RUN groupadd $group && \
+    useradd -r -g $group $user
+RUN chown -R $user:$group /opt/app/r-app-catalogue
+RUN chown -R $user:$group /var/log/r-app-catalogue
+
+USER ${user}
 
 CMD ["java", "-jar", "/opt/app/r-app-catalogue/r-app-catalogue.jar"]
 
index 748cf2b..9ba6c39 100644 (file)
@@ -14,7 +14,7 @@ paths:
       summary: Services
       operationId: getServices
       responses:
-        200:
+        "200":
           description: Services
           content:
             application/json:
@@ -40,13 +40,13 @@ paths:
           type: string
         example: DroneIdentifier
       responses:
-        200:
+        "200":
           description: Service
           content:
             application/json:
               schema:
                 $ref: '#/components/schemas/service'
-        404:
+        "404":
           description: Service is not found
           content:
             application/json:
@@ -75,9 +75,9 @@ paths:
               $ref: '#/components/schemas/inputService'
         required: true
       responses:
-        200:
+        "200":
           description: Service updated
-        201:
+        "201":
           description: Service created
           headers:
             Location:
@@ -86,14 +86,14 @@ paths:
               explode: false
               schema:
                 type: string
-        400:
+        "400":
           description: Provided service is not correct
           content:
             application/json:
               schema:
                 $ref: '#/components/schemas/error_information'
               example:
-                detail: 'Service is missing required property: version'
+                detail: "Service is missing required property: version"
                 status: 400
       deprecated: false
     delete:
@@ -111,7 +111,7 @@ paths:
           type: string
         example: DroneIdentifier
       responses:
-        204:
+        "204":
           description: Service deleted
       deprecated: false
 components:
index 5da52d3..67be912 100644 (file)
@@ -26,7 +26,7 @@
     <parent>\r
         <groupId>org.springframework.boot</groupId>\r
         <artifactId>spring-boot-starter-parent</artifactId>\r
-        <version>2.3.4.RELEASE</version>\r
+        <version>2.6.2</version>\r
         <relativePath />\r
     </parent>\r
     <groupId>org.o-ran-sc.nonrtric</groupId>\r
@@ -43,8 +43,8 @@
         <swagger-annotations.version>1.5.22</swagger-annotations.version>\r
         <springfox.version>2.9.2</springfox.version>\r
         <jackson-databind-nullable.version>0.2.1</jackson-databind-nullable.version>\r
-        <openapi-generator-maven-plugin.version>4.3.1</openapi-generator-maven-plugin.version>\r
-        <swagger-codegen-maven-plugin.version>3.0.11</swagger-codegen-maven-plugin.version>\r
+        <openapi-generator-maven-plugin.version>5.3.1</openapi-generator-maven-plugin.version>\r
+        <swagger-codegen-maven-plugin.version>3.0.31</swagger-codegen-maven-plugin.version>\r
         <formatter-maven-plugin.version>2.12.2</formatter-maven-plugin.version>\r
         <spotless-maven-plugin.version>1.24.3</spotless-maven-plugin.version>\r
         <jacoco-maven-plugin.version>0.8.6</jacoco-maven-plugin.version>\r
             </plugin>\r
         </plugins>\r
     </build>\r
-</project>
\ No newline at end of file
+</project>\r
index 9b943df..8a66e14 100644 (file)
@@ -18,9 +18,9 @@
 
 package org.oransc.rappcatalogue;
 
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThrows;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 
 import javax.net.ssl.SSLContext;
 
@@ -28,8 +28,8 @@ import org.apache.http.client.HttpClient;
 import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
 import org.apache.http.impl.client.HttpClients;
 import org.apache.http.ssl.SSLContextBuilder;
-import org.junit.Test;
-import org.junit.runner.RunWith;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.test.context.SpringBootTest;
@@ -43,11 +43,11 @@ import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
 import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
 import org.springframework.test.context.TestPropertySource;
-import org.springframework.test.context.junit4.SpringRunner;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
 import org.springframework.util.ResourceUtils;
 import org.springframework.web.client.ResourceAccessException;
 
-@RunWith(SpringRunner.class)
+@ExtendWith(SpringExtension.class)
 @SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
 @TestPropertySource(
     properties = { //
index 6703e3c..aa0b599 100644 (file)
@@ -2,3 +2,4 @@
 .dockererr
 .env
 .payload
+kafkaprocon
index 8366843..f1c8d42 100644 (file)
@@ -29,7 +29,7 @@ import (
        "oransc.org/usecase/oduclosedloop/internal/sliceassurance"
 )
 
-const TOPIC string = "/events/unauthenticated.PERFORMANCE_MEASUREMENTS"
+const TOPIC string = "unauthenticated.VES_O_RAN_SC_HELLO_WORLD_PM_STREAMING_OUTPUT"
 
 var configuration *config.Config
 
diff --git a/test/usecases/odusliceassurance/goversion/pom.xml b/test/usecases/odusliceassurance/goversion/pom.xml
deleted file mode 100644 (file)
index f741038..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-<!--
-  ============LICENSE_START=======================================================
-   Copyright (C) 2021 Nordix Foundation.
-  ================================================================================
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-
-  SPDX-License-Identifier: Apache-2.0
-  ============LICENSE_END=========================================================
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>oransc.org</groupId>
-    <artifactId>o-du-slice-assurance</artifactId>
-    <version>1.1.0</version>
-    <properties>
-        <docker-maven-plugin.version>0.30.0</docker-maven-plugin.version>
-    </properties>
-
-     <build>
-        <plugins>
-            <plugin>
-                <groupId>io.fabric8</groupId>
-                <artifactId>docker-maven-plugin</artifactId>
-                <version>${docker-maven-plugin.version}</version>
-                <inherited>false</inherited>
-                <executions>
-                    <execution>
-                        <id>generate-nonrtric-o-du-slice-assurance-image</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>build</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-o-du-slice-assurance:${project.version}</name>
-                                    <build>
-                                        <cleanup>try</cleanup>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                    <execution>
-                        <id>push-nonrtric-o-du-slice-assurance-image</id>
-                        <goals>
-                            <goal>build</goal>
-                            <goal>push</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-o-du-slice-assurance:${project.version}</name>
-                                    <build>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                            <tag>latest</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-      </plugins>
-    </build>
-</project>
index b8f80be..41cc928 100644 (file)
@@ -55,8 +55,8 @@ func getDistributedUnitFunctions(w http.ResponseWriter, r *http.Request) {
                                        AdmState:                "locked",
                                        UserLabel:               "rrm-pol-1",
                                        RRMPolicyMaxRatio:       100,
-                                       RRMPolicyMinRatio:       "0",
-                                       RRMPolicyDedicatedRatio: "0",
+                                       RRMPolicyMinRatio:       0,
+                                       RRMPolicyDedicatedRatio: 0,
                                        ResourceType:            "prb",
                                        RRMPolicyMembers: []messages.RRMPolicyMember{
                                                {
@@ -72,8 +72,8 @@ func getDistributedUnitFunctions(w http.ResponseWriter, r *http.Request) {
                                        AdmState:                "unlocked",
                                        UserLabel:               "rrm-pol-2",
                                        RRMPolicyMaxRatio:       20,
-                                       RRMPolicyMinRatio:       "10",
-                                       RRMPolicyDedicatedRatio: "15",
+                                       RRMPolicyMinRatio:       10,
+                                       RRMPolicyDedicatedRatio: 15,
                                        ResourceType:            "prb",
                                        RRMPolicyMembers: []messages.RRMPolicyMember{
                                                {
@@ -89,8 +89,8 @@ func getDistributedUnitFunctions(w http.ResponseWriter, r *http.Request) {
                                        AdmState:                "unlocked",
                                        UserLabel:               "rrm-pol-3",
                                        RRMPolicyMaxRatio:       30,
-                                       RRMPolicyMinRatio:       "10",
-                                       RRMPolicyDedicatedRatio: "5",
+                                       RRMPolicyMinRatio:       10,
+                                       RRMPolicyDedicatedRatio: 5,
                                        ResourceType:            "prb",
                                        RRMPolicyMembers: []messages.RRMPolicyMember{
                                                {
@@ -109,8 +109,6 @@ func getDistributedUnitFunctions(w http.ResponseWriter, r *http.Request) {
 }
 
 func updateRRMPolicyDedicatedRatio(w http.ResponseWriter, r *http.Request) {
-       //vars := mux.Vars(r)
-       fmt.Println("::updateRRMPolicyDedicatedRatio::")
        var prMessage messages.DistributedUnitFunction
        decoder := json.NewDecoder(r.Body)
 
@@ -121,7 +119,6 @@ func updateRRMPolicyDedicatedRatio(w http.ResponseWriter, r *http.Request) {
        defer r.Body.Close()
 
        fmt.Println("prMessage: ", prMessage)
-       //prMessage.Id = vars["POLICY-ID"]
 
        respondWithJSON(w, http.StatusOK, map[string]string{"status": "200"})
 }
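
For context on the change just above: the quoted ratio values were replaced with bare numbers because the stub's message type now models the minimum and dedicated ratios as integers, like the maximum ratio. The sketch below illustrates the assumed shape of that message type; the field names follow the literal above, but the type names and JSON tags are assumptions rather than the actual definitions in the messages package::

    package messages

    // RRMPolicyMember stands in for the member entries used in the stub above;
    // its fields are omitted here for brevity.
    type RRMPolicyMember struct{}

    // RRMPolicyRatio is an assumed name for the policy-ratio element; the three
    // ratio fields are now all plain integers, which is why the quotes were
    // dropped from the stub's literals.
    type RRMPolicyRatio struct {
        AdmState                string            `json:"administrative-state"`
        UserLabel               string            `json:"user-label"`
        RRMPolicyMaxRatio       int               `json:"radio-resource-management-policy-max-ratio"`
        RRMPolicyMinRatio       int               `json:"radio-resource-management-policy-min-ratio"`
        RRMPolicyDedicatedRatio int               `json:"radio-resource-management-policy-dedicated-ratio"`
        ResourceType            string            `json:"resource-type"`
        RRMPolicyMembers        []RRMPolicyMember `json:"radio-resource-management-policy-members"`
    }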
diff --git a/test/usecases/oruclosedlooprecovery/goversion/build_and_test.sh b/test/usecases/oruclosedlooprecovery/goversion/build_and_test.sh
deleted file mode 100755 (executable)
index 397124d..0000000
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-##############################################################################
-#
-#   Copyright (C) 2021: Nordix Foundation
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-#
-##############################################################################
-
-go build
-
-go test ./...
index 3aecf45..26b3ab3 100644 (file)
@@ -37,8 +37,8 @@ type Configuration struct {
        SDNRPassword string
 }
 
-const rawSdnrPath = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=[O-RU-ID]"
-const unlockMessage = `{"o-ran-sc-du-hello-world:du-to-ru-connection": [{"name":"[O-RU-ID]","administrative-state":"UNLOCKED"}]}`
+const rawSdnrPath = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=[O-DU-ID]/radio-resource-management-policy-ratio=rrm-pol-1"
+const unlockMessage = `{"o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":[{"id":"rrm-pol-1","radio-resource-management-policy-max-ratio":25,"radio-resource-management-policy-members":[{"mobile-country-code":"310","mobile-network-code":"150","slice-differentiator":1,"slice-service-type":1}],"radio-resource-management-policy-min-ratio":15,"user-label":"rrm-pol-1","resource-type":"prb","radio-resource-management-policy-dedicated-ratio":20,"administrative-state":"unlocked"}]}`
 
 type LinkFailureHandler struct {
        lookupService repository.LookupService
@@ -71,8 +71,7 @@ func (lfh LinkFailureHandler) MessagesHandler(w http.ResponseWriter, r *http.Req
 
 func (lfh LinkFailureHandler) sendUnlockMessage(oRuId string) {
        if oDuId, err := lfh.lookupService.GetODuID(oRuId); err == nil {
-               sdnrPath := getSdnrPath(oRuId, oDuId)
-               unlockMessage := lfh.getUnlockMessage(oRuId)
+               sdnrPath := getSdnrPath(oDuId)
                if error := restclient.Put(lfh.config.SDNRAddress+sdnrPath, unlockMessage, lfh.client, lfh.config.SDNRUser, lfh.config.SDNRPassword); error == nil {
                        log.Debugf("Sent unlock message for O-RU: %v to O-DU: %v.", oRuId, oDuId)
                } else {
@@ -84,12 +83,7 @@ func (lfh LinkFailureHandler) sendUnlockMessage(oRuId string) {
 
 }
 
-func getSdnrPath(oRuId string, oDuId string) string {
-       sdnrPath := strings.Replace(rawSdnrPath, "[O-DU-ID]", oDuId, 1)
-       sdnrPath = strings.Replace(sdnrPath, "[O-RU-ID]", oRuId, 1)
+func getSdnrPath(oDuId string) string {
+       sdnrPath := strings.Replace(rawSdnrPath, "[O-DU-ID]", oDuId, -1)
        return sdnrPath
 }
-
-func (lfh LinkFailureHandler) getUnlockMessage(oRuId string) string {
-       return strings.Replace(unlockMessage, "[O-RU-ID]", oRuId, 1)
-}
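
To make the intent of the new path handling above easier to follow: the O-DU identifier now appears twice in the SDN-R path (once for the netconf node and once for the distributed-unit-functions entry), so a single replace-all call is used instead of two single replacements. Below is a minimal, self-contained sketch with a shortened stand-in for the real path constant::

    package main

    import (
        "fmt"
        "strings"
    )

    // Shortened stand-in for rawSdnrPath; note that the placeholder occurs twice.
    const rawSdnrPath = "/node=[O-DU-ID]/o-ran-sc-du-hello-world:network-function" +
        "/distributed-unit-functions=[O-DU-ID]/radio-resource-management-policy-ratio=rrm-pol-1"

    // getSdnrPath substitutes every occurrence of the placeholder; the count -1
    // means "replace all", whereas the previous code only replaced the first one.
    func getSdnrPath(oDuId string) string {
        return strings.Replace(rawSdnrPath, "[O-DU-ID]", oDuId, -1)
    }

    func main() {
        fmt.Println(getSdnrPath("O-DU-1122"))
        // Prints: /node=O-DU-1122/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=O-DU-1122/radio-resource-management-policy-ratio=rrm-pol-1
    }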
index a3df704..050417f 100644 (file)
@@ -56,7 +56,7 @@ func Test_MessagesHandlerWithLinkFailure(t *testing.T) {
 
        lookupServiceMock := mocks.LookupService{}
 
-       lookupServiceMock.On("GetODuID", mock.Anything).Return("HCL-O-DU-1122", nil)
+       lookupServiceMock.On("GetODuID", mock.Anything).Return("O-DU-1122", nil)
 
        handlerUnderTest := NewLinkFailureHandler(&lookupServiceMock, Configuration{
                SDNRAddress:  "http://localhost:9990",
@@ -78,21 +78,21 @@ func Test_MessagesHandlerWithLinkFailure(t *testing.T) {
        assertions.Equal(http.MethodPut, actualRequest.Method)
        assertions.Equal("http", actualRequest.URL.Scheme)
        assertions.Equal("localhost:9990", actualRequest.URL.Host)
-       expectedSdnrPath := "/rests/data/network-topology:network-topology/topology=topology-netconf/node=HCL-O-DU-1122/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=ERICSSON-O-RU-11220"
+       expectedSdnrPath := "/rests/data/network-topology:network-topology/topology=topology-netconf/node=O-DU-1122/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=O-DU-1122/radio-resource-management-policy-ratio=rrm-pol-1"
        assertions.Equal(expectedSdnrPath, actualRequest.URL.Path)
        assertions.Equal("application/json; charset=utf-8", actualRequest.Header.Get("Content-Type"))
        tempRequest, _ := http.NewRequest("", "", nil)
        tempRequest.SetBasicAuth("admin", "pwd")
        assertions.Equal(tempRequest.Header.Get("Authorization"), actualRequest.Header.Get("Authorization"))
        body, _ := ioutil.ReadAll(actualRequest.Body)
-       expectedBody := []byte(`{"o-ran-sc-du-hello-world:du-to-ru-connection": [{"name":"ERICSSON-O-RU-11220","administrative-state":"UNLOCKED"}]}`)
+       expectedBody := []byte(`{"o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":[{"id":"rrm-pol-1","radio-resource-management-policy-max-ratio":25,"radio-resource-management-policy-members":[{"mobile-country-code":"310","mobile-network-code":"150","slice-differentiator":1,"slice-service-type":1}],"radio-resource-management-policy-min-ratio":15,"user-label":"rrm-pol-1","resource-type":"prb","radio-resource-management-policy-dedicated-ratio":20,"administrative-state":"unlocked"}]}`)
        assertions.Equal(expectedBody, body)
        clientMock.AssertNumberOfCalls(t, "Do", 1)
 
        logString := buf.String()
        assertions.Contains(logString, "Sent unlock message")
        assertions.Contains(logString, "O-RU: ERICSSON-O-RU-11220")
-       assertions.Contains(logString, "O-DU: HCL-O-DU-1122")
+       assertions.Contains(logString, "O-DU: O-DU-1122")
 }
 
 func newRequest(method string, url string, bodyAsBytes []byte, t *testing.T) *http.Request {
@@ -117,7 +117,7 @@ func Test_MessagesHandlerWithClearLinkFailure(t *testing.T) {
 
        lookupServiceMock := mocks.LookupService{}
 
-       lookupServiceMock.On("GetODuID", mock.Anything).Return("HCL-O-DU-1122", nil)
+       lookupServiceMock.On("GetODuID", mock.Anything).Return("O-DU-1122", nil)
 
        handlerUnderTest := NewLinkFailureHandler(&lookupServiceMock, Configuration{}, nil)
 
index 951337a..2c5417d 100644 (file)
@@ -1,11 +1,11 @@
-ERICSSON-O-RU-11220,HCL-O-DU-1122
-ERICSSON-O-RU-11221,HCL-O-DU-1122
-ERICSSON-O-RU-11222,HCL-O-DU-1122
-ERICSSON-O-RU-11223,HCL-O-DU-1122
-ERICSSON-O-RU-11223,HCL-O-DU-1122
-ERICSSON-O-RU-11224,HCL-O-DU-1123
-ERICSSON-O-RU-11225,HCL-O-DU-1123
-ERICSSON-O-RU-11226,HCL-O-DU-1123
-ERICSSON-O-RU-11227,HCL-O-DU-1124
-ERICSSON-O-RU-11228,HCL-O-DU-1125
-ERICSSON-O-RU-11229,HCL-O-DU-1125
\ No newline at end of file
+ERICSSON-O-RU-11220,O-DU-1122
+ERICSSON-O-RU-11221,O-DU-1122
+ERICSSON-O-RU-11222,O-DU-1122
+ERICSSON-O-RU-11223,O-DU-1122
+ERICSSON-O-RU-11223,O-DU-1122
+ERICSSON-O-RU-11224,O-DU-1123
+ERICSSON-O-RU-11225,O-DU-1123
+ERICSSON-O-RU-11226,O-DU-1123
+ERICSSON-O-RU-11227,O-DU-1124
+ERICSSON-O-RU-11228,O-DU-1125
+ERICSSON-O-RU-11229,O-DU-1125
\ No newline at end of file
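
The mapping file itself only pairs each O-RU id with its controlling O-DU id. As a rough illustration, a two-column CSV like this could be loaded into a lookup map along the following lines (a sketch only; the repository's LookupService, file name, and error handling are assumptions, not the actual implementation):

    package main

    import (
        "encoding/csv"
        "fmt"
        "os"
    )

    // loadORuToODuMap reads rows of the form "O-RU id,O-DU id" into a map.
    func loadORuToODuMap(path string) (map[string]string, error) {
        f, err := os.Open(path)
        if err != nil {
            return nil, err
        }
        defer f.Close()

        rows, err := csv.NewReader(f).ReadAll()
        if err != nil {
            return nil, err
        }

        m := make(map[string]string, len(rows))
        for _, row := range rows {
            if len(row) == 2 {
                m[row[0]] = row[1] // O-RU id -> O-DU id
            }
        }
        return m, nil
    }

    func main() {
        m, err := loadORuToODuMap("o-ru-to-o-du-map.csv")
        if err != nil {
            fmt.Println("could not read map:", err)
            return
        }
        fmt.Println(m["ERICSSON-O-RU-11220"]) // expected: O-DU-1122
    }
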
diff --git a/test/usecases/oruclosedlooprecovery/goversion/pom.xml b/test/usecases/oruclosedlooprecovery/goversion/pom.xml
deleted file mode 100644 (file)
index dc568e1..0000000
+++ /dev/null
@@ -1,112 +0,0 @@
-<!--
-  ============LICENSE_START=======================================================
-   Copyright (C) 2021 Nordix Foundation.
-  ================================================================================
-  Licensed under the Apache License, Version 2.0 (the "License");
-  you may not use this file except in compliance with the License.
-  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
-
-  SPDX-License-Identifier: Apache-2.0
-  ============LICENSE_END=========================================================
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>oransc.org</groupId>
-    <artifactId>o-ru-closed-loop-consumer</artifactId>
-    <version>1.1.0</version>
-    <properties>
-        <docker-maven-plugin.version>0.30.0</docker-maven-plugin.version>
-    </properties>
-
-     <build>
-        <plugins>
-            <plugin>
-              <artifactId>exec-maven-plugin</artifactId>
-              <groupId>org.codehaus.mojo</groupId>
-              <executions>
-                  <execution>
-                      <id>Build Go binary</id>
-                      <phase>generate-sources</phase>
-                      <goals>
-                          <goal>exec</goal>
-                      </goals>
-                      <configuration>
-                          <executable>${basedir}/build_and_test.sh</executable>
-                      </configuration>
-                  </execution>
-              </executions>
-          </plugin>
-            <plugin>
-                <groupId>io.fabric8</groupId>
-                <artifactId>docker-maven-plugin</artifactId>
-                <version>${docker-maven-plugin.version}</version>
-                <inherited>false</inherited>
-                <executions>
-                    <execution>
-                        <id>generate-nonrtric-o-ru-closed-loop-consumer-image</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>build</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-o-ru-closed-loop-consumer:${project.version}</name>
-                                    <build>
-                                        <cleanup>try</cleanup>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                    <execution>
-                        <id>push-nonrtric-o-ru-closed-loop-consumer-image</id>
-                        <goals>
-                            <goal>build</goal>
-                            <goal>push</goal>
-                        </goals>
-                        <configuration>
-                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
-                            <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
-                            <images>
-                                <image>
-                                    <name>o-ran-sc/nonrtric-o-ru-closed-loop-consumer:${project.version}</name>
-                                    <build>
-                                        <contextDir>${basedir}</contextDir>
-                                        <dockerFile>Dockerfile</dockerFile>
-                                        <args>
-                                            <JAR>${project.build.finalName}.jar</JAR>
-                                        </args>
-                                        <tags>
-                                            <tag>${project.version}</tag>
-                                            <tag>latest</tag>
-                                        </tags>
-                                    </build>
-                                </image>
-                            </images>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-      </plugins>
-    </build>
-</project>
index 4af16cf..e219e19 100644 (file)
@@ -52,7 +52,7 @@ func createJobHandler(w http.ResponseWriter, r *http.Request) {
 
        started = true
        fmt.Println("Start pushing messages for job: ", id)
-       startPushingMessages()
+       go startPushingMessages()
 }
 
 func deleteJobHandler(w http.ResponseWriter, r *http.Request) {
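
The only functional change in the stub's job-creation handler is that the push loop now runs in a goroutine, so the HTTP request returns immediately instead of blocking for as long as messages are being pushed. A minimal sketch of that shape (the route, port, and loop body are placeholders, not the stub's actual code):

    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    // Hypothetical stand-in for the stub's push loop; the real stub pushes
    // job messages to the registered consumer, this one only logs.
    func startPushingMessages() {
        for {
            fmt.Println("pushing message batch")
            time.Sleep(time.Second)
        }
    }

    // After the change the loop is started with "go", so the handler writes
    // its response without waiting for the loop to finish.
    func createJobHandler(w http.ResponseWriter, r *http.Request) {
        fmt.Println("Start pushing messages for job")
        go startPushingMessages()
        w.WriteHeader(http.StatusOK)
    }

    func main() {
        http.HandleFunc("/jobs", createJobHandler)
        http.ListenAndServe(":8080", nil)
    }
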
index fd0af03..0bcd0f4 100644 (file)
@@ -34,7 +34,7 @@ func main() {
        flag.Parse()
 
        r := mux.NewRouter()
-       r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={O-RU-ID}", handleData)
+       r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions={O-DU-ID}/radio-resource-management-policy-ratio=rrm-pol-1", handleData)
 
        fmt.Println("Starting SDNR on port: ", *port)
        fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), r))
index 4cb03c7..21b24b1 100644 (file)
@@ -29,4 +29,13 @@ RUN apt-get install iputils-ping -y
 
 RUN pip install -r requirements.txt
 
+ARG user=nonrtric
+ARG group=nonrtric
+
+RUN groupadd $user && \
+    useradd -r -g $group $user
+RUN chown -R $user:$group /usr/src/app/
+
+USER ${user}
+
 CMD [ "python3", "-u", "main.py" ]
index 8f5b244..52f0ca8 100644 (file)
@@ -24,14 +24,29 @@ import requests
 import time
 
 MR_PATH = "/events/[TOPIC]/users/test/"
-SDNR_PATH = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=[O-RU-ID]"
+SDNR_PATH = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=[O-DU-ID]/radio-resource-management-policy-ratio=rrm-pol-1"
 FAUILT_ID = "28"
 
 UNLOCK_MESSAGE = {
-    "o-ran-sc-du-hello-world:du-to-ru-connection": [
+    "o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":
+    [
         {
-            "name":"",
-            "administrative-state":"UNLOCKED"
+            "id":"rrm-pol-1",
+            "radio-resource-management-policy-max-ratio":25,
+            "radio-resource-management-policy-members":
+                [
+                    {
+                        "mobile-country-code":"310",
+                        "mobile-network-code":"150",
+                        "slice-differentiator":1,
+                        "slice-service-type":1
+                    }
+                ],
+            "radio-resource-management-policy-min-ratio":15,
+            "user-label":"rrm-pol-1",
+            "resource-type":"prb",
+            "radio-resource-management-policy-dedicated-ratio":20,
+            "administrative-state":"unlocked"
         }
     ]
 }
@@ -71,8 +86,7 @@ def handle_link_failure(message, o_ru_to_o_du_map, sdnr_address, sdnr_user, sdnr
         o_du_id = o_ru_to_o_du_map[o_ru_id]
         verboseprint("O-DU ID: " + o_du_id)
         unlock_msg = json.loads(json.dumps(UNLOCK_MESSAGE))
-        unlock_msg["o-ran-sc-du-hello-world:du-to-ru-connection"][0]["name"] = o_ru_id
-        send_path = SDNR_PATH.replace("[O-DU-ID]", o_du_id).replace("[O-RU-ID]", o_ru_id)
+        send_path = SDNR_PATH.replace("[O-DU-ID]", o_du_id)
         requests.put(sdnr_address + send_path, auth=(sdnr_user, sdnr_pwd), json=unlock_msg)
     else:
         print("ERROR: No mapping for O-RU ID: " + o_ru_id)
index c9ec8ea..314495c 100644 (file)
@@ -1,13 +1,13 @@
 {
-    "ERICSSON-O-RU-11220": "HCL-O-DU-1122",
-    "ERICSSON-O-RU-11221": "HCL-O-DU-1122",
-    "ERICSSON-O-RU-11222": "HCL-O-DU-1122",
-    "ERICSSON-O-RU-11223": "HCL-O-DU-1122",
-    "ERICSSON-O-RU-11223": "HCL-O-DU-1122",
-    "ERICSSON-O-RU-11224": "HCL-O-DU-1123",
-    "ERICSSON-O-RU-11225": "HCL-O-DU-1123",
-    "ERICSSON-O-RU-11226": "HCL-O-DU-1123",
-    "ERICSSON-O-RU-11227": "HCL-O-DU-1124",
-    "ERICSSON-O-RU-11228": "HCL-O-DU-1125",
-    "ERICSSON-O-RU-11229": "HCL-O-DU-1125",
+    "ERICSSON-O-RU-11220": "O-DU-1122",
+    "ERICSSON-O-RU-11221": "O-DU-1122",
+    "ERICSSON-O-RU-11222": "O-DU-1122",
+    "ERICSSON-O-RU-11223": "O-DU-1122",
+    "ERICSSON-O-RU-11223": "O-DU-1122",
+    "ERICSSON-O-RU-11224": "O-DU-1123",
+    "ERICSSON-O-RU-11225": "O-DU-1123",
+    "ERICSSON-O-RU-11226": "O-DU-1123",
+    "ERICSSON-O-RU-11227": "O-DU-1124",
+    "ERICSSON-O-RU-11228": "O-DU-1125",
+    "ERICSSON-O-RU-11229": "O-DU-1125",
 }
\ No newline at end of file
index 535c3ee..a44c073 100644 (file)
@@ -39,7 +39,7 @@ MR_PATH = "/events/unauthenticated.SEC_FAULT_OUTPUT"
 # Server info
 HOST_IP = "::"
 HOST_PORT = 9990
-APP_URL = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=<string:o_du_id>/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=<string:o_ru_id>"
+APP_URL = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=<string:o_du_id>/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=<string:o_du_id2>/radio-resource-management-policy-ratio=rrm-pol-1"
 
 USERNAME = "admin"
 PASSWORD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
@@ -88,17 +88,17 @@ linkFailureMessage = {
 
 class AlarmClearThread (threading.Thread):
 
-    def __init__(self, sleep_time, o_ru_id):
+    def __init__(self, sleep_time, o_du_id):
         threading.Thread.__init__(self)
         self.sleep_time = sleep_time
-        self.o_ru_id = o_ru_id
+        self.o_du_id = o_du_id
 
     def run(self):
-        print(f'Sleeping: {self.sleep_time} before clearing O-DU: {self.o_ru_id}')
+        print(f'Sleeping: {self.sleep_time} before clearing O-DU: {self.o_du_id}')
         time.sleep(self.sleep_time)
         msg_as_json = json.loads(json.dumps(linkFailureMessage))
-        msg_as_json["event"]["commonEventHeader"]["sourceName"] = self.o_ru_id
-        print("Sedning alarm clear for O-RU: " + self.o_ru_id)
+        msg_as_json["event"]["commonEventHeader"]["sourceName"] = self.o_du_id
+        print("Sedning alarm clear for O-DU: " + self.o_du_id)
         requests.post(mr_host + ":" + mr_port + MR_PATH, json=msg_as_json);
 
 
@@ -118,10 +118,10 @@ def verify_password(username, password):
 @app.route(APP_URL,
     methods=['PUT'])
 @auth.login_required
-def sendrequest(o_du_id, o_ru_id):
-    print("Got request with O-DU ID: " + o_du_id + " and O-RU ID: " + o_ru_id)
+def sendrequest(o_du_id, o_du_id2):
+    print("Got request with O-DU ID: " + o_du_id)
     random_time = int(10 * random.random())
-    alarm_clear_thread = AlarmClearThread(random_time, o_ru_id)
+    alarm_clear_thread = AlarmClearThread(random_time, o_du_id)
     alarm_clear_thread.start()
 
     return Response(status=200)