Merge "Make logging in Mediator Producer managed by REST"
authorHenrik Andersson <henrik.b.andersson@est.tech>
Thu, 25 Nov 2021 06:43:08 +0000 (06:43 +0000)
committerGerrit Code Review <gerrit@o-ran-sc.org>
Thu, 25 Nov 2021 06:43:08 +0000 (06:43 +0000)
221 files changed:
dmaap-adaptor-java/README.md
dmaap-adaptor-java/api/api.json
dmaap-adaptor-java/api/api.yaml
dmaap-adaptor-java/config/application.yaml
dmaap-adaptor-java/config/application_configuration.json
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/SwaggerConfig.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/clients/AsyncRestClient.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/configuration/ApplicationConfig.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/controllers/ProducerCallbacksController.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/r1/ConsumerJobInfo.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/repository/InfoTypes.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/repository/MultiMap.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/DmaapTopicConsumer.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaJobDataConsumer.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaTopicConsumers.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaTopicListener.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/ProducerRegstrationTask.java
dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json [new file with mode: 0644]
dmaap-adaptor-java/src/main/resources/typeSchemaKafka.json
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/ApplicationTest.java
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IcsSimulatorController.java [moved from dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/EcsSimulatorController.java with 99% similarity]
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithIcs.java [moved from dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithEcs.java with 78% similarity]
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
dmaap-adaptor-java/src/test/resources/test_application_configuration.json
dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json [deleted file]
docker-compose/.env [new file with mode: 0644]
docker-compose/a1-sim/docker-compose.yaml
docker-compose/dmaap-mediator-go/docker-compose.yaml
docker-compose/dmaap-mediator-java/docker-compose.yaml
docker-compose/ecs/docker-compose.yaml
docker-compose/policy-service/docker-compose.yaml
docker-compose/rapp/docker-compose.yaml
docs/api-docs.rst
docs/conf.py
docs/developer-guide.rst
docs/overview.rst
enrichment-coordinator-service [new symlink]
information-coordinator-service/.gitignore [moved from enrichment-coordinator-service/.gitignore with 100% similarity]
information-coordinator-service/Dockerfile [moved from enrichment-coordinator-service/Dockerfile with 53% similarity]
information-coordinator-service/api/ics-api.json [moved from enrichment-coordinator-service/api/ecs-api.json with 100% similarity]
information-coordinator-service/api/ics-api.yaml [moved from enrichment-coordinator-service/api/ecs-api.yaml with 100% similarity]
information-coordinator-service/config/README [moved from enrichment-coordinator-service/config/README with 100% similarity]
information-coordinator-service/config/application.yaml [moved from enrichment-coordinator-service/config/application.yaml with 81% similarity]
information-coordinator-service/config/keystore.jks [moved from enrichment-coordinator-service/config/keystore.jks with 100% similarity]
information-coordinator-service/config/truststore.jks [moved from enrichment-coordinator-service/config/truststore.jks with 100% similarity]
information-coordinator-service/eclipse-formatter.xml [moved from enrichment-coordinator-service/eclipse-formatter.xml with 100% similarity]
information-coordinator-service/pom.xml [moved from enrichment-coordinator-service/pom.xml with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/Application.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/Application.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/BeanFactory.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/BeanFactory.java with 93% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/SwaggerConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/SwaggerConfig.java with 94% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/clients/AsyncRestClient.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/clients/AsyncRestClient.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/clients/AsyncRestClientFactory.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/clients/AsyncRestClientFactory.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/configuration/ApplicationConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/configuration/ApplicationConfig.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/configuration/WebClientConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/configuration/WebClientConfig.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/ErrorResponse.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/ErrorResponse.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/StatusController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/StatusController.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/VoidResponse.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/VoidResponse.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eCallbacks.java with 88% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eConsts.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eController.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiJobInfo.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiJobStatus.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiJobStatus.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiTypeInfo.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerCallbacks.java with 88% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerConsts.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerController.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerInfoTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerInfoTypeInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerJobInfo.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerJobStatus.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerJobStatus.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerTypeRegistrationInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerTypeRegistrationInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerTypeSubscriptionInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerTypeSubscriptionInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerCallbacks.java with 91% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerConsts.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerController.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerInfoTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerInfoTypeInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerJobInfo.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerRegistrationInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerRegistrationInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerStatusInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerStatusInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/exceptions/ServiceException.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/exceptions/ServiceException.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoJob.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoJob.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoJobs.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoJobs.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoProducer.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoProducer.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoProducers.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoProducers.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoType.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoType.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoTypeSubscriptions.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoTypeSubscriptions.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoTypes.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoTypes.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/MultiMap.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/MultiMap.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/tasks/ProducerSupervision.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/tasks/ProducerSupervision.java with 90% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/ApplicationTest.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/ApplicationTest.java with 95% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/MockEnrichmentService.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/MockEnrichmentService.java with 98% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/clients/AsyncRestClientTest.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/clients/AsyncRestClientTest.java with 99% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/controller/ConsumerSimulatorController.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/controller/ConsumerSimulatorController.java with 93% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/controller/ProducerSimulatorController.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/controller/ProducerSimulatorController.java with 96% similarity]
pom.xml
test/auto-test/FTC1.sh
test/auto-test/FTC10.sh
test/auto-test/FTC100.sh
test/auto-test/FTC110.sh
test/auto-test/FTC1100.sh
test/auto-test/FTC150.sh
test/auto-test/FTC1800.sh
test/auto-test/FTC2001.sh
test/auto-test/FTC2002.sh
test/auto-test/FTC2003.sh
test/auto-test/FTC300.sh
test/auto-test/FTC3000.sh
test/auto-test/FTC310.sh
test/auto-test/FTC350.sh
test/auto-test/FTC800.sh
test/auto-test/FTC805.sh
test/auto-test/FTC810.sh
test/auto-test/FTC850.sh
test/auto-test/FTC900.sh
test/auto-test/FTC_HELM_E_RELEASE.sh
test/auto-test/ONAP_UC.sh
test/auto-test/PM_DEMO.sh
test/auto-test/PM_EI_DEMO.sh
test/auto-test/README.md
test/auto-test/startMR.sh
test/auto-test/testdata/dmaap-adapter/job-schema-1-kafka [new file with mode: 0644]
test/auto-test/testdata/dmaap-adapter/job-template-1-kafka.json [new file with mode: 0644]
test/common/README.md
test/common/api_curl.sh
test/common/cbs_api_functions.sh [new file with mode: 0644]
test/common/clean_kube.sh
test/common/consul_api_functions.sh [moved from test/common/consul_cbs_functions.sh with 67% similarity]
test/common/cp_api_functions.sh [moved from test/common/control_panel_api_functions.sh with 95% similarity]
test/common/cr_api_functions.sh
test/common/dmaapadp_api_functions.sh
test/common/dmaapmed_api_functions.sh
test/common/dmaapmr_api_functions.sh [new file with mode: 0644]
test/common/ecs_api_functions.sh
test/common/genstat.sh [new file with mode: 0755]
test/common/httpproxy_api_functions.sh [moved from test/common/http_proxy_api_functions.sh with 95% similarity]
test/common/kubeproxy_api_functions.sh [moved from test/common/kube_proxy_api_functions.sh with 96% similarity]
test/common/mr_api_functions.sh
test/common/ngw_api_functions.sh [moved from test/common/gateway_api_functions.sh with 96% similarity]
test/common/pa_api_functions.sh [moved from test/common/agent_api_functions.sh with 92% similarity]
test/common/prodstub_api_functions.sh
test/common/pvccleaner_api_functions.sh
test/common/rc_api_functions.sh [moved from test/common/rapp_catalogue_api_functions.sh with 96% similarity]
test/common/ricsim_api_functions.sh [moved from test/common/ricsimulator_api_functions.sh with 86% similarity]
test/common/sdnc_api_functions.sh [moved from test/common/controller_api_functions.sh with 95% similarity]
test/common/test_env-onap-guilin.sh
test/common/test_env-onap-honolulu.sh
test/common/test_env-onap-istanbul.sh
test/common/test_env-oran-cherry.sh
test/common/test_env-oran-d-release.sh
test/common/test_env-oran-e-release.sh
test/common/testcase_common.sh
test/cr/.gitignore
test/cr/app/cr.py
test/cr/app/nginx.conf
test/mrstub/app/main.py
test/mrstub/app/nginx.conf
test/simulator-group/consul_cbs/.gitignore
test/simulator-group/consul_cbs/docker-compose.yml
test/simulator-group/control_panel/.gitignore
test/simulator-group/control_panel/docker-compose.yml
test/simulator-group/cr/.env [new file with mode: 0644]
test/simulator-group/cr/.gitignore
test/simulator-group/cr/app.yaml
test/simulator-group/cr/docker-compose.yml
test/simulator-group/cr/svc.yaml
test/simulator-group/dmaapadp/.gitignore
test/simulator-group/dmaapadp/application.yaml
test/simulator-group/dmaapadp/application_configuration.json
test/simulator-group/dmaapadp/docker-compose.yml
test/simulator-group/dmaapadp/mnt/.gitignore [new file with mode: 0644]
test/simulator-group/dmaapmed/.gitignore
test/simulator-group/dmaapmed/app.yaml
test/simulator-group/dmaapmed/docker-compose.yml
test/simulator-group/dmaapmed/mnt/.gitignore [new file with mode: 0644]
test/simulator-group/dmaapmed/type_config.json
test/simulator-group/dmaapmr/.gitignore [new file with mode: 0644]
test/simulator-group/dmaapmr/app.yaml
test/simulator-group/dmaapmr/configs0/kafka/zk_client_jaas.conf [moved from test/simulator-group/dmaapmr/mnt/kafka/zk_client_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/configs0/mr/MsgRtrApi.properties [moved from test/simulator-group/dmaapmr/mnt/mr/MsgRtrApi.properties with 98% similarity]
test/simulator-group/dmaapmr/configs0/mr/cadi.properties [moved from test/simulator-group/dmaapmr/mnt/mr/cadi.properties with 100% similarity]
test/simulator-group/dmaapmr/configs0/mr/logback.xml [moved from test/simulator-group/dmaapmr/mnt/mr/logback.xml with 100% similarity]
test/simulator-group/dmaapmr/configs0/zk/zk_server_jaas.conf [moved from test/simulator-group/dmaapmr/mnt/zk/zk_server_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/configs1/kafka/zk_client_jaas.conf [moved from test/simulator-group/dmaapmr/mnt2/kafka/zk_client_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/configs1/mr/MsgRtrApi.properties [moved from test/simulator-group/dmaapmr/mnt2/mr/MsgRtrApi.properties with 95% similarity]
test/simulator-group/dmaapmr/configs1/mr/cadi.properties [new file with mode: 0644]
test/simulator-group/dmaapmr/configs1/mr/logback.xml [moved from test/simulator-group/dmaapmr/mnt2/mr/logback.xml with 99% similarity]
test/simulator-group/dmaapmr/configs1/zk/zk_server_jaas.conf [moved from test/simulator-group/dmaapmr/mnt2/zk/zk_server_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/docker-compose.yml
test/simulator-group/dmaapmr/mnt/.gitignore [new file with mode: 0644]
test/simulator-group/dmaapmr/mnt/mr/KUBE-MsgRtrApi.properties [deleted file]
test/simulator-group/dmaapmr/mnt2/mr/KUBE-MsgRtrApi.properties [deleted file]
test/simulator-group/dmaapmr/mnt2/mr/cadi.properties [deleted file]
test/simulator-group/dmaapmr/svc.yaml
test/simulator-group/ecs/.gitignore
test/simulator-group/ecs/docker-compose.yml
test/simulator-group/httpproxy/.gitignore
test/simulator-group/httpproxy/docker-compose.yml
test/simulator-group/kubeproxy/.gitignore
test/simulator-group/kubeproxy/docker-compose.yml
test/simulator-group/mrstub/.gitignore
test/simulator-group/mrstub/app.yaml
test/simulator-group/mrstub/docker-compose.yml
test/simulator-group/ngw/.gitignore
test/simulator-group/ngw/docker-compose.yml
test/simulator-group/policy_agent/.gitignore
test/simulator-group/policy_agent/docker-compose.yml
test/simulator-group/prodstub/.gitignore
test/simulator-group/prodstub/docker-compose.yml
test/simulator-group/rapp_catalogue/.gitignore
test/simulator-group/rapp_catalogue/docker-compose.yml
test/simulator-group/ric/.gitignore
test/simulator-group/ric/docker-compose.yml
test/simulator-group/sdnc/.gitignore
test/simulator-group/sdnc/app.yaml
test/simulator-group/sdnc/app2.yaml
test/simulator-group/sdnc/docker-compose-2.yml
test/simulator-group/sdnc/docker-compose.yml
test/simulator-group/sdnc/svc.yaml
test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/OscDefinedEvent.json [new file with mode: 0644]
test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/StdDefinedEvent.json [new file with mode: 0644]
test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/OscDefinedInputSchema.avsc [new file with mode: 0644]
test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedInputSchema.avsc [new file with mode: 0644]

index 9b35fe5..162bfb2 100644 (file)
@@ -1,9 +1,9 @@
 # O-RAN-SC Non-RealTime RIC DMaaP Information Producer
-This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP. Its main tasks is to register information types and itself as a producer using the ICS Data Producer API.
+This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP or Kafka. Its main task is to register information types and itself as a producer using the ICS Data Producer API.
 
 A data consumer may create information jobs through the ICS Data Producer API.
 
-This service will retrieve data from the DMaaP Message Router (MR) and distribute it further to the data consumers (information job owners).
+This service will retrieve data from the DMaaP Message Router (MR) or from the Kafka streaming platform and will distribute it further to the data consumers (information job owners).
 
 The component is a springboot service and is configured as any springboot service through the file `config/application.yaml`. The component log can be retrieved and logging can be controlled by means of REST calls. See the API documentation (api/api.yaml).
 
@@ -14,20 +14,91 @@ The file `config/application_configuration.json` contains the configuration of j
        "types":
         [
           {
-             "id":  "STD_Fault_Messages",
-             "dmaapTopicUrl":  events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+             "id":  "ExampleInformationType1_1.0.0",
+             "dmaapTopicUrl":  "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+             "useHttpProxy": true
+          },
+          {
+             "id": "ExampleInformationType2_2.0.0",
+             "kafkaInputTopic": "KafkaInputTopic",
              "useHttpProxy": false
           }
         ]
     }
 ```
 
-Each information has the following properties:
+Each information type has the following properties:
  - id the information type identity as exposed in the Information Coordinator Service data consumer API
  - dmaapTopicUrl the URL for fetching information from DMaaP
+ - kafkaInputTopic a Kafka topic to get input from
  - useHttpProxy if true, the received information will be delivered using an HTTP proxy (provided that one is setup in the application.yaml file). This might for instance be needed if the data consumer is in the RAN or outside the cluster.
 
-The service producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+The service producer will poll MR and/or listen to Kafka topics for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+
+When an Information Job is created in the Information Coordinator Service Consumer API, it is possible to define a number of job specific properties. For an Information type that has a Kafka topic defined, the following Json schema defines the properties that can be used:
+
+
+```sh
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+      "type": "string"
+    },
+    "maxConcurrency": {
+      "type": "integer"
+    },
+    "bufferTimeout": {
+      "type": "object",
+      "properties": {
+        "maxSize": {
+          "type": "integer"
+        },
+        "maxTimeMiliseconds": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "maxSize",
+        "maxTimeMiliseconds"
+      ]
+    }
+  },
+  "additionalProperties": false
+}
+```
+-filter is a regular expression. Only strings that match the expression will be pushed further to the consumer.
+-maxConcurrency the maximum number of concurrent REST sessions for the data delivery to the consumer.
+ The default is 1 and that is the number that must be used to guarantee that the object sequence is maintained. 
+ A higher number will give higher throughput.
+-bufferTimeout, can be used to reduce the number of REST calls to the consumer. If defined, a number of objects will be 
+ buffered and sent in one REST call to the consumer.
+ The buffered objects will be put in a Json array and quoted. Example:
+   Object1 and Object2 may be posted in one call -->  ["Object1", "Object2"]
+ The bufferTimeout is a Json object and the parameters in the object are:
+   - maxSize the maximum number of buffered objects before posting
+   - maxTimeMiliseconds the maximum delay time to buffer before posting
+ If no bufferTimeout is specified, each object will be posted as received in separate calls (not quoted and put in a Json array).
+
+
+For an information type that only has a DMaaP topic, the following Json schema defines the possible parameters to use when creating an information job:
+
+```sh
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+       "type": "string"
+     }
+  },
+  "additionalProperties": false
+}
+```
+-filter is a regular expression. Only strings that match the expression will be pushed further to the consumer. This
+ has a similar meaning as in jobs that receive data from Kafka.
 
 ## License
 
index 39056e9..04c4ab0 100644 (file)
     }},
     "openapi": "3.0.1",
     "paths": {
-        "/dmaap_dataproducer/info_job": {
-            "post": {
-                "summary": "Callback for Information Job creation/modification",
-                "requestBody": {
-                    "content": {"application/json": {"schema": {"type": "string"}}},
-                    "required": true
-                },
-                "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
-                "operationId": "jobCreatedCallback",
-                "responses": {
-                    "200": {
-                        "description": "OK",
-                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
-                    },
-                    "404": {
-                        "description": "Information type is not found",
-                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
-                    }
-                },
-                "tags": ["Producer job control API"]
-            },
-            "get": {
-                "summary": "Get all jobs",
-                "description": "Returns all info jobs, can be used for trouble shooting",
-                "operationId": "getJobs",
-                "responses": {"200": {
-                    "description": "Information jobs",
-                    "content": {"application/json": {"schema": {
-                        "type": "array",
-                        "items": {"$ref": "#/components/schemas/producer_info_job_request"}
-                    }}}
-                }},
-                "tags": ["Producer job control API"]
-            }
-        },
-        "/dmaap_dataproducer/health_check": {"get": {
-            "summary": "Producer supervision",
-            "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
-            "operationId": "producerSupervision",
-            "responses": {"200": {
-                "description": "The producer is OK",
-                "content": {"application/json": {"schema": {"type": "string"}}}
-            }},
-            "tags": ["Producer job control API"]
-        }},
         "/actuator/threaddump": {"get": {
             "summary": "Actuator web endpoint 'threaddump'",
             "operationId": "handle_2_1_3",
             }],
             "tags": ["Information Coordinator Service Simulator (exists only in test)"]
         }},
+        "/generic_dataproducer/health_check": {"get": {
+            "summary": "Producer supervision",
+            "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
+            "operationId": "producerSupervision",
+            "responses": {"200": {
+                "description": "The producer is OK",
+                "content": {"application/json": {"schema": {"type": "string"}}}
+            }},
+            "tags": ["Producer job control API"]
+        }},
+        "/generic_dataproducer/info_job": {
+            "post": {
+                "summary": "Callback for Information Job creation/modification",
+                "requestBody": {
+                    "content": {"application/json": {"schema": {"type": "string"}}},
+                    "required": true
+                },
+                "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
+                "operationId": "jobCreatedCallback",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+                    },
+                    "400": {
+                        "description": "Other error in the request",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+                    },
+                    "404": {
+                        "description": "Information type is not found",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+                    }
+                },
+                "tags": ["Producer job control API"]
+            },
+            "get": {
+                "summary": "Get all jobs",
+                "description": "Returns all info jobs, can be used for trouble shooting",
+                "operationId": "getJobs",
+                "responses": {"200": {
+                    "description": "Information jobs",
+                    "content": {"application/json": {"schema": {
+                        "type": "array",
+                        "items": {"$ref": "#/components/schemas/producer_info_job_request"}
+                    }}}
+                }},
+                "tags": ["Producer job control API"]
+            }
+        },
         "/actuator/loggers": {"get": {
             "summary": "Actuator web endpoint 'loggers'",
             "operationId": "handle_6",
                 "tags": ["Information Coordinator Service Simulator (exists only in test)"]
             }
         },
+        "/generic_dataproducer/info_job/{infoJobId}": {"delete": {
+            "summary": "Callback for Information Job deletion",
+            "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
+            "operationId": "jobDeletedCallback",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+            }},
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "infoJobId",
+                "required": true
+            }],
+            "tags": ["Producer job control API"]
+        }},
         "/actuator/metrics/{requiredMetricName}": {"get": {
             "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
             "operationId": "handle_5",
                 "tags": ["Actuator"]
             }
         },
-        "/dmaap_dataproducer/info_job/{infoJobId}": {"delete": {
-            "summary": "Callback for Information Job deletion",
-            "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
-            "operationId": "jobDeletedCallback",
-            "responses": {"200": {
-                "description": "OK",
-                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
-            }},
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "infoJobId",
-                "required": true
-            }],
-            "tags": ["Producer job control API"]
-        }},
         "/actuator/health": {"get": {
             "summary": "Actuator web endpoint 'health'",
             "operationId": "handle_11",
             "name": "Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.",
             "url": "http://www.apache.org/licenses/LICENSE-2.0"
         },
-        "description": "Reads data from DMAAP and sends it further to information consumers",
-        "title": "Generic Dmaap Information Producer",
+        "description": "Reads data from DMaaP and Kafka and posts it further to information consumers",
+        "title": "Generic Dmaap and Kafka Information Producer",
         "version": "1.0"
     },
     "tags": [{
index 3c9fb59..1fb78fa 100644 (file)
@@ -1,7 +1,8 @@
 openapi: 3.0.1
 info:
-  title: Generic Dmaap Information Producer
-  description: Reads data from DMAAP and sends it further to information consumers
+  title: Generic Dmaap and Kafka Information Producer
+  description: Reads data from DMaaP and Kafka and posts it further to information
+    consumers
   license:
     name: Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.
     url: http://www.apache.org/licenses/LICENSE-2.0
@@ -15,63 +16,6 @@ tags:
     description: Spring Boot Actuator Web API Documentation
     url: https://docs.spring.io/spring-boot/docs/current/actuator-api/html/
 paths:
-  /dmaap_dataproducer/info_job:
-    get:
-      tags:
-      - Producer job control API
-      summary: Get all jobs
-      description: Returns all info jobs, can be used for trouble shooting
-      operationId: getJobs
-      responses:
-        200:
-          description: Information jobs
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  $ref: '#/components/schemas/producer_info_job_request'
-    post:
-      tags:
-      - Producer job control API
-      summary: Callback for Information Job creation/modification
-      description: The call is invoked to activate or to modify a data subscription.
-        The endpoint is provided by the Information Producer.
-      operationId: jobCreatedCallback
-      requestBody:
-        content:
-          application/json:
-            schema:
-              type: string
-        required: true
-      responses:
-        200:
-          description: OK
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/void'
-        404:
-          description: Information type is not found
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/error_information'
-  /dmaap_dataproducer/health_check:
-    get:
-      tags:
-      - Producer job control API
-      summary: Producer supervision
-      description: The endpoint is provided by the Information Producer and is used
-        for supervision of the producer.
-      operationId: producerSupervision
-      responses:
-        200:
-          description: The producer is OK
-          content:
-            application/json:
-              schema:
-                type: string
   /actuator/threaddump:
     get:
       tags:
@@ -124,6 +68,69 @@ paths:
             application/json:
               schema:
                 type: object
+  /generic_dataproducer/health_check:
+    get:
+      tags:
+      - Producer job control API
+      summary: Producer supervision
+      description: The endpoint is provided by the Information Producer and is used
+        for supervision of the producer.
+      operationId: producerSupervision
+      responses:
+        200:
+          description: The producer is OK
+          content:
+            application/json:
+              schema:
+                type: string
+  /generic_dataproducer/info_job:
+    get:
+      tags:
+      - Producer job control API
+      summary: Get all jobs
+      description: Returns all info jobs, can be used for trouble shooting
+      operationId: getJobs
+      responses:
+        200:
+          description: Information jobs
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/producer_info_job_request'
+    post:
+      tags:
+      - Producer job control API
+      summary: Callback for Information Job creation/modification
+      description: The call is invoked to activate or to modify a data subscription.
+        The endpoint is provided by the Information Producer.
+      operationId: jobCreatedCallback
+      requestBody:
+        content:
+          application/json:
+            schema:
+              type: string
+        required: true
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/void'
+        400:
+          description: Other error in the request
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/error_information'
+        404:
+          description: Information type is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/error_information'
   /actuator/loggers:
     get:
       tags:
@@ -195,6 +202,29 @@ paths:
             application/json:
               schema:
                 type: object
+  /generic_dataproducer/info_job/{infoJobId}:
+    delete:
+      tags:
+      - Producer job control API
+      summary: Callback for Information Job deletion
+      description: The call is invoked to terminate a data subscription. The endpoint
+        is provided by the Information Producer.
+      operationId: jobDeletedCallback
+      parameters:
+      - name: infoJobId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/void'
   /actuator/metrics/{requiredMetricName}:
     get:
       tags:
@@ -287,29 +317,6 @@ paths:
             '*/*':
               schema:
                 type: object
-  /dmaap_dataproducer/info_job/{infoJobId}:
-    delete:
-      tags:
-      - Producer job control API
-      summary: Callback for Information Job deletion
-      description: The call is invoked to terminate a data subscription. The endpoint
-        is provided by the Information Producer.
-      operationId: jobDeletedCallback
-      parameters:
-      - name: infoJobId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      responses:
-        200:
-          description: OK
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/void'
   /actuator/health:
     get:
       tags:
index 6a2d68a..c3476ac 100644 (file)
@@ -46,14 +46,15 @@ app:
     # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
     http.proxy-host:
     http.proxy-port: 0
-  ecs-base-url: https://localhost:8434
+  ics-base-url: https://localhost:8434
   # Location of the component configuration file. The file will only be used if the Consul database is not used;
   # configuration from the Consul will override the file.
   configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
   dmaap-base-url: http://dradmin:dradmin@localhost:2222
   # The url used to adress this component. This is used as a callback url sent to other components.
   dmaap-adapter-base-url: https://localhost:8435
-  # KAFKA boostrap server. This is only needed if there are Information Types that uses a kafkaInputTopic
+  # KAFKA bootstrap servers. This is only needed if there are Information Types that use a kafkaInputTopic.
+  # Several redundant bootstrap servers can be specified, separated by a comma ','.
   kafka:
     bootstrap-servers: localhost:9092
 
index ae34c56..6aaffd1 100644 (file)
@@ -1,9 +1,15 @@
 {
    "types": [
       {
-         "id": "ExampleInformationType",
+         "id": "ExampleInformationType1",
          "dmaapTopicUrl": "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12",
          "useHttpProxy": true
+      },
+      {
+         "id": "ExampleInformationType2",
+         "kafkaInputTopic": "TutorialTopic",
+         "useHttpProxy": false
       }
+      
    ]
-}
\ No newline at end of file
+}
index 8f33377..6128d2e 100644 (file)
@@ -38,6 +38,6 @@ import io.swagger.v3.oas.annotations.info.License;
 public class SwaggerConfig {
     private SwaggerConfig() {}
 
-    static final String API_TITLE = "Generic Dmaap Information Producer";
-    static final String DESCRIPTION = "Reads data from DMAAP and sends it further to information consumers";
+    static final String API_TITLE = "Generic Dmaap and Kafka Information Producer";
+    static final String DESCRIPTION = "Reads data from DMaaP and Kafka and posts it further to information consumers";
 }
index 8b3efed..d54ac44 100644 (file)
@@ -47,6 +47,7 @@ import reactor.netty.transport.ProxyProvider;
 /**
  * Generic reactive REST client.
  */
+@SuppressWarnings("java:S4449") // Add @Nullable to third-party API parameter
 public class AsyncRestClient {
 
     private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -83,7 +84,7 @@ public class AsyncRestClient {
     }
 
     public Mono<String> postWithAuthHeader(String uri, String body, String username, String password,
-            MediaType mediaType) {
+            @Nullable MediaType mediaType) {
         Object traceTag = createTraceTag();
         logger.debug("{} POST (auth) uri = '{}{}''", traceTag, baseUrl, uri);
         logger.trace("{} POST body: {}", traceTag, body);
index f17a9c0..3ea64e7 100644 (file)
@@ -28,6 +28,7 @@ import java.util.Collection;
 import java.util.Collections;
 
 import lombok.Getter;
+import lombok.Setter;
 
 import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.repository.InfoType;
@@ -73,12 +74,13 @@ public class ApplicationConfig {
     private int httpProxyPort = 0;
 
     @Getter
+    @Setter
     @Value("${server.port}")
     private int localServerHttpPort;
 
     @Getter
-    @Value("${app.ecs-base-url}")
-    private String ecsBaseUrl;
+    @Value("${app.ics-base-url}")
+    private String icsBaseUrl;
 
     @Getter
     @Value("${app.dmaap-adapter-base-url}")
index 07f5aa7..94f9f8d 100644 (file)
@@ -34,6 +34,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
 import java.util.ArrayList;
 import java.util.Collection;
 
+import org.oran.dmaapadapter.exceptions.ServiceException;
 import org.oran.dmaapadapter.r1.ProducerJobInfo;
 import org.oran.dmaapadapter.repository.InfoTypes;
 import org.oran.dmaapadapter.repository.Job;
@@ -58,8 +59,8 @@ public class ProducerCallbacksController {
 
     public static final String API_NAME = "Producer job control API";
     public static final String API_DESCRIPTION = "";
-    public static final String JOB_URL = "/dmaap_dataproducer/info_job";
-    public static final String SUPERVISION_URL = "/dmaap_dataproducer/health_check";
+    public static final String JOB_URL = "/generic_dataproducer/info_job";
+    public static final String SUPERVISION_URL = "/generic_dataproducer/health_check";
     private static Gson gson = new GsonBuilder().create();
     private final Jobs jobs;
     private final InfoTypes types;
@@ -77,6 +78,8 @@ public class ProducerCallbacksController {
                     content = @Content(schema = @Schema(implementation = VoidResponse.class))), //
             @ApiResponse(responseCode = "404", description = "Information type is not found", //
                     content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))), //
+            @ApiResponse(responseCode = "400", description = "Other error in the request", //
+                    content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))) //
     })
     public ResponseEntity<Object> jobCreatedCallback( //
             @RequestBody String body) {
@@ -86,8 +89,12 @@ public class ProducerCallbacksController {
             this.jobs.addJob(request.id, request.targetUri, types.getType(request.typeId), request.owner,
                     request.lastUpdated, toJobParameters(request.jobData));
             return new ResponseEntity<>(HttpStatus.OK);
+        } catch (ServiceException e) {
+            logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+            return ErrorResponse.create(e, e.getHttpStatus());
         } catch (Exception e) {
-            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
+            logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+            return ErrorResponse.create(e, HttpStatus.BAD_REQUEST);
         }
     }
 
index ce4a3b7..c1737db 100644 (file)
@@ -28,7 +28,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 import org.immutables.gson.Gson;
 
 @Gson.TypeAdapters
-@Schema(name = "consumer_job", description = "Information for an Enrichment Information Job")
+@Schema(name = "consumer_job", description = "Information for an Information Job")
 public class ConsumerJobInfo {
 
     @Schema(name = "info_type_id", description = "Information type Idenitifier of the subscription job",
index 558fc46..baa998b 100644 (file)
@@ -28,6 +28,7 @@ import java.util.Vector;
 import org.oran.dmaapadapter.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
 
 public class InfoTypes {
     private static final Logger logger = LoggerFactory.getLogger(InfoTypes.class);
@@ -47,7 +48,7 @@ public class InfoTypes {
     public synchronized InfoType getType(String id) throws ServiceException {
         InfoType type = allTypes.get(id);
         if (type == null) {
-            throw new ServiceException("Could not find type: " + id);
+            throw new ServiceException("Could not find type: " + id, HttpStatus.NOT_FOUND);
         }
         return type;
     }
index 38f3d17..f7cc14e 100644 (file)
@@ -24,6 +24,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;
 import java.util.Vector;
 
 /**
@@ -50,6 +51,14 @@ public class MultiMap<T> {
         return null;
     }
 
+    public T get(String key1, String key2) {
+        Map<String, T> innerMap = this.map.get(key1);
+        if (innerMap == null) {
+            return null;
+        }
+        return innerMap.get(key2);
+    }
+
     public Collection<T> get(String key) {
         Map<String, T> innerMap = this.map.get(key);
         if (innerMap == null) {
@@ -58,6 +67,10 @@ public class MultiMap<T> {
         return new Vector<>(innerMap.values());
     }
 
+    public Set<String> keySet() {
+        return this.map.keySet();
+    }
+
     public void clear() {
         this.map.clear();
     }
index 217a072..fe7ec8b 100644 (file)
@@ -32,7 +32,6 @@ import org.slf4j.LoggerFactory;
 import org.springframework.http.MediaType;
 
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.FluxSink;
 import reactor.core.publisher.Mono;
 
 /**
@@ -44,42 +43,10 @@ public class DmaapTopicConsumer {
     private static final Logger logger = LoggerFactory.getLogger(DmaapTopicConsumer.class);
 
     private final AsyncRestClient dmaapRestClient;
-    private final InfiniteFlux infiniteSubmitter = new InfiniteFlux();
     protected final ApplicationConfig applicationConfig;
     protected final InfoType type;
     protected final Jobs jobs;
 
-    /** Submits new elements until stopped */
-    private static class InfiniteFlux {
-        private FluxSink<Integer> sink;
-        private int counter = 0;
-
-        public synchronized Flux<Integer> start() {
-            stop();
-            return Flux.create(this::next).doOnRequest(this::onRequest);
-        }
-
-        public synchronized void stop() {
-            if (this.sink != null) {
-                this.sink.complete();
-                this.sink = null;
-            }
-        }
-
-        void onRequest(long no) {
-            logger.debug("InfiniteFlux.onRequest {}", no);
-            for (long i = 0; i < no; ++i) {
-                sink.next(counter++);
-            }
-        }
-
-        void next(FluxSink<Integer> sink) {
-            logger.debug("InfiniteFlux.next");
-            this.sink = sink;
-            sink.next(counter++);
-        }
-    }
-
     public DmaapTopicConsumer(ApplicationConfig applicationConfig, InfoType type, Jobs jobs) {
         AsyncRestClientFactory restclientFactory = new AsyncRestClientFactory(applicationConfig.getWebClientConfig());
         this.dmaapRestClient = restclientFactory.createRestClientNoHttpProxy("");
@@ -89,14 +56,18 @@ public class DmaapTopicConsumer {
     }
 
     public void start() {
-        infiniteSubmitter.start() //
+        Flux.range(0, Integer.MAX_VALUE) //
                 .flatMap(notUsed -> getFromMessageRouter(getDmaapUrl()), 1) //
                 .flatMap(this::pushDataToConsumers) //
                 .subscribe(//
                         null, //
                         throwable -> logger.error("DmaapMessageConsumer error: {}", throwable.getMessage()), //
-                        () -> logger.warn("DmaapMessageConsumer stopped {}", type.getId())); //
+                        this::onComplete); //
+    }
 
+    private void onComplete() {
+        logger.warn("DmaapMessageConsumer completed {}", type.getId());
+        start();
     }
 
     private String getDmaapUrl() {
@@ -128,6 +99,7 @@ public class DmaapTopicConsumer {
 
         // Distibute the body to all jobs for this type
         return Flux.fromIterable(this.jobs.getJobsForType(this.type)) //
+                .filter(job -> job.isFilterMatch(body)) //
                 .doOnNext(job -> logger.debug("Sending to consumer {}", job.getCallbackUrl())) //
                 .flatMap(job -> job.getConsumerRestClient().post("", body, MediaType.APPLICATION_JSON), CONCURRENCY) //
                 .onErrorResume(this::handleConsumerErrorResponse);
index 5550ce0..2a16f47 100644 (file)
@@ -31,7 +31,6 @@ import org.springframework.web.reactive.function.client.WebClientResponseExcepti
 import reactor.core.Disposable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.publisher.Sinks.Many;
 
 /**
  * The class streams data from a multi cast sink and sends the data to the Job
@@ -75,17 +74,22 @@ public class KafkaJobDataConsumer {
         this.job = job;
     }
 
-    public synchronized void start(Many<String> input) {
+    public synchronized void start(Flux<String> input) {
         stop();
         this.errorStats.resetKafkaErrors();
         this.subscription = getMessagesFromKafka(input, job) //
                 .flatMap(this::postToClient, job.getParameters().getMaxConcurrency()) //
                 .onErrorResume(this::handleError) //
                 .subscribe(this::handleConsumerSentOk, //
-                        t -> stop(), //
+                        this::handleExceptionInStream, //
                         () -> logger.warn("KafkaMessageConsumer stopped jobId: {}", job.getId()));
     }
 
+    private void handleExceptionInStream(Throwable t) {
+        logger.warn("KafkaMessageConsumer exception: {}, jobId: {}", t.getMessage(), job.getId());
+        stop();
+    }
+
     private Mono<String> postToClient(String body) {
         logger.debug("Sending to consumer {} {} {}", job.getId(), job.getCallbackUrl(), body);
         MediaType contentType = this.job.isBuffered() ? MediaType.APPLICATION_JSON : null;
@@ -94,8 +98,8 @@ public class KafkaJobDataConsumer {
 
     public synchronized void stop() {
         if (this.subscription != null) {
-            subscription.dispose();
-            subscription = null;
+            this.subscription.dispose();
+            this.subscription = null;
         }
     }
 
@@ -103,9 +107,8 @@ public class KafkaJobDataConsumer {
         return this.subscription != null;
     }
 
-    private Flux<String> getMessagesFromKafka(Many<String> input, Job job) {
-        Flux<String> result = input.asFlux() //
-                .filter(job::isFilterMatch);
+    private Flux<String> getMessagesFromKafka(Flux<String> input, Job job) {
+        Flux<String> result = input.filter(job::isFilterMatch);
 
         if (job.isBuffered()) {
             result = result.map(this::quote) //
index 0ed85c6..4809017 100644 (file)
@@ -30,6 +30,7 @@ import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
 import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.repository.MultiMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -46,7 +47,7 @@ public class KafkaTopicConsumers {
     private final Map<String, KafkaTopicListener> topicListeners = new HashMap<>(); // Key is typeId
 
     @Getter
-    private final Map<String, KafkaJobDataConsumer> consumers = new HashMap<>(); // Key is jobId
+    private final MultiMap<KafkaJobDataConsumer> consumers = new MultiMap<>(); // Key is typeId, jobId
 
     private static final int CONSUMER_SUPERVISION_INTERVAL_MS = 1000 * 60 * 3;
 
@@ -70,22 +71,25 @@ public class KafkaTopicConsumers {
             public void onJobRemoved(Job job) {
                 removeJob(job);
             }
-
         });
     }
 
     public synchronized void addJob(Job job) {
-        if (this.consumers.get(job.getId()) == null && job.getType().isKafkaTopicDefined()) {
+        if (job.getType().isKafkaTopicDefined()) {
+            removeJob(job);
             logger.debug("Kafka job added {}", job.getId());
             KafkaTopicListener topicConsumer = topicListeners.get(job.getType().getId());
+            if (consumers.get(job.getType().getId()).isEmpty()) {
+                topicConsumer.start();
+            }
             KafkaJobDataConsumer subscription = new KafkaJobDataConsumer(job);
-            subscription.start(topicConsumer.getOutput());
-            consumers.put(job.getId(), subscription);
+            subscription.start(topicConsumer.getOutput().asFlux());
+            consumers.put(job.getType().getId(), job.getId(), subscription);
         }
     }
 
     public synchronized void removeJob(Job job) {
-        KafkaJobDataConsumer d = consumers.remove(job.getId());
+        KafkaJobDataConsumer d = consumers.remove(job.getType().getId(), job.getId());
         if (d != null) {
             logger.debug("Kafka job removed {}", job.getId());
             d.stop();
@@ -93,10 +97,9 @@ public class KafkaTopicConsumers {
     }
 
     @Scheduled(fixedRate = CONSUMER_SUPERVISION_INTERVAL_MS)
-    public synchronized void restartNonRunningTasks() {
-
-        for (KafkaJobDataConsumer consumer : consumers.values()) {
-            if (!consumer.isRunning()) {
+    public synchronized void restartNonRunningTopics() {
+        for (String typeId : this.consumers.keySet()) {
+            for (KafkaJobDataConsumer consumer : this.consumers.get(typeId)) {
                 restartTopic(consumer);
             }
         }
@@ -110,10 +113,6 @@ public class KafkaTopicConsumers {
     }
 
     private void restartConsumersOfType(KafkaTopicListener topic, InfoType type) {
-        this.consumers.forEach((jobId, consumer) -> {
-            if (consumer.getJob().getType().getId().equals(type.getId())) {
-                consumer.start(topic.getOutput());
-            }
-        });
+        this.consumers.get(type.getId()).forEach(consumer -> consumer.start(topic.getOutput().asFlux()));
     }
 }
index d1045ee..f3b44a3 100644 (file)
@@ -53,7 +53,6 @@ public class KafkaTopicListener {
     public KafkaTopicListener(ApplicationConfig applicationConfig, InfoType type) {
         this.applicationConfig = applicationConfig;
         this.type = type;
-        start();
     }
 
     public Many<String> getOutput() {
index 8b5b6cf..c9284b5 100644 (file)
@@ -50,7 +50,8 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
 /**
- * Registers the types and this producer in ECS. This is done when needed.
+ * Registers the types and this producer in Information Coordinator Service.
+ * This is done when needed.
  */
 @Component
 @EnableScheduling
@@ -65,7 +66,7 @@ public class ProducerRegstrationTask {
 
     private static final String PRODUCER_ID = "DmaapGenericInfoProducer";
     @Getter
-    private boolean isRegisteredInEcs = false;
+    private boolean isRegisteredInIcs = false;
     private static final int REGISTRATION_SUPERVISION_INTERVAL_MS = 1000 * 5;
 
     public ProducerRegstrationTask(@Autowired ApplicationConfig applicationConfig, @Autowired InfoTypes types) {
@@ -78,7 +79,7 @@ public class ProducerRegstrationTask {
     @Scheduled(fixedRate = REGISTRATION_SUPERVISION_INTERVAL_MS)
     public void supervisionTask() {
         checkRegistration() //
-                .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInEcs) //
+                .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInIcs) //
                 .flatMap(isRegisterred -> registerTypesAndProducer()) //
                 .subscribe( //
                         null, //
@@ -87,7 +88,7 @@ public class ProducerRegstrationTask {
     }
 
     private void handleRegistrationCompleted() {
-        isRegisteredInEcs = true;
+        isRegisteredInIcs = true;
     }
 
     private void handleRegistrationFailure(Throwable t) {
@@ -96,7 +97,7 @@ public class ProducerRegstrationTask {
 
     // Returns TRUE if registration is correct
     private Mono<Boolean> checkRegistration() {
-        final String url = applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+        final String url = applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
         return restClient.get(url) //
                 .flatMap(this::isRegisterredInfoCorrect) //
                 .onErrorResume(t -> Mono.just(Boolean.FALSE));
@@ -105,7 +106,7 @@ public class ProducerRegstrationTask {
     private Mono<Boolean> isRegisterredInfoCorrect(String registerredInfoStr) {
         ProducerRegistrationInfo registerredInfo = gson.fromJson(registerredInfoStr, ProducerRegistrationInfo.class);
         if (isEqual(producerRegistrationInfo(), registerredInfo)) {
-            logger.trace("Already registered in ECS");
+            logger.trace("Already registered in ICS");
             return Mono.just(Boolean.TRUE);
         } else {
             return Mono.just(Boolean.FALSE);
@@ -113,13 +114,13 @@ public class ProducerRegstrationTask {
     }
 
     private String registerTypeUrl(InfoType type) {
-        return applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
+        return applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
     }
 
     private Mono<String> registerTypesAndProducer() {
         final int CONCURRENCY = 20;
         final String producerUrl =
-                applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+                applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
 
         return Flux.fromIterable(this.types.getAll()) //
                 .doOnNext(type -> logger.info("Registering type {}", type.getId())) //
@@ -144,20 +145,8 @@ public class ProducerRegstrationTask {
     }
 
     private Object jsonSchemaObject(InfoType type) throws IOException, ServiceException {
-
-        if (type.isKafkaTopicDefined()) {
-            String schemaStrKafka = readSchemaFile("/typeSchemaKafka.json");
-            return jsonObject(schemaStrKafka);
-        } else {
-            // An object with no properties
-            String schemaStr = "{" //
-                    + "\"type\": \"object\"," //
-                    + "\"properties\": {}," //
-                    + "\"additionalProperties\": false" //
-                    + "}"; //
-
-            return jsonObject(schemaStr);
-        }
+        String schemaFile = type.isKafkaTopicDefined() ? "/typeSchemaKafka.json" : "/typeSchemaDmaap.json";
+        return jsonObject(readSchemaFile(schemaFile));
     }
 
     private String readSchemaFile(String filePath) throws IOException, ServiceException {
@@ -169,12 +158,13 @@ public class ProducerRegstrationTask {
         return CharStreams.toString(new InputStreamReader(in, StandardCharsets.UTF_8));
     }
 
+    @SuppressWarnings("java:S2139") // Log exception
     private Object jsonObject(String json) {
         try {
             return JsonParser.parseString(json).getAsJsonObject();
         } catch (Exception e) {
-            logger.error("Bug, error in JSON: {}", json);
-            throw new NullPointerException(e.toString());
+            logger.error("Bug, error in JSON: {} {}", json, e.getMessage());
+            throw new NullPointerException(e.getMessage());
         }
     }
 
@@ -185,7 +175,6 @@ public class ProducerRegstrationTask {
     }
 
     private ProducerRegistrationInfo producerRegistrationInfo() {
-
         return ProducerRegistrationInfo.builder() //
                 .jobCallbackUrl(baseUrl() + ProducerCallbacksController.JOB_URL) //
                 .producerSupervisionCallbackUrl(baseUrl() + ProducerCallbacksController.SUPERVISION_URL) //
diff --git a/dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json b/dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json
new file mode 100644 (file)
index 0000000..a50b236
--- /dev/null
@@ -0,0 +1,10 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+       "type": "string"
+     }
+  },
+  "additionalProperties": false
+}
index 290b70a..38e7807 100644 (file)
           "type": "integer"
         }
       },
+      "additionalProperties": false,
       "required": [
         "maxSize",
         "maxTimeMiliseconds"
       ]
     }
   },
-  "required": []
+  "additionalProperties": false
 }
\ No newline at end of file
index 287c95e..0ea0056 100644 (file)
@@ -34,6 +34,7 @@ import java.nio.file.Paths;
 
 import org.json.JSONObject;
 import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.oran.dmaapadapter.clients.AsyncRestClient;
@@ -46,9 +47,11 @@ import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.controllers.ProducerCallbacksController;
 import org.oran.dmaapadapter.r1.ConsumerJobInfo;
 import org.oran.dmaapadapter.r1.ProducerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.tasks.KafkaJobDataConsumer;
+import org.oran.dmaapadapter.tasks.KafkaTopicConsumers;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
@@ -64,11 +67,12 @@ import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 import org.springframework.web.reactive.function.client.WebClientResponseException;
 
+import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;
 
 @ExtendWith(SpringExtension.class)
-@SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT)
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
 @TestPropertySource(properties = { //
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
@@ -89,7 +93,10 @@ class ApplicationTest {
     private ConsumerController consumerController;
 
     @Autowired
-    private EcsSimulatorController ecsSimulatorController;
+    private IcsSimulatorController icsSimulatorController;
+
+    @Autowired
+    KafkaTopicConsumers kafkaTopicConsumers;
 
     private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
@@ -98,7 +105,7 @@ class ApplicationTest {
 
     static class TestApplicationConfig extends ApplicationConfig {
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return thisProcessUrl();
         }
 
@@ -138,10 +145,15 @@ class ApplicationTest {
         }
     }
 
+    @BeforeEach
+    void setPort() {
+        this.applicationConfig.setLocalServerHttpPort(this.localServerHttpPort);
+    }
+
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.ecsSimulatorController.testResults.reset();
+        this.icsSimulatorController.testResults.reset();
         this.jobs.clear();
     }
 
@@ -174,8 +186,7 @@ class ApplicationTest {
     }
 
     private ConsumerJobInfo consumerJobInfo() {
-        InfoType type = this.types.getAll().iterator().next();
-        return consumerJobInfo(type.getId(), "EI_JOB_ID");
+        return consumerJobInfo("DmaapInformationType", "EI_JOB_ID");
     }
 
     private Object jsonObject() {
@@ -232,15 +243,54 @@ class ApplicationTest {
     }
 
     @Test
-    void testWholeChain() throws Exception {
+    void testReceiveAndPostDataFromKafka() {
+        final String JOB_ID = "ID";
+        final String TYPE_ID = "KafkaInformationType";
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+
+        // Create a job
+        Job.Parameters param = new Job.Parameters("", new Job.BufferTimeout(123, 456), 1);
+        String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+        ConsumerJobInfo kafkaJobInfo =
+                new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", targetUri, "");
+
+        this.icsSimulatorController.addJob(kafkaJobInfo, JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        KafkaJobDataConsumer kafkaConsumer = this.kafkaTopicConsumers.getConsumers().get(TYPE_ID, JOB_ID);
+
+        // Handle received data from Kafka, check that it has been posted to the
+        // consumer
+        kafkaConsumer.start(Flux.just("data"));
+
+        ConsumerController.TestResults consumer = this.consumerController.testResults;
+        await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(1));
+        assertThat(consumer.receivedBodies.get(0)).isEqualTo("[\"data\"]");
+
+        // Test send an exception
+        kafkaConsumer.start(Flux.error(new NullPointerException()));
+
+        // Test regular restart of stopped
+        kafkaConsumer.stop();
+        this.kafkaTopicConsumers.restartNonRunningTopics();
+        await().untilAsserted(() -> assertThat(kafkaConsumer.isRunning()).isTrue());
+
+        // Delete the job
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+    }
+
+    @Test
+    void testReceiveAndPostDataFromDmaap() throws Exception {
         final String JOB_ID = "ID";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create a job
-        this.ecsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
 
         // Return two messages from DMAAP and verify that these are sent to the owner of
@@ -253,30 +303,28 @@ class ApplicationTest {
 
         String jobUrl = baseUrl() + ProducerCallbacksController.JOB_URL;
         String jobs = restClient().get(jobUrl).block();
-        assertThat(jobs).contains("ExampleInformationType");
+        assertThat(jobs).contains(JOB_ID);
 
         // Delete the job
-        this.ecsSimulatorController.deleteJob(JOB_ID, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-
     }
 
     @Test
     void testReRegister() throws Exception {
         // Wait foir register types and producer
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Clear the registration, should trigger a re-register
-        ecsSimulatorController.testResults.reset();
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        icsSimulatorController.testResults.reset();
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Just clear the registerred types, should trigger a re-register
-        ecsSimulatorController.testResults.types.clear();
+        icsSimulatorController.testResults.types.clear();
         await().untilAsserted(
-                () -> assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1));
-
+                () -> assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(2));
     }
 
     private void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains) {
@@ -303,5 +351,4 @@ class ApplicationTest {
         }
         return true;
     }
-
 }
@@ -47,7 +47,7 @@ import org.springframework.web.bind.annotation.RestController;
 
 @RestController("IcsSimulatorController")
 @Tag(name = "Information Coordinator Service Simulator (exists only in test)")
-public class EcsSimulatorController {
+public class IcsSimulatorController {
 
     private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
     private final static Gson gson = new GsonBuilder().create();
@@ -38,8 +38,8 @@ import org.oran.dmaapadapter.configuration.ImmutableWebClientConfig;
 import org.oran.dmaapadapter.configuration.WebClientConfig;
 import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.r1.ConsumerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
 import org.oran.dmaapadapter.tasks.ProducerRegstrationTask;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -63,7 +63,8 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
 })
 class IntegrationWithEcs {
 
-    private static final String EI_JOB_ID = "EI_JOB_ID";
+    private static final String DMAAP_JOB_ID = "DMAAP_JOB_ID";
+    private static final String DMAAP_TYPE_ID = "DmaapInformationType";
 
     @Autowired
     private ApplicationConfig applicationConfig;
@@ -85,7 +86,7 @@ class IntegrationWithEcs {
     static class TestApplicationConfig extends ApplicationConfig {
 
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return "https://localhost:8434";
         }
 
@@ -128,8 +129,7 @@ class IntegrationWithEcs {
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.jobs.clear();
-        this.types.clear();
+        assertThat(this.jobs.size()).isZero();
     }
 
     private AsyncRestClient restClient(boolean useTrustValidation) {
@@ -161,15 +161,15 @@ class IntegrationWithEcs {
     }
 
     private String ecsBaseUrl() {
-        return applicationConfig.getEcsBaseUrl();
+        return applicationConfig.getIcsBaseUrl();
     }
 
     private String jobUrl(String jobId) {
-        return ecsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId;
+        return ecsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId + "?typeCheck=true";
     }
 
-    private void createInformationJobInEcs(String jobId) {
-        String body = gson.toJson(consumerJobInfo());
+    private void createInformationJobInEcs(String typeId, String jobId, String filter) {
+        String body = gson.toJson(consumerJobInfo(typeId, filter));
         try {
             // Delete the job if it already exists
             deleteInformationJobInEcs(jobId);
@@ -182,13 +182,8 @@ class IntegrationWithEcs {
         restClient().delete(jobUrl(jobId)).block();
     }
 
-    private ConsumerJobInfo consumerJobInfo() {
-        InfoType type = this.types.getAll().iterator().next();
-        return consumerJobInfo(type.getId(), EI_JOB_ID);
-    }
-
-    private Object jsonObject() {
-        return jsonObject("{}");
+    private ConsumerJobInfo consumerJobInfo(String typeId, String filter) {
+        return consumerJobInfo(typeId, DMAAP_JOB_ID, filter);
     }
 
     private Object jsonObject(String json) {
@@ -199,31 +194,60 @@ class IntegrationWithEcs {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId) {
+    private String quote(String str) {
+        return "\"" + str + "\"";
+    }
+
+    private String consumerUri() {
+        return selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+    }
+
+    private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId, String filter) {
         try {
-            String targetUri = selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId, jsonObject(), "owner", targetUri, "");
+
+            String jsonStr = "{ \"filter\" :" + quote(filter) + "}";
+            return new ConsumerJobInfo(typeId, jsonObject(jsonStr), "owner", consumerUri(), "");
         } catch (Exception e) {
             return null;
         }
     }
 
+    @Test
+    void testCreateKafkaJob() {
+        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
+        final String TYPE_ID = "KafkaInformationType";
+
+        Job.Parameters param = new Job.Parameters("filter", new Job.BufferTimeout(123, 456), 1);
+
+        ConsumerJobInfo jobInfo =
+                new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", consumerUri(), "");
+        String body = gson.toJson(jobInfo);
+
+        restClient().putForEntity(jobUrl("KAFKA_JOB_ID"), body).block();
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        deleteInformationJobInEcs("KAFKA_JOB_ID");
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+    }
+
     @Test
     void testWholeChain() throws Exception {
-        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInEcs()).isTrue());
+        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
 
-        createInformationJobInEcs(EI_JOB_ID);
+        createInformationJobInEcs(DMAAP_TYPE_ID, DMAAP_JOB_ID, ".*DmaapResponse.*");
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
 
         DmaapSimulatorController.dmaapResponses.add("DmaapResponse1");
         DmaapSimulatorController.dmaapResponses.add("DmaapResponse2");
+        DmaapSimulatorController.dmaapResponses.add("Junk");
 
         ConsumerController.TestResults results = this.consumerController.testResults;
         await().untilAsserted(() -> assertThat(results.receivedBodies.size()).isEqualTo(2));
         assertThat(results.receivedBodies.get(0)).isEqualTo("DmaapResponse1");
 
-        deleteInformationJobInEcs(EI_JOB_ID);
+        deleteInformationJobInEcs(DMAAP_JOB_ID);
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
 
index 470e114..c38af8a 100644 (file)
@@ -75,10 +75,12 @@ import reactor.kafka.sender.SenderRecord;
 @TestPropertySource(properties = { //
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
-        "app.configuration-filepath=./src/test/resources/test_application_configuration_kafka.json"//
+        "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
 })
 class IntegrationWithKafka {
 
+    final String TYPE_ID = "KafkaInformationType";
+
     @Autowired
     private ApplicationConfig applicationConfig;
 
@@ -92,12 +94,12 @@ class IntegrationWithKafka {
     private ConsumerController consumerController;
 
     @Autowired
-    private EcsSimulatorController ecsSimulatorController;
+    private IcsSimulatorController icsSimulatorController;
 
     @Autowired
     private KafkaTopicConsumers kafkaTopicConsumers;
 
-    private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
+    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
     private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
 
@@ -106,7 +108,7 @@ class IntegrationWithKafka {
 
     static class TestApplicationConfig extends ApplicationConfig {
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return thisProcessUrl();
         }
 
@@ -149,7 +151,7 @@ class IntegrationWithKafka {
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.ecsSimulatorController.testResults.reset();
+        this.icsSimulatorController.testResults.reset();
         this.jobs.clear();
     }
 
@@ -181,14 +183,15 @@ class IntegrationWithKafka {
         return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
     }
 
-    private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+    private static Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize,
+            int maxConcurrency) {
         Job.Parameters param =
                 new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
         String str = gson.toJson(param);
         return jsonObject(str);
     }
 
-    private Object jsonObject(String json) {
+    private static Object jsonObject(String json) {
         try {
             return JsonParser.parseString(json).getAsJsonObject();
         } catch (Exception e) {
@@ -196,12 +199,10 @@ class IntegrationWithKafka {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
+    ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
         try {
-            InfoType type = this.types.getAll().iterator().next();
-            String typeId = type.getId();
             String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId,
+            return new ConsumerJobInfo(TYPE_ID,
                     jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
                     "");
         } catch (Exception e) {
@@ -221,9 +222,11 @@ class IntegrationWithKafka {
         return SenderOptions.create(props);
     }
 
-    private SenderRecord<Integer, String, Integer> senderRecord(String data, int i) {
-        final InfoType infoType = this.types.getAll().iterator().next();
-        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
+    private SenderRecord<Integer, String, Integer> senderRecord(String data) {
+        final InfoType infoType = this.types.get(TYPE_ID);
+        int key = 1;
+        int correlationMetadata = 2;
+        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
     }
 
     private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
@@ -244,38 +247,32 @@ class IntegrationWithKafka {
     }
 
     @Test
-    void kafkaIntegrationTest() throws InterruptedException {
+    void kafkaIntegrationTest() throws Exception {
         final String JOB_ID1 = "ID1";
         final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs. One buffering and one with a filter
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
                 restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend);
 
         verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
 
-        // Just for testing quoting
-        this.consumerController.testResults.reset();
-        dataToSend = Flux.just(senderRecord("Message\"_", 1));
-        sendDataToStream(dataToSend);
-        verifiedReceivedByConsumer("[\"Message\\\"_1\"]");
-
         // Delete the jobs
-        this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
-        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers()).isEmpty());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 
     @Test
@@ -284,30 +281,38 @@ class IntegrationWithKafka {
         final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs.
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
+                restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend); // this should overflow
 
-        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().values().iterator().next();
+        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
         await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
         this.consumerController.testResults.reset();
 
-        kafkaTopicConsumers.restartNonRunningTasks();
-        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        kafkaTopicConsumers.restartNonRunningTopics();
         Thread.sleep(1000); // Restarting the input seems to take some asynch time
 
-        dataToSend = Flux.range(1, 1).map(i -> senderRecord("Howdy_", i));
+        dataToSend = Flux.just(senderRecord("Howdy\""));
         sendDataToStream(dataToSend);
 
-        verifiedReceivedByConsumer("Howdy_1");
+        verifiedReceivedByConsumer("[\"Howdy\\\"\"]");
+
+        // Delete the jobs
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 
 }
index 794eb8e..32e6c32 100644 (file)
@@ -1,9 +1,14 @@
 {
    "types": [
       {
-         "id": "ExampleInformationType",
+         "id": "DmaapInformationType",
          "dmaapTopicUrl": "/dmaap-topic-1",
          "useHttpProxy": false
+      },
+      {
+         "id": "KafkaInformationType",
+         "kafkaInputTopic": "TutorialTopic",
+         "useHttpProxy": false
       }
    ]
 }
\ No newline at end of file
diff --git a/dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json b/dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json
deleted file mode 100644 (file)
index e2ea525..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-   "types": [
-      {
-         "id": "ExampleInformationType",
-         "kafkaInputTopic": "TutorialTopic",
-         "useHttpProxy": false
-      }
-   ]
-}
\ No newline at end of file
diff --git a/docker-compose/.env b/docker-compose/.env
new file mode 100644 (file)
index 0000000..6fc3528
--- /dev/null
@@ -0,0 +1,64 @@
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+#PMS
+PMS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-policy-agent"
+PMS_IMAGE_TAG="2.2.0"
+
+#A1_SIM
+A1_SIM_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/a1-simulator"
+A1_SIM_IMAGE_TAG="2.1.0"
+
+#RAPP
+RAPP_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-r-app-catalogue"
+RAPP_IMAGE_TAG="1.0.0"
+
+#CONTROL_PANEL
+CONTROL_PANEL_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-controlpanel"
+CONTROL_PANEL_IMAGE_TAG="2.2.0"
+
+#GATEWAY
+NONRTRIC_GATEWAY_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-gateway"
+NONRTRIC_GATEWAY_IMAGE_TAG="1.0.0"
+
+#ECS
+ECS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-enrichment-coordinator-service"
+ECS_IMAGE_TAG="1.1.0"
+
+#CONSUMER
+CONSUMER_IMAGE_BASE="eexit/mirror-http-server"
+CONSUMER_IMAGE_TAG="latest"
+
+#ORU
+ORU_APP_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-o-ru-closed-loop-recovery"
+ORU_APP_IMAGE_TAG="1.0.0"
+
+#DB
+DB_IMAGE_BASE="mysql/mysql-server"
+DB_IMAGE_TAG="5.6"
+
+#A1CONTROLLER
+A1CONTROLLER_IMAGE_BASE="nexus3.onap.org:10002/onap/sdnc-image"
+A1CONTROLLER_IMAGE_TAG="2.1.2"
+
+#DMAAP_MEDIATOR_GO
+DMAAP_MEDIATOR_GO_BASE="nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-dmaap-mediator-producer"
+DMAAP_MEDIATOR_GO_TAG="1.0.0"
+
+#DMAAP_MEDIATOR_JAVA
+DMAAP_MEDIATOR_JAVA_BASE="nexus3.o-ran-sc.org:10003/o-ran-sc/nonrtric-dmaap-adaptor"
+DMAAP_MEDIATOR_JAVA_TAG="1.0.0-SNAPSHOT"
\ No newline at end of file
index 9366ff1..8467946 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   a1-sim-OSC:
-    image: nexus3.o-ran-sc.org:10002/o-ran-sc/a1-simulator:2.1.0
+    image: "${A1_SIM_IMAGE_BASE}:${A1_SIM_IMAGE_TAG}"
     container_name: a1-sim-OSC
     networks:
       - default
@@ -35,7 +35,7 @@ services:
       - ALLOW_HTTP=true
 
   a1-sim-STD:
-    image: nexus3.o-ran-sc.org:10002/o-ran-sc/a1-simulator:2.1.0
+    image: "${A1_SIM_IMAGE_BASE}:${A1_SIM_IMAGE_TAG}"
     container_name: a1-sim-STD
     networks:
       - default
@@ -48,7 +48,7 @@ services:
       - ALLOW_HTTP=true
 
   a1-sim-STD-v2:
-    image: nexus3.o-ran-sc.org:10002/o-ran-sc/a1-simulator:2.1.0
+    image: "${A1_SIM_IMAGE_BASE}:${A1_SIM_IMAGE_TAG}"
     container_name: a1-sim-STD-v2
     networks:
       - default
index 340d158..4efdf57 100644 (file)
@@ -22,18 +22,15 @@ networks:
 
 services:
   dmaap-mediator-go:
-    image: nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-dmaap-mediator-producer:1.0.0
+    image: "${DMAAP_MEDIATOR_GO_BASE}:${DMAAP_MEDIATOR_GO_TAG}"
     container_name: dmaap-mediator-go
     environment:
       - INFO_PRODUCER_HOST=http://consumer
-      - LOG_LEVEL=Debug
       - INFO_PRODUCER_PORT=8088
       - INFO_COORD_ADDR=http://ecs:8083
-      - MR_HOST=http://dmaap-mr
-      - MR_PORT=3904
-      - INFO_PRODUCER_SUPERVISION_CALLBACK_HOST=http://consumer
-      - INFO_PRODUCER_SUPERVISION_CALLBACK_PORT=8088
-      - INFO_JOB_CALLBACK_HOST=http://consumer
-      - INFO_JOB_CALLBACK_PORT=8088
+      - DMAAP_MR_ADDR=http://dmaap-mr:3904
+      - PRODUCER_CERT_PATH=security/producer.crt
+      - PRODUCER_KEY_PATH=security/producer.key
+      - LOG_LEVEL=Debug
     networks:
       - default
\ No newline at end of file
index 1d53de4..5cfe809 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   dmaap-mediator-java:
-    image: nexus3.o-ran-sc.org:10003/o-ran-sc/nonrtric-dmaap-adaptor:1.0.0-SNAPSHOT
+    image: "${DMAAP_MEDIATOR_JAVA_BASE}:${DMAAP_MEDIATOR_JAVA_TAG}"
     container_name: dmaap-mediator-java
     networks:
       - default
index 376f734..6de293f 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   ecs:
-    image: nexus3.o-ran-sc.org:10003/o-ran-sc/nonrtric-enrichment-coordinator-service:1.2.0-SNAPSHOT
+    image: "${ECS_IMAGE_BASE}:${ECS_IMAGE_TAG}"
     container_name: ecs
     networks:
       default:
@@ -32,7 +32,7 @@ services:
       - 8083:8083
       - 8434:8434
   consumer:
-    image: eexit/mirror-http-server
+    image: "${CONSUMER_IMAGE_BASE}:${CONSUMER_IMAGE_TAG}"
     container_name: consumer
     networks:
       - default
index a593e2e..2dfc38c 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   policy-agent:
-    image: nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-policy-agent:2.3.0
+    image: "${PMS_IMAGE_BASE}:${PMS_IMAGE_TAG}"
     container_name: policy-agent
     networks:
       default:
index ade37f7..5477588 100644 (file)
@@ -22,7 +22,7 @@ networks:
 
 services:
   r-app:
-    image: nexus3.o-ran-sc.org:10004/o-ran-sc/nonrtric-r-app-catalogue:1.1.0
+    image: "${RAPP_IMAGE_BASE}:${RAPP_IMAGE_TAG}"
     container_name: r-app
     networks:
       default:
index ef234a5..51d18ee 100644 (file)
@@ -20,7 +20,7 @@ Here we describe the APIs to access the Non-RT RIC functions.
 The core Non-RT RIC consists of several parts, with available APIs described in the sections below:
 
 * The A1 Policy Management Service
-* The Enrichment Coordinator Service
+* The Information Coordinator Service
 * The Non-RT-RIC App Catalogue
 * K8S Helm Chart LCM Manager (Initial) **<ToDo>**
 
@@ -29,10 +29,10 @@ A1 Policy Management Service
 
 For information about the A1 Policy Management Service that is implemented in ONAP, see `ONAP docs <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_ and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
-Enrichment Coordinator Service
-==============================
+Information Coordinator Service
+===============================
 
-See `A1 Enrichment Information Coordination Service API <./ecs-api.html>`_ for full details of the API.
+See `A1 Information Information Coordination Service API <./ics-api.html>`_ for full details of the API.
 
 The API is also described in Swagger-JSON and YAML:
 
@@ -40,7 +40,7 @@ The API is also described in Swagger-JSON and YAML:
    :header: "API name", "|swagger-icon|", "|yaml-icon|"
    :widths: 10,5,5
 
-   "A1 Enrichment Information Coordination Service API", ":download:`link <../enrichment-coordinator-service/api/ecs-api.json>`", ":download:`link <../enrichment-coordinator-service/api/ecs-api.yaml>`"
-   "A1 Information Coordination Service API", ":download:`link <../information-coordinator-service/api/ics-api.json>`", ":download:`link <../information-coordinator-service/api/ics-api.yaml>`"
 
 Non-RT-RIC App Catalogue (Initial)
 ==================================
index 85721c6..ff5fcc9 100644 (file)
@@ -24,7 +24,7 @@ redoc = [
             {
                 'name': 'ECS API',
                 'page': 'ecs-api',
-                'spec': '../enrichment-coordinator-service/api/ecs-api.json',
+                'spec': '../information-coordinator-service/api/ics-api.json',
                 'embed': True,
             }
         ]
index 43ac2d1..e0cb080 100644 (file)
@@ -15,21 +15,21 @@ A1 Policy Management Service & SDNC/A1 Controller & A1 Adapter
 The A1 Policy Management Service is implemented in ONAP. For documentation see `ONAP CCSDK documentation <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_
 and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
-Enrichment Coordinator Service
-------------------------------
-The Enrichment Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
+Information Coordinator Service
+-------------------------------
+The Information Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
 dependencies, it runs as a standalone application.
 
 Its main functionality is to act as a data subscription broker and to decouple data producer from data consumers.
 
-See the ./config/README file in the *enrichment-coordinator-service* directory Gerrit repo on how to create and setup
+See the ./config/README file in the *information-coordinator-service* directory Gerrit repo on how to create and setup
 the certificates and private keys needed for HTTPS.
 
 Start standalone
 ++++++++++++++++
 
-The project uses Maven. To start the Enrichment Coordinator Service as a freestanding application, run the following
-command in the *enrichment-coordinator-service* directory:
+The project uses Maven. To start the Information Coordinator Service as a freestanding application, run the following
+command in the *information-coordinator-service* directory:
 
     +-----------------------------+
     | mvn spring-boot:run         |
@@ -38,7 +38,7 @@ command in the *enrichment-coordinator-service* directory:
 Start in Docker
 +++++++++++++++
 
-To build and deploy the Enrichment Coordinator Service, go to the "enrichment-coordinator-service" folder and run the
+To build and deploy the Information Coordinator Service, go to the "information-coordinator-service" folder and run the
 following command:
 
     +-----------------------------+
@@ -48,7 +48,7 @@ following command:
 Then start the container by running the following command:
 
     +--------------------------------------------------------------------+
-    | docker run nonrtric-enrichment-coordinator-service                 |
+    | docker run nonrtric-information-coordinator-service                |
     +--------------------------------------------------------------------+
 
 Initial Non-RT-RIC App Catalogue
index 8645a18..e816975 100644 (file)
@@ -24,7 +24,7 @@ These are the components that make up the Non-RT-RIC:
 * Non-RT-RIC Control Panel / Dashboard
 * A1 Policy Management Service (developed in ONAP)
 * A1/SDNC Controller & A1 Adapter (Controller plugin)
-* Enrichment Information Coordinator
+* Information Coordinator Service
 * Non-RT-RIC (Spring Cloud) Service Gateway
 * Non-RT-RIC (Kong) Service Exposure Prototyping
 * Initial Non-RT-RIC App Catalogue
@@ -40,7 +40,7 @@ Graphical user interface
 * View and Manage A1 policies in the RAN (near-RT-RICs)
 * Interacts with the Policy agent’s NBI (REST API)
 * Graphical A1 policy creation/editing is model-driven, based on policy type’s JSON schema
-* View and manage producers and jobs for the Enrichment coordinator service
+* View and manage producers and jobs for the Information coordinator service
 * Configure A1 Policy Management Service (e.g. add/remove near-rt-rics)
 * Interacts with the A1-PolicyManagementService & A1-EI-Coordinator (REST NBIs) via Service Exposure gateway
      
@@ -90,8 +90,8 @@ Mediation point for A1 interface termination in SMO/NONRTRIC
 
 See also: `A1 Adapter/Controller Functions in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_  
   
-Enrichment Information Job Coordination Service
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Information Job Coordination Service
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs.
 
@@ -106,14 +106,14 @@ Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs.
 * Query status of A1-EI jobs
 * Monitors all near-RT-RICs and recovers from inconsistencies
 * After EI-type/Producer/Consumer/Job is successfully registered delivery/flow can happen directly between A1-EI Producers (in SMO/NONRTRIC domain) and A1-EI consumers (near-RT-RICs in RAN domain)
-* *Being extended to coordinate non-A1 Enrichment Information exchange between NONRTRIC Apps*
+* *Being extended to coordinate non-A1 Information exchange between NONRTRIC Apps*
 
 Non-RT-RIC (Spring Cloud) Service Gateway
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Support Apps to use A1 Services 
 
 * `Spring Cloud Gateway <https://cloud.spring.io/spring-cloud-gateway>`_ provides the library to build a basic API gateway
-* Exposes A1 Policy Management Service & Enrichment Coordinator Service.  
+* Exposes A1 Policy Management Service & Information Coordinator Service.  
 * Additional predicates can be added in code or preferably in the Gateway yaml configuration.
 
 Implementation:
@@ -162,7 +162,7 @@ A1 Interface / Near-RT-RIC Simulator
 Stateful A1 test stub.
 
 * Used to create multiple stateful A1 providers (simulated near-rt-rics)
-* Supports A1-Policy and A1-EnrichmentInformation
+* Supports A1-Policy and A1-Enrichment Information
 * Swagger-based northbound interface, so easy to change the A1 profile exposed (e.g. A1 version, A1 Policy Types, A1-E1 consumers, etc)
 * All A1-AP versions supported
 
diff --git a/enrichment-coordinator-service b/enrichment-coordinator-service
new file mode 120000 (symlink)
index 0000000..750df39
--- /dev/null
@@ -0,0 +1 @@
+information-coordinator-service
\ No newline at end of file
similarity index 53%
rename from enrichment-coordinator-service/Dockerfile
rename to information-coordinator-service/Dockerfile
index 744a237..e9d179d 100644 (file)
@@ -21,23 +21,23 @@ FROM openjdk:11-jre-slim
 
 ARG JAR
 
-WORKDIR /opt/app/enrichment-coordinator-service
-RUN mkdir -p /var/log/enrichment-coordinator-service
-RUN mkdir -p /opt/app/enrichment-coordinator-service/etc/cert/
-RUN mkdir -p /var/enrichment-coordinator-service
-RUN chmod -R 777 /var/enrichment-coordinator-service
+WORKDIR /opt/app/information-coordinator-service
+RUN mkdir -p /var/log/information-coordinator-service
+RUN mkdir -p /opt/app/information-coordinator-service/etc/cert/
+RUN mkdir -p /var/information-coordinator-service
+RUN chmod -R 777 /var/information-coordinator-service
 
 EXPOSE 8083 8434
 
-ADD /config/application.yaml /opt/app/enrichment-coordinator-service/config/application.yaml
-ADD target/${JAR} /opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar
-ADD /config/keystore.jks /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
-ADD /config/truststore.jks /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
+ADD /config/application.yaml /opt/app/information-coordinator-service/config/application.yaml
+ADD target/${JAR} /opt/app/information-coordinator-service/information-coordinator-service.jar
+ADD /config/keystore.jks /opt/app/information-coordinator-service/etc/cert/keystore.jks
+ADD /config/truststore.jks /opt/app/information-coordinator-service/etc/cert/truststore.jks
 
 
-RUN chmod -R 777 /opt/app/enrichment-coordinator-service/config/
+RUN chmod -R 777 /opt/app/information-coordinator-service/config/
 
-CMD ["java", "-jar", "/opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar"]
+CMD ["java", "-jar", "/opt/app/information-coordinator-service/information-coordinator-service.jar"]
 
 
 
@@ -19,9 +19,9 @@ logging:
     org.springframework: ERROR
     org.springframework.data: ERROR
     org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
-    org.oransc.enrichment: INFO
+    org.oransc.ics: INFO
   file:
-    name: /var/log/enrichment-coordinator-service/application.log
+    name: /var/log/information-coordinator-service/application.log
 server:
    # Configuration of the HTTP/REST server. The parameters are defined and handeled by the springboot framework.
    # See springboot documentation.
@@ -30,7 +30,7 @@ server:
    ssl:
       key-store-type: JKS
       key-store-password: policy_agent
-      key-store: /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
+      key-store: /opt/app/information-coordinator-service/etc/cert/keystore.jks
       key-password: policy_agent
       key-alias: policy_agent
 app:
@@ -40,10 +40,10 @@ app:
     # Note that the same keystore as for the server is used.
     trust-store-used: false
     trust-store-password: policy_agent
-    trust-store: /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
+    trust-store: /opt/app/information-coordinator-service/etc/cert/truststore.jks
     # Configuration of usage of HTTP Proxy for the southbound accesses.
     # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
     http.proxy-host:
     http.proxy-port: 0
-  vardata-directory: /var/enrichment-coordinator-service
+  vardata-directory: /var/information-coordinator-service
 
similarity index 96%
rename from enrichment-coordinator-service/pom.xml
rename to information-coordinator-service/pom.xml
index 4edf4d4..2de2bf2 100644 (file)
@@ -30,7 +30,7 @@
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
-    <artifactId>enrichment-coordinator-service</artifactId>
+    <artifactId>information-coordinator-service</artifactId>
     <version>1.2.0-SNAPSHOT</version>
     <licenses>
         <license>
                             <goal>generate</goal>
                         </goals>
                         <configuration>
-                            <inputSpec>${project.basedir}/api/ecs-api.json</inputSpec>
+                            <inputSpec>${project.basedir}/api/ics-api.json</inputSpec>
                             <language>openapi-yaml</language>
                             <output>${project.basedir}/api</output>
                             <configOptions>
-                                <outputFile>ecs-api.yaml</outputFile>
+                                <outputFile>ics-api.yaml</outputFile>
                             </configOptions>
                         </configuration>
                     </execution>
                 <inherited>false</inherited>
                 <executions>
                     <execution>
-                        <id>generate-enrichment-coordinator-service-image</id>
+                        <id>generate-information-coordinator-service-image</id>
                         <phase>package</phase>
                         <goals>
                             <goal>build</goal>
                             <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
                             <images>
                                 <image>
-                                    <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+                                    <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
                                     <build>
                                         <cleanup>try</cleanup>
                                         <contextDir>${basedir}</contextDir>
                         </configuration>
                     </execution>
                     <execution>
-                        <id>push-enrichment-coordinator-service-image</id>
+                        <id>push-information-coordinator-service-image</id>
                         <goals>
                             <goal>build</goal>
                             <goal>push</goal>
                             <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
                             <images>
                                 <image>
-                                    <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+                                    <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
                                     <build>
                                         <contextDir>${basedir}</contextDir>
                                         <dockerFile>Dockerfile</dockerFile>
         <system>JIRA</system>
         <url>https://jira.o-ran-sc.org/</url>
     </issueManagement>
-</project>
+</project>
\ No newline at end of file
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import java.lang.invoke.MethodHandles;
 
 import org.apache.catalina.connector.Connector;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Value;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import io.swagger.v3.oas.annotations.OpenAPIDefinition;
 import io.swagger.v3.oas.annotations.info.Info;
 import io.swagger.v3.oas.annotations.info.License;
 import io.swagger.v3.oas.annotations.tags.Tag;
 
-import org.oransc.enrichment.controllers.StatusController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.StatusController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
 
 /**
  * Swagger configuration class that uses swagger documentation type and scans
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
 
 import io.netty.channel.ChannelOption;
 import io.netty.handler.ssl.SslContext;
@@ -28,7 +28,7 @@ import io.netty.handler.timeout.WriteTimeoutHandler;
 import java.lang.invoke.MethodHandles;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
 
 import io.netty.handler.ssl.SslContext;
 import io.netty.handler.ssl.SslContextBuilder;
@@ -41,8 +41,8 @@ import java.util.stream.Collectors;
 
 import javax.net.ssl.KeyManagerFactory;
 
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.ResourceUtils;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.configuration;
+package org.oransc.ics.configuration;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Value;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -26,7 +26,7 @@ import com.google.gson.annotations.SerializedName;
 
 import io.swagger.v3.oas.annotations.media.Schema;
 
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.exceptions.ServiceException;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
@@ -31,9 +31,9 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.tags.Tag;
 
 import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoTypes;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -26,13 +26,13 @@ import com.google.gson.GsonBuilder;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
@@ -40,16 +40,16 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.json.JSONObject;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 import reactor.core.publisher.Mono;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
@@ -41,17 +41,17 @@ import java.util.Collection;
 import java.util.List;
 
 import org.json.JSONObject;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -27,13 +27,13 @@ import java.lang.invoke.MethodHandles;
 import java.time.Duration;
 import java.util.Collection;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,16 +38,16 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
@@ -26,7 +26,7 @@ import com.google.gson.annotations.SerializedName;
 import io.swagger.v3.oas.annotations.media.Schema;
 
 import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJob;
+import org.oransc.ics.repository.InfoJob;
 
 @Gson.TypeAdapters
 @Schema(
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,9 +38,9 @@ import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Vector;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -30,9 +30,9 @@ import java.util.Vector;
 import lombok.Builder;
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -41,8 +41,8 @@ import java.util.function.Function;
 import lombok.Builder;
 import lombok.Getter;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,8 +38,8 @@ import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Vector;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.tasks;
+package org.oransc.ics.tasks;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.awaitility.Awaitility.await;
@@ -41,40 +41,40 @@ import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.configuration.ImmutableHttpProxyConfig;
-import org.oransc.enrichment.configuration.ImmutableWebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
-import org.oransc.enrichment.controller.ConsumerSimulatorController;
-import org.oransc.enrichment.controller.ProducerSimulatorController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobInfo;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.a1e.A1eEiTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerRegistrationInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerStatusInfo;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
-import org.oransc.enrichment.tasks.ProducerSupervision;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.configuration.ImmutableHttpProxyConfig;
+import org.oransc.ics.configuration.ImmutableWebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.controller.ConsumerSimulatorController;
+import org.oransc.ics.controller.ProducerSimulatorController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobInfo;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.a1e.A1eEiTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerInfoTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerInfoTypeInfo;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.r1producer.ProducerRegistrationInfo;
+import org.oransc.ics.controllers.r1producer.ProducerStatusInfo;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
+import org.oransc.ics.tasks.ProducerSupervision;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -182,7 +182,7 @@ class ApplicationTest {
         assertThat(jsonObj.remove("servers")).isNotNull();
 
         String indented = jsonObj.toString(4);
-        try (PrintStream out = new PrintStream(new FileOutputStream("api/ecs-api.json"))) {
+        try (PrintStream out = new PrintStream(new FileOutputStream("api/ics-api.json"))) {
             out.print(indented);
         }
     }
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
 
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
@@ -34,11 +34,11 @@ import java.util.List;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
 
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
@@ -34,10 +34,10 @@ import java.util.List;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
diff --git a/pom.xml b/pom.xml
index e0665a1..d4f5500 100644 (file)
--- a/pom.xml
+++ b/pom.xml
@@ -34,7 +34,7 @@
     </properties>
     <modules>
         <module>policy-agent</module>
-        <module>enrichment-coordinator-service</module>
+        <module>information-coordinator-service</module>
         <module>r-app-catalogue</module>
         <module>helm-manager</module>
         <module>dmaap-adaptor-java</module>
index 5d718b0..1e34405 100755 (executable)
@@ -24,7 +24,7 @@ TC_ONELINE_DESCR="Sanity test, create service and then create,update and delete
 DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR DMAAPMR PA RICSIM SDNC NGW KUBEPROXY"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="CP CR MR DMAAPMR PA RICSIM SDNC NGW KUBEPROXY "
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,17 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -85,7 +75,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         # Create service to be able to receive events when rics becomes available
         # Must use rest towards the agent since dmaap is not configured yet
-        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
         if [ $__httpx == "HTTPS" ]; then
             use_cr_https
@@ -119,9 +109,10 @@ for __httpx in $TESTED_PROTOCOLS ; do
             start_ric_simulators ricsim_g3 1  STD_2.0.0
         fi
 
-        start_mr
+        start_mr    "$MR_READ_TOPIC"  "/events" "users/policy-agent" \
+                    "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -162,9 +153,9 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
             api_equal json:policy-instances 0
 
-            cr_equal received_callbacks 3 120
+            cr_equal received_callbacks 3 120
 
-            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
 
         else
             api_equal json:rics 2 300
@@ -188,7 +179,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         echo "##### Service registry and supervision #####"
         echo "############################################"
 
-        api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "serv1" "ric-registration"
 
@@ -209,7 +200,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         echo "############################################"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
@@ -248,7 +239,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3
+            cr_equal received_callbacks 3
         fi
 
         if [[ $interface = *"DMAAP"* ]]; then
index 625346b..a561cc6 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -73,7 +64,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 if [ $RUNMODE == "DOCKER" ]; then
     start_consul_cbs
@@ -121,14 +112,14 @@ fi
 # Create policies
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
 
 use_agent_rest_http
 
-api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
 api_put_policy 201 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
 
@@ -173,7 +164,7 @@ fi
 #Update policies
 use_agent_rest_http
 
-api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
 api_put_policy 200 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
 
index ac6f8d5..da623ce 100755 (executable)
@@ -38,16 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -89,7 +80,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         # Create service to be able to receive events when rics becomes available
         # Must use rest towards the agent since dmaap is not configured yet
-        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
 
         if [ $__httpx == "HTTPS" ]; then
@@ -124,7 +115,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -176,8 +167,8 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3 120
-            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+            cr_equal received_callbacks 3 120
+            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
         fi
         mr_equal requests_submitted 0
 
@@ -194,14 +185,14 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         api_get_services 404 "service1"
 
-        api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH_0/1"
 
-        api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
 
-        api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH/2"
+        api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH_0/2"
 
-        api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH/2"
+        api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH_0/2"
 
         api_put_service 400 "service2" 100 "/test"
 
@@ -209,20 +200,20 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         api_put_service 201 "service2" 300 "ftp://localhost:80/test"
 
-        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "service1" "service2" "ric-registration"
 
 
-        api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH/3"
+        api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH_0/3"
 
 
         api_get_service_ids 200 "service1" "service2" "service3" "ric-registration"
 
 
-        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
-        api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH/3"  "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH_0/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH_0/3"  "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
         api_get_services 200
 
@@ -251,7 +242,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         api_get_service_ids 200 "service2" "service3" "ric-registration"
 
 
-        api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "service1" "service2" "service3"  "ric-registration"
 
@@ -386,10 +377,10 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
 
 
-        api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
@@ -544,7 +535,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3
+            cr_equal received_callbacks 3
         fi
 
         if [[ $interface = *"DMAAP"* ]]; then
index e3b96a5..f855f6f 100755 (executable)
@@ -38,15 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -71,7 +63,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 if [ $RUNMODE == "DOCKER" ]; then
     start_consul_cbs
@@ -107,60 +99,60 @@ if [ "$PMS_VERSION" == "V2" ]; then
     sim_print ricsim_g3_1 interface
 fi
 
-api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
 
-api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
 
-api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 api_get_service_ids 200 "service1" "service2"
 
 sleep_wait 30 "Waiting for keep alive timeout"
 
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 sleep_wait 100 "Waiting for keep alive timeout"
 
 api_get_services 404 "service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 api_delete_services 204 "service2"
 
 api_get_services 404 "service1"
 api_get_services 404 "service2"
 
-api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
-api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
 sleep_wait 30 "Waiting for keep alive timeout"
 
-api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
 sleep_wait 100 "Waiting for keep alive timeout"
 
 api_get_services 404 "service3"
 
-api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 sleep_wait 60 "Waiting for keep alive timeout"
 
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 api_put_services_keepalive 200 "service4"
 
 sleep_wait 90 "Waiting for keep alive timeout"
 
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 api_delete_services 204 "service4"
 
@@ -183,7 +175,7 @@ api_put_services_keepalive 404 "service3"
 api_put_services_keepalive 404 "service4"
 
 # Policy delete after timeout
-api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH_0/service10"
 
 sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
 
@@ -209,7 +201,7 @@ else
 fi
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -263,7 +255,7 @@ if [ "$PMS_VERSION" == "V2" ]; then
     sim_equal ricsim_g3_1 num_instances 1
 fi
 
-api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH_0/service10"
 
 #Wait for service expiry
 api_equal json:policies 0 120
index 0e4f4a7..cc23abb 100755 (executable)
@@ -38,16 +38,7 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -89,7 +80,7 @@ if [ "$PMS_VERSION" == "V2" ]; then
     start_ric_simulators ricsim_g3 4  STD_2.0.0
 fi
 
-start_cr
+start_cr 1
 
 CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
 CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
@@ -110,25 +101,25 @@ TARGET150="http://localhost:80/target"  # Dummy target, no target for info data
 TARGET160="http://localhost:80/target"  # Dummy target, no target for info data in this env...
 
 #Status callbacks for eijobs
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
-STATUS3="$CR_SERVICE_APP_PATH/job3-status"
-STATUS8="$CR_SERVICE_APP_PATH/job8-status"
-STATUS10="$CR_SERVICE_APP_PATH/job10-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
+STATUS3="$CR_SERVICE_APP_PATH_0/job3-status"
+STATUS8="$CR_SERVICE_APP_PATH_0/job8-status"
+STATUS10="$CR_SERVICE_APP_PATH_0/job10-status"
 
 #Status callbacks for infojobs
-INFOSTATUS101="$CR_SERVICE_APP_PATH/info-job101-status"
-INFOSTATUS102="$CR_SERVICE_APP_PATH/info-job102-status"
-INFOSTATUS103="$CR_SERVICE_APP_PATH/info-job103-status"
-INFOSTATUS108="$CR_SERVICE_APP_PATH/info-job108-status"
-INFOSTATUS110="$CR_SERVICE_APP_PATH/info-job110-status"
-INFOSTATUS150="$CR_SERVICE_APP_PATH/info-job150-status"
-INFOSTATUS160="$CR_SERVICE_APP_PATH/info-job160-status"
+INFOSTATUS101="$CR_SERVICE_APP_PATH_0/info-job101-status"
+INFOSTATUS102="$CR_SERVICE_APP_PATH_0/info-job102-status"
+INFOSTATUS103="$CR_SERVICE_APP_PATH_0/info-job103-status"
+INFOSTATUS108="$CR_SERVICE_APP_PATH_0/info-job108-status"
+INFOSTATUS110="$CR_SERVICE_APP_PATH_0/info-job110-status"
+INFOSTATUS150="$CR_SERVICE_APP_PATH_0/info-job150-status"
+INFOSTATUS160="$CR_SERVICE_APP_PATH_0/info-job160-status"
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
-    TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+    TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
 
     ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
@@ -163,36 +154,36 @@ if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
 
     ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
 
-    cr_equal received_callbacks 1 30
-    cr_equal received_callbacks?id=type-status1 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+    cr_equal received_callbacks 1 30
+    cr_equal received_callbacks?id=type-status1 1
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
 
     ecs_api_edp_delete_type_2 204 type1
 
-    cr_equal received_callbacks 2 30
-    cr_equal received_callbacks?id=type-status1 2
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+    cr_equal received_callbacks 2 30
+    cr_equal received_callbacks?id=type-status1 2
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
 
     ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
     ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
 
     ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
 
-    cr_equal received_callbacks 4 30
-    cr_equal received_callbacks?id=type-status1 3
-    cr_equal received_callbacks?id=type-status2 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+    cr_equal received_callbacks 4 30
+    cr_equal received_callbacks?id=type-status1 3
+    cr_equal received_callbacks?id=type-status2 1
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
 
     ecs_api_idc_delete_subscription 204 subscription-id-2
 
     ecs_api_edp_delete_type_2 204 type1
 
-    cr_equal received_callbacks 5 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=type-status2 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+    cr_equal received_callbacks 5 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=type-status2 1
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
 
-    cr_api_reset
+    cr_api_reset 0
 fi
 
 ### Setup prodstub sim to accept calls for producers, types and jobs
@@ -251,7 +242,7 @@ prodstub_arm_job_create 200 prod-f job10
 ### ecs status
 ecs_api_service_status 200
 
-cr_equal received_callbacks 0
+cr_equal received_callbacks 0
 
 ### Initial tests - no config made
 ### GET: type ids, types, producer ids, producers, job ids, jobs
@@ -345,11 +336,11 @@ else
     ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
     if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-        cr_equal received_callbacks 3 30
-        cr_equal received_callbacks?id=type-status1 3
-        cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED type1 testdata/ecs/ei-type-1.json DEREGISTERED type1 testdata/ecs/ei-type-1.json REGISTERED
+        cr_equal received_callbacks 3 30
+        cr_equal received_callbacks?id=type-status1 3
+        cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED type1 testdata/ecs/ei-type-1.json DEREGISTERED type1 testdata/ecs/ei-type-1.json REGISTERED
     else
-        cr_equal received_callbacks 0
+        cr_equal received_callbacks 0
     fi
 fi
 
@@ -500,11 +491,11 @@ else
     ecs_api_edp_put_type_2 201 type2 testdata/ecs/ei-type-2.json
     ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
     if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-        cr_equal received_callbacks 4 30
-        cr_equal received_callbacks?id=type-status1 4
-        cr_api_check_all_ecs_subscription_events 200 type-status1 type2 testdata/ecs/ei-type-2.json REGISTERED
+        cr_equal received_callbacks 4 30
+        cr_equal received_callbacks?id=type-status1 4
+        cr_api_check_all_ecs_subscription_events 200 type-status1 type2 testdata/ecs/ei-type-2.json REGISTERED
     else
-        cr_equal received_callbacks 0
+        cr_equal received_callbacks 0
     fi
 fi
 
@@ -729,14 +720,14 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 5 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=job3-status 1
-    cr_api_check_all_ecs_events 200 job3-status DISABLED
+    cr_equal received_callbacks 5 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=job3-status 1
+    cr_api_check_all_ecs_events 200 job3-status DISABLED
 else
-    cr_equal received_callbacks 1 30
-    cr_equal received_callbacks?id=job3-status 1
-    cr_api_check_all_ecs_events 200 job3-status DISABLED
+    cr_equal received_callbacks 1 30
+    cr_equal received_callbacks?id=job3-status 1
+    cr_api_check_all_ecs_events 200 job3-status DISABLED
 fi
 
 # Re-create the producer
@@ -755,14 +746,14 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 6 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=job3-status 2
-    cr_api_check_all_ecs_events 200 job3-status ENABLED
+    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=job3-status 2
+    cr_api_check_all_ecs_events 200 job3-status ENABLED
 else
-    cr_equal received_callbacks 2 30
-    cr_equal received_callbacks?id=job3-status 2
-    cr_api_check_all_ecs_events 200 job3-status ENABLED
+    cr_equal received_callbacks 2 30
+    cr_equal received_callbacks?id=job3-status 2
+    cr_api_check_all_ecs_events 200 job3-status ENABLED
 fi
 
 if [ $ECS_VERSION == "V1-1" ]; then
@@ -784,9 +775,9 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 7 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+    cr_equal received_callbacks 7 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
 fi
 
 ecs_api_a1_get_job_ids 200 type4 NOWNER EMPTY
@@ -839,14 +830,14 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 8 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_equal received_callbacks?id=job8-status 1
-    cr_api_check_all_ecs_events 200 job8-status DISABLED
+    cr_equal received_callbacks 8 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_equal received_callbacks?id=job8-status 1
+    cr_api_check_all_ecs_events 200 job8-status DISABLED
 else
-    cr_equal received_callbacks 3 30
-    cr_equal received_callbacks?id=job8-status 1
-    cr_api_check_all_ecs_events 200 job8-status DISABLED
+    cr_equal received_callbacks 3 30
+    cr_equal received_callbacks?id=job8-status 1
+    cr_api_check_all_ecs_events 200 job8-status DISABLED
 fi
 
 prodstub_equal create/prod-d/job8 1
@@ -879,16 +870,16 @@ ecs_api_edp_get_producer_status 200 prod-c ENABLED
 ecs_api_edp_get_producer_status 200 prod-d ENABLED
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=type-status1 6
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=type-status1 6
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=job8-status 2
-    cr_api_check_all_ecs_events 200 job8-status ENABLED
+    cr_equal received_callbacks?id=job8-status 2
+    cr_api_check_all_ecs_events 200 job8-status ENABLED
 else
-    cr_equal received_callbacks 4 30
-    cr_equal received_callbacks?id=job8-status 2
-    cr_api_check_all_ecs_events 200 job8-status ENABLED
+    cr_equal received_callbacks 4 30
+    cr_equal received_callbacks?id=job8-status 2
+    cr_api_check_all_ecs_events 200 job8-status ENABLED
 fi
 
 prodstub_equal create/prod-d/job8 2
@@ -903,9 +894,9 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 11 30
-    cr_equal received_callbacks?id=type-status1 7
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+    cr_equal received_callbacks 11 30
+    cr_equal received_callbacks?id=type-status1 7
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
 fi
 
 ecs_api_a1_get_job_ids 200 type6 NOWNER EMPTY
@@ -946,9 +937,9 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 12 30
-    cr_equal received_callbacks?id=type-status1 8
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+    cr_equal received_callbacks 12 30
+    cr_equal received_callbacks?id=type-status1 8
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
 fi
 
 ecs_api_a1_get_job_ids 200 type6 NOWNER job10
@@ -1125,16 +1116,16 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 14 30
+    cr_equal received_callbacks 14 30
 else
-    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks 6 30
 fi
 
-cr_equal received_callbacks?id=job1-status 1
-cr_equal received_callbacks?id=job2-status 1
+cr_equal received_callbacks?id=job1-status 1
+cr_equal received_callbacks?id=job2-status 1
 
-cr_api_check_all_ecs_events 200 job1-status DISABLED
-cr_api_check_all_ecs_events 200 job2-status DISABLED
+cr_api_check_all_ecs_events 200 job1-status DISABLED
+cr_api_check_all_ecs_events 200 job2-status DISABLED
 
 
 # Arm producer prod-e for supervision failure
@@ -1241,9 +1232,9 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 14 30
+    cr_equal received_callbacks 14 30
 else
-    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks 6 30
 fi
 
 
@@ -1379,11 +1370,11 @@ ecs_api_edp_put_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
 ecs_api_edp_delete_type_2 406 type101
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 17 30
-    cr_equal received_callbacks?id=type-status1 11
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type101 testdata/ecs/info-type-1.json REGISTERED type101 testdata/ecs/info-type-1.json DEREGISTERED type101 testdata/ecs/info-type-1.json REGISTERED
+    cr_equal received_callbacks 17 30
+    cr_equal received_callbacks?id=type-status1 11
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type101 testdata/ecs/info-type-1.json REGISTERED type101 testdata/ecs/info-type-1.json DEREGISTERED type101 testdata/ecs/info-type-1.json REGISTERED
 else
-    cr_equal received_callbacks 6
+    cr_equal received_callbacks 6
 fi
 
 ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
@@ -1450,11 +1441,11 @@ ecs_api_edp_put_type_2 201 type102 testdata/ecs/info-type-2.json
 ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 18 30
-    cr_equal received_callbacks?id=type-status1 12
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type102 testdata/ecs/info-type-2.json REGISTERED
+    cr_equal received_callbacks 18 30
+    cr_equal received_callbacks?id=type-status1 12
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type102 testdata/ecs/info-type-2.json REGISTERED
 else
-    cr_equal received_callbacks 6
+    cr_equal received_callbacks 6
 fi
 
 ecs_api_idc_get_type_ids 200 type101 type102 type1 type2 type4 type6
@@ -1564,14 +1555,14 @@ ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d p
 ecs_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 19 30
+    cr_equal received_callbacks 19 30
 
-    cr_equal received_callbacks?id=info-job103-status 1
-    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+    cr_equal received_callbacks?id=info-job103-status 1
+    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
 else
-    cr_equal received_callbacks 7 30
-    cr_equal received_callbacks?id=info-job103-status 1
-    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+    cr_equal received_callbacks 7 30
+    cr_equal received_callbacks?id=info-job103-status 1
+    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
 fi
 
 # Re-create the producer
@@ -1582,13 +1573,13 @@ ecs_api_edp_get_producer_status 200 prod-ib ENABLED
 ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 20 30
-    cr_equal received_callbacks?id=info-job103-status 2
-    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+    cr_equal received_callbacks 20 30
+    cr_equal received_callbacks?id=info-job103-status 2
+    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
 else
-    cr_equal received_callbacks 8 30
-    cr_equal received_callbacks?id=info-job103-status 2
-    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+    cr_equal received_callbacks 8 30
+    cr_equal received_callbacks?id=info-job103-status 2
+    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
 fi
 
 prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template2.json
@@ -1619,16 +1610,16 @@ ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108  job1 job2
 ecs_api_idc_get_job_status2 200 job108 DISABLED EMPTYPROD
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 22 30
-    cr_equal received_callbacks?id=type-status1 13
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+    cr_equal received_callbacks 22 30
+    cr_equal received_callbacks?id=type-status1 13
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=info-job108-status 1
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks?id=info-job108-status 1
+    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
 else
-    cr_equal received_callbacks 9 30
-    cr_equal received_callbacks?id=info-job108-status 1
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks 9 30
+    cr_equal received_callbacks?id=info-job108-status 1
+    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
 fi
 
 prodstub_equal create/prod-id/job108 1
@@ -1650,17 +1641,17 @@ ecs_api_edp_get_producer_status 200 prod-ic ENABLED
 ecs_api_edp_get_producer_status 200 prod-id ENABLED
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 24 30
+    cr_equal received_callbacks 24 30
 
-    cr_equal received_callbacks?id=type-status1 14
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+    cr_equal received_callbacks?id=type-status1 14
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=info-job108-status 2
-    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+    cr_equal received_callbacks?id=info-job108-status 2
+    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
 else
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=info-job108-status 2
-    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=info-job108-status 2
+    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
 fi
 
 prodstub_equal create/prod-id/job108 2
@@ -1689,10 +1680,10 @@ ecs_api_edp_put_type_2 200 type106 testdata/ecs/info-type-6.json
 ecs_api_edp_put_producer_2 201 prod-if $CB_JOB/prod-if $CB_SV/prod-if type106
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 26 30
+    cr_equal received_callbacks 26 30
 
-    cr_equal received_callbacks?id=type-status1 16
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type106 testdata/ecs/info-type-6.json REGISTERED type106 testdata/ecs/info-type-6.json REGISTERED
+    cr_equal received_callbacks?id=type-status1 16
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type106 testdata/ecs/info-type-6.json REGISTERED type106 testdata/ecs/info-type-6.json REGISTERED
 fi
 
 
@@ -1807,19 +1798,19 @@ ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 28 30
+    cr_equal received_callbacks 28 30
 
-    cr_equal received_callbacks?id=info-job101-status 1
-    cr_equal received_callbacks?id=info-job102-status 1
-    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
-    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+    cr_equal received_callbacks?id=info-job101-status 1
+    cr_equal received_callbacks?id=info-job102-status 1
+    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
+    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
 else
-    cr_equal received_callbacks 12 30
+    cr_equal received_callbacks 12 30
 
-    cr_equal received_callbacks?id=info-job101-status 1
-    cr_equal received_callbacks?id=info-job102-status 1
-    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
-    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+    cr_equal received_callbacks?id=info-job101-status 1
+    cr_equal received_callbacks?id=info-job102-status 1
+    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
+    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
 fi
 
 
@@ -1887,9 +1878,9 @@ ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
 ecs_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 28
+    cr_equal received_callbacks 28
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 ### Test of pre and post validation
 
@@ -1937,11 +1928,11 @@ ecs_api_edp_put_type_2 201 type150 testdata/ecs/info-type-50.json
 ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160 type150
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 30 30
-    cr_equal received_callbacks?id=type-status1 18
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type160 testdata/ecs/info-type-60.json REGISTERED type150 testdata/ecs/info-type-50.json REGISTERED
+    cr_equal received_callbacks 30 30
+    cr_equal received_callbacks?id=type-status1 18
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type160 testdata/ecs/info-type-60.json REGISTERED type150 testdata/ecs/info-type-50.json REGISTERED
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
 ecs_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
@@ -1958,10 +1949,10 @@ ecs_api_idc_get_job_status2 200 job150 ENABLED  1 prod-ig 60
 ecs_api_idc_get_job_status2 200 job160 ENABLED  1 prod-ig
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 30 30
-    cr_equal received_callbacks?id=type-status1 18
+    cr_equal received_callbacks 30 30
+    cr_equal received_callbacks?id=type-status1 18
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
 # Test job deletion at type delete
@@ -1974,18 +1965,18 @@ if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
 
     ecs_api_edp_delete_type_2 204 type104
 
-    cr_equal received_callbacks 32 30
-    cr_equal received_callbacks?id=info-job108-status 3
-    cr_equal received_callbacks?id=type-status1 19
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json DEREGISTERED
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks 32 30
+    cr_equal received_callbacks?id=info-job108-status 3
+    cr_equal received_callbacks?id=type-status1 19
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json DEREGISTERED
+    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
 
     ecs_api_edp_get_producer 404 prod-id
 
     ecs_api_idc_get_job 404 job-108
 
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
 check_ecs_logs
index f011a21..822f835 100755 (executable)
@@ -32,10 +32,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 6241f3c..232f901 100755 (executable)
@@ -38,14 +38,7 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -73,7 +66,7 @@ if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then
     start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
 fi
 
-start_cr
+start_cr 1
 
 CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
 CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
@@ -88,8 +81,8 @@ fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
-    TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+    TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
 
     ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
@@ -219,11 +212,11 @@ else
 
 
         if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-            cr_equal received_callbacks 20 30
-            cr_equal received_callbacks?id=type-status1 10
-            cr_equal received_callbacks?id=type-status2 10
+            cr_equal received_callbacks 20 30
+            cr_equal received_callbacks?id=type-status1 10
+            cr_equal received_callbacks?id=type-status2 10
 
-            cr_api_check_all_ecs_subscription_events 200 type-status1 \
+            cr_api_check_all_ecs_subscription_events 200 type-status1 \
                 type1 testdata/ecs/ei-type-1.json REGISTERED \
                 type2 testdata/ecs/ei-type-2.json REGISTERED \
                 type3 testdata/ecs/ei-type-3.json REGISTERED \
@@ -235,7 +228,7 @@ else
                 type104 testdata/ecs/info-type-4.json REGISTERED \
                 type105 testdata/ecs/info-type-5.json REGISTERED
 
-            cr_api_check_all_ecs_subscription_events 200 type-status2 \
+            cr_api_check_all_ecs_subscription_events 200 type-status2 \
                 type1 testdata/ecs/ei-type-1.json REGISTERED \
                 type2 testdata/ecs/ei-type-2.json REGISTERED \
                 type3 testdata/ecs/ei-type-3.json REGISTERED \
@@ -273,62 +266,62 @@ ecs_api_edp_get_producer_status 200 prod-d ENABLED
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
-        ecs_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ecs_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ecs/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
             ecs_api_a1_get_job_status 200 type1 job$i ENABLED
         else
             ecs_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
+            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ecs/job-template.json VALIDATE
             ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 1 ]; then
-        ecs_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ecs_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ecs/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
             ecs_api_a1_get_job_status 200 type2 job$i ENABLED
         else
             ecs_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
+            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ecs/job-template.json VALIDATE
             ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
-        ecs_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ecs_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ecs/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
             ecs_api_a1_get_job_status 200 type3 job$i ENABLED
         else
             ecs_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
+            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ecs/job-template.json VALIDATE
             ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
-        ecs_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ecs_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ecs/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
             ecs_api_a1_get_job_status 200 type4 job$i ENABLED
         else
             ecs_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
+            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ecs/job-template.json VALIDATE
             ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
-        ecs_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ecs_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ecs/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
             ecs_api_a1_get_job_status 200 type5 job$i ENABLED
         else
             ecs_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
+            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ecs/job-template.json VALIDATE
             ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
@@ -356,10 +349,10 @@ if [ $use_info_jobs ]; then
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 20 30
+    cr_equal received_callbacks 20 30
 
 else
-    cr_equal received_callbacks 0 30
+    cr_equal received_callbacks 0 30
 
 fi
 
@@ -381,7 +374,7 @@ fi
 
 stop_ecs
 
-cr_api_reset
+cr_api_reset 0
 
 start_stopped_ecs
 
@@ -401,7 +394,7 @@ if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     fi
 fi
 
-cr_equal received_callbacks 0
+cr_equal received_callbacks 0
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
@@ -787,18 +780,18 @@ if [ $use_info_jobs ]; then
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_equal received_callbacks?id=type-status2 5
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_equal received_callbacks?id=type-status2 5
 
-    cr_api_check_all_ecs_subscription_events 200 type-status1 \
+    cr_api_check_all_ecs_subscription_events 200 type-status1 \
         type101 testdata/ecs/info-type-1.json REGISTERED \
         type102 testdata/ecs/info-type-2.json REGISTERED \
         type103 testdata/ecs/info-type-3.json REGISTERED \
         type104 testdata/ecs/info-type-4.json REGISTERED \
         type105 testdata/ecs/info-type-5.json REGISTERED
 
-    cr_api_check_all_ecs_subscription_events 200 type-status2 \
+    cr_api_check_all_ecs_subscription_events 200 type-status2 \
         type101 testdata/ecs/info-type-1.json REGISTERED \
         type102 testdata/ecs/info-type-2.json REGISTERED \
         type103 testdata/ecs/info-type-3.json REGISTERED \
@@ -806,7 +799,7 @@ if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
         type105 testdata/ecs/info-type-5.json REGISTERED
 
 else
-    cr_equal received_callbacks 0 30
+    cr_equal received_callbacks 0 30
 fi
 
 check_ecs_logs
index f194817..e7d511f 100755 (executable)
@@ -38,17 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -64,7 +53,7 @@ use_ecs_rest_https
 use_prod_stub_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
    echo "PMS VERSION 2 (V2) is required"
    exit 1
@@ -102,7 +91,7 @@ else
     consul_config_app                      ".consul_config.json"
 fi
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
@@ -144,7 +133,7 @@ done
 #Check the number of types
 api_equal json:policy-types 2 300
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in STD
 for ((i=1; i<=$STD_NUM_RICS; i++))
@@ -182,8 +171,8 @@ fi
 TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
@@ -195,7 +184,7 @@ ecs_api_service_status 200
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
 
     ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
@@ -270,17 +259,17 @@ else
 fi
 
 if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 3 30
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
-    cr_api_check_all_ecs_events 200 job1-status DISABLED
-    cr_api_check_all_ecs_events 200 job2-status DISABLED
+    cr_equal received_callbacks 3 30
+    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+    cr_api_check_all_ecs_events 200 job1-status DISABLED
+    cr_api_check_all_ecs_events 200 job2-status DISABLED
 else
-    cr_equal received_callbacks 2 30
-    cr_api_check_all_ecs_events 200 job1-status DISABLED
-    cr_api_check_all_ecs_events 200 job2-status DISABLED
+    cr_equal received_callbacks 2 30
+    cr_api_check_all_ecs_events 200 job1-status DISABLED
+    cr_api_check_all_ecs_events 200 job2-status DISABLED
 fi
 
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
 
 check_policy_agent_logs
 check_ecs_logs
index 321dd24..1b05763 100755 (executable)
@@ -31,11 +31,7 @@ SUPPORTED_PROFILES="ONAP-ISTANBUL"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER"
 
-. ../common/testcase_common.sh  $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/http_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 6e22ced..ae0fbb6 100755 (executable)
@@ -38,12 +38,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/ecs_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -66,7 +60,7 @@ start_kube_proxy
 
 start_http_proxy
 
-start_cr
+start_cr 1
 
 start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
 
@@ -92,7 +86,7 @@ ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
@@ -105,20 +99,20 @@ done
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
-cr_equal received_callbacks $(($NUM_JOBS*2)) 60
+cr_equal received_callbacks $(($NUM_JOBS*2)) 60
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=joby-data$i 2
+    cr_equal received_callbacks?id=joby-data$i 2
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
 done
 
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
 
 #### TEST COMPLETE ####
 
index 4503c88..232e5a8 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -109,7 +100,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         if [ $RUNMODE == "DOCKER" ]; then
             start_consul_cbs
@@ -156,13 +147,13 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 2 120  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         START_ID=2000
         NUM_POLICIES=10000  # Must be at least 100
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
index da4bf1e..a1c4fca 100755 (executable)
 TC_ONELINE_DESCR="App test DMAAP Meditor and DMAAP Adapter"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS DMAAPMED DMAAPADP KUBEPROXY MR CR"
+DOCKER_INCLUDED_IMAGES="ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" ECS DMAAPMED DMAAPADP KUBEPROXY MR CR"
+KUBE_INCLUDED_IMAGES=" ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
 
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
@@ -39,21 +39,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -62,7 +47,13 @@ setup_testenvironment
 #Local vars in test script
 ##########################
 FLAT_A1_EI="1"
-NUM_JOBS=100  # Mediator and adapter gets same number of jobs
+NUM_CR=10 # Number of callback receivers, divide all callbacks to this number of servers - for load sharing
+## Note: The number jobs must be a multiple of the number of CRs in order to calculate the number of expected event in each CR
+NUM_JOBS=200  # Mediator and adapter gets same number of jobs for every type
+
+if [ $NUM_JOBS -lt $NUM_CR ]; then
+    __log_conf_fail_general "Number of jobs: $NUM_JOBS must be greater then the number of CRs: $NUM_CR"
+fi
 
 clean_environment
 
@@ -75,13 +66,15 @@ use_dmaapmed_https
 
 start_kube_proxy
 
-start_cr
+start_cr $NUM_CR
 
 start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
 
 set_ecs_trace
 
-start_mr
+start_mr    "unauthenticated.dmaapmed.json" "/events" "dmaapmediatorproducer/STD_Fault_Messages" \
+            "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
+            "unauthenticated.dmaapadp_kafka.text" "/events" "dmaapadapterproducer/msgs"
 
 start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_DATA_FILE
 
@@ -93,122 +86,341 @@ ecs_equal json:data-producer/v1/info-producers 2 60
 
 # Check producers
 ecs_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages
+ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
 ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
 
 
-# Create jobs for adapter
+# Create jobs for adapter - CR stores data as MD5 hash
 start_timer "Create adapter jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 job-adp-$i ExampleInformationType $CR_SERVICE_MR_PATH/job-adp-data$i info-owner-adp-$i $CR_SERVICE_MR_PATH/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
+    cr_index=$(($i%$NUM_CR))
+    service_mr="CR_SERVICE_MR_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ecs_api_idc_put_job 201 job-adp-$i ExampleInformationType ${!service_mr}/job-adp-data$i"?storeas=md5" info-owner-adp-$i ${!service_app}/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
+
+done
+print_timer
+
+# Create jobs for adapter kafka - CR stores data as MD5 hash
+start_timer "Create adapter (kafka) jobs: $NUM_JOBS"
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+    cr_index=$(($i%$NUM_CR))
+    service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ecs_api_idc_put_job 201 job-adp-kafka-$i ExampleInformationTypeKafka ${!service_text}/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i ${!service_app}/job_status_info-owner-adp-kafka-$i testdata/dmaap-adapter/job-template-1-kafka.json
+
 done
-print_timer "Create adapter jobs: $NUM_JOBS"
+print_timer
 
-# Create jobs for mediator
+# Create jobs for mediator - CR stores data as MD5 hash
 start_timer "Create mediator jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 job-med-$i STD_Fault_Messages $CR_SERVICE_MR_PATH/job-med-data$i info-owner-med-$i $CR_SERVICE_MR_PATH/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
+    cr_index=$(($i%$NUM_CR))
+    service_mr="CR_SERVICE_MR_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ecs_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
 done
-print_timer "Create mediator jobs: $NUM_JOBS"
+print_timer
 
 # Check job status
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     ecs_api_a1_get_job_status 200 job-med-$i ENABLED 30
     ecs_api_a1_get_job_status 200 job-adp-$i ENABLED 30
+    ecs_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
+done
+
+
+EXPECTED_DATA_DELIV=0 #Total delivered msg per CR
+DATA_DELIV_JOBS=0 #Total delivered msg per job per CR
+
+mr_api_generate_json_payload_file 1 ./tmp/data_for_dmaap_test.json
+mr_api_generate_text_payload_file 1 ./tmp/data_for_dmaap_test.txt
+
+## Send json file via message-router to adapter
+DATA_DELIV_JOBS=5 #Each job will eventually get 5 msgs
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+# Check received data callbacks from adapter
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+done
+
+
+## Send text file via message-router to adapter kafka
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
 done
 
-EXPECTED_DATA_DELIV=0
+# Check received data callbacks from adapter kafka
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+done
+
+## Send json file via message-router to mediator
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+
+# Check received data callbacks from mediator
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+done
 
-# Send data to adapter via mr
+
+# Send small json via message-router to adapter
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
+DATA_DELIV_JOBS=7 #Each job will eventually get 5+2 msgs
+
 # Wait for data recetption, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery adapter, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery adapter, 2 json per job"
-EXPECTED_DATA_DELIV=$(cr_read received_callbacks)
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
+
+# Send small text via message-router to adapter kafka
+mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------1'
+mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------3'
+
+# Wait for data reception, adapter kafka
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery adapter kafka, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
 
-# Send data to mediator
+# Send small json via message-router to mediator
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
 
 # Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery mediator, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery mediator, 2 json per job"
-EXPECTED_DATA_DELIV=$(cr_read received_callbacks)
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 # Check received number of messages for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=job-med-data$i 2
-    cr_equal received_callbacks?id=job-adp-data$i 2
+    cr_index=$(($i%$NUM_CR))
+    cr_equal $cr_index received_callbacks?id=job-med-data$i $DATA_DELIV_JOBS
+    cr_equal $cr_index received_callbacks?id=job-adp-data$i $DATA_DELIV_JOBS
+    cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i $DATA_DELIV_JOBS
 done
 
 # Check received data and order for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 job-med-data$i '{"msg":"msg-0"}'
-    cr_api_check_single_genric_json_event 200 job-med-data$i '{"msg":"msg-2"}'
-    cr_api_check_single_genric_json_event 200 job-adp-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_json_event 200 job-adp-data$i '{"msg":"msg-3"}'
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-0"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-2"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'
 done
 
 # Set delay in the callback receiver to slow down callbacks
-SEC_DELAY=5
-cr_delay_callback 200 $SEC_DELAY
+SEC_DELAY=2
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_delay_callback 200 $i $SEC_DELAY
+done
 
-# Send data to adapter via mr
+# Send small json via message-router to adapter
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-5"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-7"}'
 
 # Wait for data recetption, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
-start_timer "Data delivery adapter with $SEC_DELAY seconds delay, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV $(($NUM_JOBS+300))
-print_timer "Data delivery adapter with $SEC_DELAY seconds delay, 2 json per job"
-EXPECTED_DATA_DELIV=$(cr_read received_callbacks)
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
+
 
+# Send small text via message-router to adapter kafka
+mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------5'
+mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------7'
+
+# Wait for data reception, adapter kafka
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery adapter kafka with $SEC_DELAY seconds delay in consumer, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
-# Send data to mediator
+
+# Send small json via message-router to mediator
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-4"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-6"}'
 
 # Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
-start_timer "Data delivery mediator with $SEC_DELAY seconds delay, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 1000
-print_timer "Data delivery mediator with $SEC_DELAY seconds delay, 2 json per job"
-EXPECTED_DATA_DELIV=$(cr_read received_callbacks)
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 # Check received number of messages for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=job-med-data$i 4
-    cr_equal received_callbacks?id=job-adp-data$i 4
+    cr_index=$(($i%$NUM_CR))
+    cr_equal $cr_index received_callbacks?id=job-med-data$i 9
+    cr_equal $cr_index received_callbacks?id=job-adp-data$i 9
+    cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i 9
 done
 
 # Check received data and order for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 job-med-data$i '{"msg":"msg-4"}'
-    cr_api_check_single_genric_json_event 200 job-med-data$i '{"msg":"msg-6"}'
-    cr_api_check_single_genric_json_event 200 job-adp-data$i '{"msg":"msg-5"}'
-    cr_api_check_single_genric_json_event 200 job-adp-data$i '{"msg":"msg-7"}'
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-4"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-6"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-5"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-7"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------5'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------7'
 done
 
-
-
 #### TEST COMPLETE ####
 
 store_logs          END
 
 print_result
 
-auto_clean_environment
\ No newline at end of file
+auto_clean_environment
index e017643..53437e8 100755 (executable)
@@ -28,15 +28,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -62,7 +54,7 @@ for consul_conf in $TESTED_VARIANTS ; do
 
     # Create service to be able to receive events when rics becomes available
     # Must use rest towards the agent since dmaap is not configured yet
-    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
     # Start one RIC of each type
     start_ric_simulators ricsim_g1 1  OSC_2.1.0
@@ -73,7 +65,7 @@ for consul_conf in $TESTED_VARIANTS ; do
 
     start_mr
 
-    start_cr
+    start_cr 1
 
     start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -93,9 +85,9 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 3 300
 
-        cr_equal received_callbacks 3 120
+        cr_equal received_callbacks 3 120
 
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
     else
         api_equal json:rics 2 300
     fi
@@ -114,9 +106,9 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 4 120
 
-        cr_equal received_callbacks 4 120
+        cr_equal received_callbacks 4 120
 
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g2_2
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g2_2
     else
         api_equal json:rics 3 120
     fi
@@ -138,7 +130,7 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 3 120
 
-        cr_equal received_callbacks 4 120
+        cr_equal received_callbacks 4 120
     else
         api_equal json:rics 2 120
     fi
index 25bdc4c..31e40ab 100755 (executable)
@@ -32,16 +32,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -75,7 +66,7 @@ for interface in $TESTED_VARIANTS ; do
 
     start_ric_simulators ricsim_g1 $NUM_RICS_2 OSC_2.1.0
 
-    start_cr
+    start_cr 1
 
     start_mr
 
@@ -108,7 +99,7 @@ for interface in $TESTED_VARIANTS ; do
 
     # Create service to be able to receive events when rics becomes available
     # Must use rest towards the agent since dmaap is not configured yet
-    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
     #Load first config
     if [ $RUNMODE == "KUBE" ]; then
@@ -126,8 +117,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 8 300
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 8 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
+        cr_equal received_callbacks?id=ric-registration 8 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
     fi
 
     api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:NOTYPE:???? \
@@ -205,8 +196,8 @@ for interface in $TESTED_VARIANTS ; do
                              ricsim_g1_8:me1_ricsim_g1_8,me2_ricsim_g1_8:4,5:???? "
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 16 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
+        cr_equal received_callbacks?id=ric-registration 16 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
     fi
 
     #Load config with all rics
@@ -219,8 +210,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 10 120
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 18 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9  ricsim_g1_10
+        cr_equal received_callbacks?id=ric-registration 18 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9  ricsim_g1_10
     fi
 
     sim_put_policy_type 201 ricsim_g1_9 5 testdata/OSC/sim_5.json
@@ -269,8 +260,8 @@ for interface in $TESTED_VARIANTS ; do
                              ricsim_g1_10:me1_ricsim_g1_10,me2_ricsim_g1_10:NOTYPE:???? "
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 19 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9
+        cr_equal received_callbacks?id=ric-registration 19 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9
     fi
 
     #No policy type in sim #10
@@ -281,10 +272,10 @@ for interface in $TESTED_VARIANTS ; do
         api_equal json:policy_types 5
     fi
 
-    api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/serv1"
+    api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/serv1"
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        notificationurl=$CR_SERVICE_APP_PATH"/test"
+        notificationurl=$CR_SERVICE_APP_PATH_0"/test"
     else
         notificationurl=""
     fi
@@ -301,8 +292,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 8 120
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 19 120
-        cr_api_check_all_sync_events 200 ric-registration EMPTY
+        cr_equal received_callbacks?id=ric-registration 19 120
+        cr_api_check_all_sync_events 200 ric-registration EMPTY
     fi
 
     if [ "$PMS_VERSION" == "V2" ]; then
index 27675be..e509f6c 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -101,7 +92,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -151,10 +142,10 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 2 300  #Wait for the agent to refresh types from the simulators
         fi
 
-        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
index 02f6758..af46814 100755 (executable)
@@ -37,15 +37,7 @@ SUPPORTED_PROFILES="ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -67,7 +59,7 @@ NUM_POLICIES_PER_RIC=2000
 generate_policy_uuid
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -131,7 +123,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             consul_config_app                      ".consul_config.json"
         fi
 
-        start_cr
+        start_cr 1
 
         api_get_status 200
 
@@ -152,7 +144,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 1 300  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         echo "Check the number of types in the agent for each ric is 1"
         for ((i=1; i<=$NUM_RICS; i++))
@@ -180,7 +172,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             sim_equal ricsim_g1_$i num_instances $NUM_POLICIES_PER_RIC
         done
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         stop_policy_agent
 
@@ -217,7 +209,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         print_timer "Restore $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices after restart over $interface using "$__httpx
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         start_timer "Delete $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
 
@@ -247,7 +239,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         sleep_wait 200
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         api_equal json:policies 0
 
index 02b8db9..ad71f46 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -97,7 +88,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -197,7 +188,7 @@ do
 done
 
 echo "Register a service"
-api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 TEST_DURATION=$((24*3600*$DAYS))
 TEST_START=$SECONDS
@@ -207,7 +198,7 @@ AGENT_INTERFACES="REST REST_PARALLEL DMAAP DMAAP-BATCH"
 MR_MESSAGES=0
 
 if [ "$PMS_VERSION" == "V2" ]; then
-      notificationurl=$CR_SERVICE_APP_PATH"/test"
+      notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
       notificationurl=""
 fi
index a5f1978..bd61b3a 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -67,7 +58,7 @@ NUM_POLICIES_PER_RIC=500
 generate_policy_uuid
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -135,7 +126,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr # Not used, but removes error messages from the agent log
 
-        start_cr
+        start_cr 1
 
         api_get_status 200
 
@@ -156,7 +147,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 1 300  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH_0/1"
 
         echo "Check the number of types in the agent for each ric is 1"
         for ((i=1; i<=$NUM_RICS; i++))
index e698f62..886b664 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/cr_api_functions.sh
 
 setup_testenvironment
 
@@ -184,10 +175,10 @@ fi
 # Create policies
 use_agent_rest_http
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
index 71a5d50..54f3e4d 100755 (executable)
@@ -35,18 +35,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -108,7 +96,7 @@ start_sdnc
 
 start_policy_agent
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
@@ -240,16 +228,16 @@ else
     api_equal json:policy_ids 0
 fi
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/ER-app"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/ER-app"
 
 # Create policies in STD
 for ((i=0; i<$STD_NUM_RICS; i++))
 do
     ricid=$((3+$i))
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
 done
 
 #Create policies in STD 2
@@ -257,9 +245,9 @@ for ((i=0; i<$STD_NUM_RICS; i++))
 do
    ricid=$((5+$i))
    generate_policy_uuid
-   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos_template.json 1
+   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos_template.json 1
    generate_policy_uuid
-   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos2_template.json 1
+   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos2_template.json 1
 done
 
 # Create policies in OSC
@@ -267,9 +255,9 @@ for ((i=0; i<$OSC_NUM_RICS; i++))
 do
     ricid=$((1+$i))
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi1_template.json 1
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi2_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi2_template.json 1
 done
 
 
@@ -338,8 +326,8 @@ CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
 TARGET1="$RIC_SIM_HTTPX://a1-sim-std2-0.a1-sim:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://a1-sim-std2-1.a1-sim:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
@@ -420,12 +408,12 @@ NUM_JOBS=5
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
+    ecs_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH_0/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH_0/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
@@ -438,19 +426,19 @@ mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
-cr_equal received_callbacks $(($NUM_JOBS*2*2)) 60
+cr_equal received_callbacks $(($NUM_JOBS*2*2)) 60
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=jobx-data$i 2
-    cr_equal received_callbacks?id=joby-data$i 2
+    cr_equal received_callbacks?id=jobx-data$i 2
+    cr_equal received_callbacks?id=joby-data$i 2
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-0"}'
-    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-2"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-0"}'
+    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-2"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
 done
 
 
index 15b5c5b..5d23034 100755 (executable)
@@ -23,7 +23,7 @@ TC_ONELINE_DESCR="ONAP Use case REQ-626"
 DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR DMAAPMR PA RICSIM SDNC NGW KUBEPROXY"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="CP CR MR DMAAPMR PA RICSIM SDNC KUBEPROXY NGW"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -62,7 +53,7 @@ use_simulator_https
 use_mr_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     echo "Version V2 of PMS is needed, exiting..."
     exit 1
@@ -99,7 +90,8 @@ for interface in $TESTED_VARIANTS ; do
 
     start_ric_simulators $RIC_SIM_PREFIX"_g3" $STD_NUM_RICS STD_2.0.0
 
-    start_mr
+    start_mr    "$MR_READ_TOPIC"  "/events" "users/policy-agent" \
+                "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
 
     start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -211,7 +203,7 @@ for interface in $TESTED_VARIANTS ; do
     # Create policies
     use_agent_rest_http
 
-    api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+    api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
     # Create policies in OSC
     for ((i=1; i<=$OSC_NUM_RICS; i++))
index 20a02cb..f3d5dd4 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -61,7 +52,7 @@ use_sdnc_https
 use_simulator_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -210,7 +201,7 @@ fi
 # Create policies
 use_agent_rest_http
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in OSC
 for ((i=1; i<=$OSC_NUM_RICS; i++))
index c93a6d7..deff460 100755 (executable)
@@ -38,19 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -73,7 +60,7 @@ fi
 
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
    echo "PMS VERSION 2 (V2) is required"
    exit 1
@@ -113,7 +100,7 @@ else
     consul_config_app                      ".consul_config.json"
 fi
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
@@ -168,7 +155,7 @@ done
 #Check the number of types
 api_equal json:policy-types 2 300
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in STD
 for ((i=1; i<=$STD_NUM_RICS; i++))
@@ -205,8 +192,8 @@ fi
 TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/callbacks/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/callbacks/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/callbacks/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/callbacks/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
index 1f8ef5d..c914c67 100644 (file)
@@ -127,8 +127,7 @@ SUPPORTED_RUNMODES=<List of runmodes, DOCKER and/or KUBE>
 
 CONDITIONALLY_IGNORED_IMAGES=<list of images to exclude if it does not exist in the profile file>
 
-. ../common/testcase_common.sh  $@
-< other scripts need to be sourced for specific interfaces>
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 47b4514..d2d0e9a 100755 (executable)
@@ -38,17 +38,7 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -56,7 +46,11 @@ setup_testenvironment
 
 clean_environment
 start_kube_proxy
-start_mr
+start_mr    "$MR_READ_TOPIC"  "/events" "users/policy-agent" \
+            "$MR_WRITE_TOPIC" "/events" "users/mr-stub" \
+            "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
+            "unauthenticated.dmaapmed.json" "/events" "maapmediatorproducer/STD_Fault_Messages"
+
 if [ $RUNMODE == "KUBE" ]; then
     :
 else
diff --git a/test/auto-test/testdata/dmaap-adapter/job-schema-1-kafka b/test/auto-test/testdata/dmaap-adapter/job-schema-1-kafka
new file mode 100644 (file)
index 0000000..290b70a
--- /dev/null
@@ -0,0 +1,28 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+      "type": "string"
+    },
+    "maxConcurrency": {
+      "type": "integer"
+    },
+    "bufferTimeout": {
+      "type": "object",
+      "properties": {
+        "maxSize": {
+          "type": "integer"
+        },
+        "maxTimeMiliseconds": {
+          "type": "integer"
+        }
+      },
+      "required": [
+        "maxSize",
+        "maxTimeMiliseconds"
+      ]
+    }
+  },
+  "required": []
+}
\ No newline at end of file
diff --git a/test/auto-test/testdata/dmaap-adapter/job-template-1-kafka.json b/test/auto-test/testdata/dmaap-adapter/job-template-1-kafka.json
new file mode 100644 (file)
index 0000000..d549397
--- /dev/null
@@ -0,0 +1,7 @@
+{
+  "maxConcurrency": 1,
+  "bufferTimeout": {
+      "maxSize": 1,
+      "maxTimeMiliseconds": 0
+  }
+}
\ No newline at end of file
index 18b9656..11e2a97 100644 (file)
@@ -153,6 +153,7 @@ The script can be started with these arguments
 | `--print-stats` |  Prints the number of tests, failed tests, failed configuration and deviations after each individual test or config |
 | `--override <file>` |  Override setting from the file supplied by --env-file |
+| `--pre-clean` |  Clean kube resources when running docker and vice versa |
+| `--gen-stats`  | Collect container/pod runtime statistics |
 | `help` | Print this info along with the test script description and the list of app short names supported |
 
 ## Function: setup_testenvironment ##
@@ -202,17 +203,6 @@ Print the value of the timer (in seconds) previously started by 'start_timer'. (
 | --------- | ----------- |
 | `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
 
-## Function: print_and_reset_timer ##
-
-Print the value of the timer (in seconds) previously started by 'start_timer'. Also reset the timer to 0. The result of the timer as well as the args to the function will also be printed in the test report.
-| arg list |
-|--|
-| `<timer-message-to-print>` |
-
-| parameter | description |
-| --------- | ----------- |
-| `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
-
 ## Function: deviation ##
 
 Mark a test as a deviation from the requirements. The list of deviations will be printed in the test report.
index 17f80a5..f2777eb 100644 (file)
@@ -23,7 +23,8 @@
 # one for sending the requests and one for receiving the response
 # but only when using the DMAAP interface
 # REST or DMAAP is controlled of the base url of $XX_ADAPTER
-# arg: (PA|ECS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file>]) | (PA|ECS RESPONSE <correlation-id>)
+# arg: (PA|ECS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file> [mime-type]]) | (PA|ECS RESPONSE <correlation-id>)
+# Default mime type for file is application/json unless specified in parameter mime-type
 # (Not for test scripts)
 __do_curl_to_api() {
        TIMESTAMP=$(date "+%Y-%m-%d %H:%M:%S")
@@ -39,6 +40,7 @@ __do_curl_to_api() {
 
        paramError=0
        input_url=$3
+       fname=$4
     if [ $# -gt 0 ]; then
         if [ $1 == "PA" ]; then
                        __ADAPTER=$PA_ADAPTER
@@ -75,17 +77,21 @@ __do_curl_to_api() {
                        __ADAPTER=$MR_STUB_ADAPTER
                        __ADAPTER_TYPE=$MR_STUB_ADAPTER_TYPE
             __RETRY_CODES=""
-        else
+        elif [ $1 == "DMAAPMR" ]; then
+                       __ADAPTER=$MR_DMAAP_ADAPTER_HTTP
+                       __ADAPTER_TYPE=$MR_DMAAP_ADAPTER_TYPE
+            __RETRY_CODES=""
+               else
             paramError=1
         fi
-               if [ $__ADAPTER_TYPE == "MR-HTTP" ]; then
+               if [ "$__ADAPTER_TYPE" == "MR-HTTP" ]; then
                        __ADAPTER=$MR_ADAPTER_HTTP
                fi
-               if [ $__ADAPTER_TYPE == "MR-HTTPS" ]; then
+               if [ "$__ADAPTER_TYPE" == "MR-HTTPS" ]; then
                        __ADAPTER=$MR_ADAPTER_HTTPS
                fi
     fi
-    if [ $# -lt 3 ] || [ $# -gt 4 ]; then
+    if [ $# -lt 3 ] || [ $# -gt 5 ]; then
                paramError=1
     else
                timeout=""
@@ -100,6 +106,10 @@ __do_curl_to_api() {
                fi
                if [ $# -gt 3 ]; then
                        content=" -H Content-Type:application/json"
+                       fname=$4
+                       if [ $# -gt 4 ]; then
+                               content=" -H Content-Type:"$5
+                       fi
                fi
                if [ $2 == "GET" ] || [ $2 == "GET_BATCH" ]; then
                        oper="GET"
@@ -108,15 +118,15 @@ __do_curl_to_api() {
                        fi
                elif [ $2 == "PUT" ] || [ $2 == "PUT_BATCH" ]; then
                        oper="PUT"
-                       if [ $# -eq 4 ]; then
-                               file=" --data-binary @$4"
+                       if [ $# -gt 3 ]; then
+                               file=" --data-binary @$fname"
                        fi
                        accept=" -H accept:application/json"
                elif [ $2 == "POST" ] || [ $2 == "POST_BATCH" ]; then
                        oper="POST"
                        accept=" -H accept:*/*"
-                       if [ $# -eq 4 ]; then
-                               file=" --data-binary @$4"
+                       if [ $# -gt 3 ]; then
+                               file=" --data-binary @$fname"
                                accept=" -H accept:application/json"
                        fi
                elif [ $2 == "DELETE" ] || [ $2 == "DELETE_BATCH" ]; then
@@ -153,8 +163,8 @@ __do_curl_to_api() {
         oper=" -X "$oper
         curlString="curl -k $proxyflag "${oper}${timeout}${httpcode}${accept}${content}${url}${file}
         echo " CMD: "$curlString >> $HTTPLOG
-               if [ $# -eq 4 ]; then
-                       echo " FILE: $(<$4)" >> $HTTPLOG
+               if [ $# -gt 3 ]; then
+                       echo " FILE: $(<$fname)" >> $HTTPLOG
                fi
 
                # Do retry for configured response codes, otherwise only one attempt
@@ -190,12 +200,12 @@ __do_curl_to_api() {
     else
                if [ $oper != "RESPONSE" ]; then
                        requestUrl=$input_url
-                       if [ $2 == "PUT" ] && [ $# -eq 4 ]; then
-                               payload="$(cat $4 | tr -d '\n' | tr -d ' ' )"
+                       if [ $2 == "PUT" ] && [ $# -gt 3 ]; then
+                               payload="$(cat $fname | tr -d '\n' | tr -d ' ' )"
                                echo "payload: "$payload >> $HTTPLOG
                                file=" --data-binary "$payload
-                       elif [ $# -eq 4 ]; then
-                               echo " FILE: $(cat $4)" >> $HTTPLOG
+                       elif [ $# -gt 3 ]; then
+                               echo " FILE: $(cat $fname)" >> $HTTPLOG
                        fi
                        #urlencode the request url since it will be carried by send-request url
                        requestUrl=$(python3 -c "from __future__ import print_function; import urllib.parse, sys; print(urllib.parse.quote(sys.argv[1]))"  "$input_url")
diff --git a/test/common/cbs_api_functions.sh b/test/common/cbs_api_functions.sh
new file mode 100644 (file)
index 0000000..f08b250
--- /dev/null
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# Functions for CBS are included in consul_api_functions.sh
+
+
+
index 405c194..a3a3e5b 100755 (executable)
@@ -90,7 +90,7 @@ __kube_delete_all_resources() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid in namespace $namespace with label autotest "
-                               kubectl delete $restype $resid -n $namespace 1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace 1> /dev/null 2> /dev/null
                        done
                fi
        done
@@ -104,7 +104,7 @@ __kube_delete_all_pv() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid with label autotest "
-                               kubectl delete $restype $resid 1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid 1> /dev/null 2> /dev/null
                        done
                fi
        done
@@ -119,7 +119,7 @@ __kube_wait_for_delete() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid in namespace $namespace with label autotest "
-                               kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
                                echo -ne "  Waiting for $restype $resid in namespace $namespace with label autotest to be deleted..."$SAMELINE
                                T_START=$SECONDS
                                result="dummy"
@@ -147,7 +147,7 @@ __kube_wait_for_delete_pv() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid with label autotest "
-                               kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
                                echo -ne "  Waiting for $restype $resid with label autotest to be deleted..."$SAMELINE
                                T_START=$SECONDS
                                result="dummy"
similarity index 67%
rename from test/common/consul_cbs_functions.sh
rename to test/common/consul_api_functions.sh
index 747eaab..af85ff3 100644 (file)
@@ -165,6 +165,21 @@ __CBS_initial_setup() {
        CBS_SERVICE_PATH="http://"$CBS_APP_NAME":"$CBS_INTERNAL_PORT
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__CONSUL_statisics_setup() {
+       echo ""
+}
+
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__CBS_statisics_setup() {
+       echo ""
+}
 #######################################################
 
 
@@ -220,123 +235,6 @@ consul_config_app() {
 
 }
 
-# Function to perpare the consul configuration according to the current simulator configuration
-# args: SDNC|NOSDNC <output-file>
-# (Function for test scripts)
-prepare_consul_config() {
-       echo -e $BOLD"Prepare Consul config"$EBOLD
-
-       echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
-
-       if [ $# != 2 ];  then
-               ((RES_CONF_FAIL++))
-       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
-               exit 1
-       fi
-
-       if [ $1 == "SDNC" ]; then
-               echo -e " Config$BOLD including SDNC$EBOLD configuration"
-       elif [ $1 == "NOSDNC" ];  then
-               echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
-       else
-               ((RES_CONF_FAIL++))
-       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
-               exit 1
-       fi
-
-       config_json="\n            {"
-       if [ $1 == "SDNC" ]; then
-               config_json=$config_json"\n   \"controller\": ["
-               config_json=$config_json"\n                     {"
-               config_json=$config_json"\n                       \"name\": \"$SDNC_APP_NAME\","
-               config_json=$config_json"\n                       \"baseUrl\": \"$SDNC_SERVICE_PATH\","
-               config_json=$config_json"\n                       \"userName\": \"$SDNC_USER\","
-               config_json=$config_json"\n                       \"password\": \"$SDNC_PWD\""
-               config_json=$config_json"\n                     }"
-               config_json=$config_json"\n   ],"
-       fi
-
-       config_json=$config_json"\n   \"streams_publishes\": {"
-       config_json=$config_json"\n                            \"dmaap_publisher\": {"
-       config_json=$config_json"\n                              \"type\": \"message-router\","
-       config_json=$config_json"\n                              \"dmaap_info\": {"
-       config_json=$config_json"\n                                \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
-       config_json=$config_json"\n                              }"
-       config_json=$config_json"\n                            }"
-       config_json=$config_json"\n   },"
-       config_json=$config_json"\n   \"streams_subscribes\": {"
-       config_json=$config_json"\n                             \"dmaap_subscriber\": {"
-       config_json=$config_json"\n                               \"type\": \"message-router\","
-       config_json=$config_json"\n                               \"dmaap_info\": {"
-       config_json=$config_json"\n                                   \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
-       config_json=$config_json"\n                                 }"
-       config_json=$config_json"\n                               }"
-       config_json=$config_json"\n   },"
-
-       config_json=$config_json"\n   \"ric\": ["
-
-       if [ $RUNMODE == "KUBE" ]; then
-               result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
-               rics=""
-               ric_cntr=0
-               if [ $? -eq 0 ] && [ ! -z "$result" ]; then
-                       for im in $result; do
-                               if [[ $im != *"-0" ]]; then
-                                       ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
-                                       rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
-                                       let ric_cntr=ric_cntr+1
-                               fi
-                       done
-               fi
-               if [ $ric_cntr -eq 0 ]; then
-                       echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
-               fi
-       else
-               rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
-               if [ $? -ne 0 ] || [ -z "$rics" ]; then
-                       echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
-                       ((RES_CONF_FAIL++))
-                       return 1
-               fi
-       fi
-       cntr=0
-       for ric in $rics; do
-               if [ $cntr -gt 0 ]; then
-                       config_json=$config_json"\n          ,"
-               fi
-               config_json=$config_json"\n          {"
-               if [ $RUNMODE == "KUBE" ]; then
-                       ric_id=${ric%.*.*} #extract pod id from full hosthame
-                       ric_id=$(echo "$ric_id" | tr '-' '_')
-               else
-                       ric_id=$ric
-               fi
-               echo " Found a1 sim: "$ric_id
-               config_json=$config_json"\n            \"name\": \"$ric_id\","
-               config_json=$config_json"\n            \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
-               if [ $1 == "SDNC" ]; then
-                       config_json=$config_json"\n            \"controller\": \"$SDNC_APP_NAME\","
-               fi
-               config_json=$config_json"\n            \"managedElementIds\": ["
-               config_json=$config_json"\n              \"me1_$ric_id\","
-               config_json=$config_json"\n              \"me2_$ric_id\""
-               config_json=$config_json"\n            ]"
-               config_json=$config_json"\n          }"
-               let cntr=cntr+1
-       done
-
-       config_json=$config_json"\n           ]"
-       config_json=$config_json"\n}"
-
-       if [ $RUNMODE == "KUBE" ]; then
-               config_json="{\"config\":"$config_json"}"
-       fi
-
-       printf "$config_json">$2
-
-       echo ""
-}
-
 # Start Consul and CBS
 # args: -
 # (Function for test scripts)
similarity index 95%
rename from test/common/control_panel_api_functions.sh
rename to test/common/cp_api_functions.sh
index eda6fe3..295e16a 100644 (file)
@@ -91,6 +91,19 @@ __CP_store_docker_logs() {
 __CP_initial_setup() {
        use_control_panel_http
 }
+
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__CP_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "CP $CONTROL_PANEL_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "CP $CONTROL_PANEL_APP_NAME"
+       fi
+}
+
 #######################################################
 
 
index ba46510..1b82ea0 100644 (file)
@@ -94,9 +94,14 @@ __CR_kube_delete_all() {
 # args: <log-dir> <file-prexix>
 __CR_store_docker_logs() {
        if [ $RUNMODE == "KUBE" ]; then
-               kubectl  logs -l "autotest=CR" -n $KUBE_SIM_NAMESPACE --tail=-1 > $1$2_cr.log 2>&1
+               for podname in $(kubectl get pods -n $KUBE_SIM_NAMESPACE -l "autotest=CR" -o custom-columns=":metadata.name"); do
+                       kubectl logs -n $KUBE_SIM_NAMESPACE $podname --tail=-1 > $1$2_$podname.log 2>&1
+               done
        else
-               docker logs $CR_APP_NAME > $1$2_cr.log 2>&1
+               crs=$(docker ps --filter "name=$CR_APP_NAME" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+               for crid in $crs; do
+                       docker logs $crid > $1$2_$crid.log 2>&1
+               done
        fi
 }
 
@@ -107,12 +112,35 @@ __CR_initial_setup() {
        use_cr_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__CR_statisics_setup() {
+       for ((CR_INSTANCE=MAX_CR_APP_COUNT; CR_INSTANCE>0; CR_INSTANCE-- )); do
+               if [ $RUNMODE == "KUBE" ]; then
+                       CR_INSTANCE_KUBE=$(($CR_INSTANCE-1))
+                       echo -n " CR-$CR_INSTANCE_KUBE $CR_APP_NAME-$CR_INSTANCE_KUBE $KUBE_SIM_NAMESPACE "
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}_cr_$CR_INSTANCE "
+                       else
+                               echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}-cr-$CR_INSTANCE "
+                       fi
+               fi
+       done
+}
+
 #######################################################
 
 ################
 ### CR functions
 ################
 
+#Var to hold the current number of CR instances
+CR_APP_COUNT=1
+MAX_CR_APP_COUNT=10
+
 # Set http as the protocol to use for all communication to the Dmaap adapter
 # args: -
 # (Function for test scripts)
@@ -130,24 +158,34 @@ use_cr_https() {
 # Setup paths to svc/container for internal and external access
 # args: <protocol> <internal-port> <external-port>
 __cr_set_protocoll() {
+
        echo -e $BOLD"$CR_DISPLAY_NAME protocol setting"$EBOLD
        echo -e " Using $BOLD http $EBOLD towards $CR_DISPLAY_NAME"
-
        ## Access to Dmaap adapter
-
-       # CR_SERVICE_PATH is the base path to cr
-       CR_SERVICE_PATH=$1"://"$CR_APP_NAME":"$2  # docker access, container->container and script->container via proxy
-       if [ $RUNMODE == "KUBE" ]; then
-               CR_SERVICE_PATH=$1"://"$CR_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
-       fi
-       # Service paths are used in test script to provide callbacck urls to app
-       CR_SERVICE_MR_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK_MR  #Only for messages from dmaap adapter/mediator
-       CR_SERVICE_APP_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK    #For general callbacks from apps
-
-       # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
-       CR_ADAPTER_TYPE="REST"
-       CR_ADAPTER=$CR_SERVICE_PATH
-
+       for ((CR_INSTANCE=0; CR_INSTANCE<$MAX_CR_APP_COUNT; CR_INSTANCE++ )); do
+               CR_DOCKER_INSTANCE=$(($CR_INSTANCE+1))
+               # CR_SERVICE_PATH is the base path to cr
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"_cr_"${CR_DOCKER_INSTANCE}":"$2  # docker access, container->container and script->container via proxy
+               else
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-cr-"${CR_DOCKER_INSTANCE}":"$2  # docker access, container->container and script->container via proxy
+               fi
+               if [ $RUNMODE == "KUBE" ]; then
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-"$CR_INSTANCE.$CR_APP_NAME"."$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+               fi
+               export CR_SERVICE_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH
+		# Service paths are used in test script to provide callback urls to app
+               export CR_SERVICE_MR_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_MR  #Only for messages from dmaap adapter/mediator
+               export CR_SERVICE_TEXT_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_TEXT  #Callbacks for text payload
+               export CR_SERVICE_APP_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK    #For general callbacks from apps
+
+               if [ $CR_INSTANCE -eq 0 ]; then
+                       # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
+			# CR_ADAPTER needs to be set before each call to CR....only set for instance 0 here
+                       CR_ADAPTER_TYPE="REST"
+                       CR_ADAPTER=$__CR_SERVICE_PATH
+               fi
+       done
        echo ""
 }
 
@@ -166,15 +204,27 @@ __cr_export_vars() {
        export CR_INTERNAL_SECURE_PORT
        export CR_EXTERNAL_PORT
        export CR_EXTERNAL_SECURE_PORT
+
+       export CR_APP_COUNT
 }
 
 # Start the Callback reciver in the simulator group
-# args: -
+# args: <app-count>
 # (Function for test scripts)
 start_cr() {
 
        echo -e $BOLD"Starting $CR_DISPLAY_NAME"$EBOLD
 
+       if [ $# -ne 1 ]; then
+               echo -e $RED" Number of CR instances missing, usage: start_cr <app-count>"$ERED
+               exit 1
+       fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10 ]; then
+               echo -e $RED" Number of CR shall be 1...10, usage: start_cr <app-count>"$ERED
+               exit 1
+       fi
+       export CR_APP_COUNT=$1
+
        if [ $RUNMODE == "KUBE" ]; then
 
                # Check if app shall be fully managed by the test script
@@ -222,15 +272,13 @@ start_cr() {
 
                fi
 
-               __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+               for ((CR_INSTANCE=0; CR_INSTANCE<$CR_APP_COUNT; CR_INSTANCE++ )); do
+                       __dynvar="CR_SERVICE_PATH_"$CR_INSTANCE
+                       __cr_app_name=$CR_APP_NAME"-"$CR_INSTANCE
+                       __check_service_start $__cr_app_name ${!__dynvar}$CR_ALIVE_URL
+                       result=$(__do_curl ${!__dynvar}/reset)
+               done
 
-               echo -ne " Service $CR_APP_NAME - reset  "$SAMELINE
-               result=$(__do_curl CR $CR_SERVICE_PATH/reset)
-               if [ $? -ne 0 ]; then
-                       echo -e " Service $CR_APP_NAME - reset  $RED Failed $ERED - will continue"
-               else
-                       echo -e " Service $CR_APP_NAME - reset  $GREEN OK $EGREEN"
-               fi
        else
                # Check if docker app shall be fully managed by the test script
                __check_included_image 'CR'
@@ -242,65 +290,132 @@ start_cr() {
 
                __cr_export_vars
 
-               __start_container $CR_COMPOSE_DIR "" NODOCKERARGS 1 $CR_APP_NAME
+               app_data=""
+               cntr=1
+               while [ $cntr -le $CR_APP_COUNT ]; do
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$CR_APP_NAME"_cr_"$cntr
+                       else
+                               app=$CR_APP_NAME"-cr-"$cntr
+                       fi
+                       app_data="$app_data $app"
+                       let cntr=cntr+1
+               done
+
+               echo "COMPOSE_PROJECT_NAME="$CR_APP_NAME > $SIM_GROUP/$CR_COMPOSE_DIR/.env
+
+               __start_container $CR_COMPOSE_DIR "" NODOCKERARGS $CR_APP_COUNT $app_data
 
-        __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+               cntr=1   #Counter for docker instance, starts on 1
+		cntr2=0  #Counter for env var name, starts with 0 to be compatible with kube
+               while [ $cntr -le $CR_APP_COUNT ]; do
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$CR_APP_NAME"_cr_"$cntr
+                       else
+                               app=$CR_APP_NAME"-cr-"$cntr
+                       fi
+                       __dynvar="CR_SERVICE_PATH_"$cntr2
+                       __check_service_start $app ${!__dynvar}$CR_ALIVE_URL
+                       let cntr=cntr+1
+                       let cntr2=cntr2+1
+               done
        fi
        echo ""
 }
 
+#Convert a cr path id to the value of the environment var holding the url
+# arg: <cr-path-id>
+# returns: <base-url-to-the-app>
+__cr_get_service_path(){
+       if [ $# -ne 1 ]; then
+               echo "DUMMY"
+               return 1
+       fi
+       if [ $1 -lt 0 ] || [ $1 -ge $MAX_CR_APP_COUNT ]; then
+               echo "DUMMY"
+               return 1
+       fi
+       __dynvar="CR_SERVICE_PATH_"$1
+       echo ${!__dynvar}
+       return 0
+}
 
 # Tests if a variable value in the CR is equal to a target value and and optional timeout.
 # Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is
 # equal to the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
 # before setting pass or fail depending on if the variable value becomes equal to the target
 # value or not.
 # (Function for test scripts)
 cr_equal() {
-       if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-               __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "=" $2 $3
+       if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+		CR_SERVICE_PATH=$(__cr_get_service_path $1)
+		if [ $? -ne 0 ]; then
+			__print_err "<cr-path-id> missing or incorrect" $@
+			return 1
+		fi
+		CR_ADAPTER=$CR_SERVICE_PATH
+               __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "=" $3 $4
        else
-               __print_err "Wrong args to cr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+               __print_err "Wrong args to cr_equal, needs three or four args: <cr-path-id>  <variable-name> <target-value> [ timeout ]" $@
        fi
 }
 
 # Tests if a variable value in the CR contains the target string and and optional timeout
 # Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable contains
 # the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
 # before setting pass or fail depending on if the variable value contains the target
 # value or not.
 # (Function for test scripts)
 cr_contains_str() {
 
-       if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-               __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "contain_str" $2 $3
+       if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+		CR_SERVICE_PATH=$(__cr_get_service_path $1)
+		if [ $? -ne 0 ]; then
+			__print_err "<cr-path-id> missing or incorrect" $@
+			return 1
+		fi
+		CR_ADAPTER=$CR_SERVICE_PATH
+               __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "contain_str" $3 $4
                return 0
        else
-               __print_err "needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "needs three or four args: <cr-path-id> <variable-name> <target-value> [ timeout ]"
                return 1
        fi
 }
 
 # Read a variable value from CR sim and send to stdout. Arg: <variable-name>
 cr_read() {
+	CR_SERVICE_PATH=$(__cr_get_service_path $1)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return  1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
        echo "$(__do_curl $CR_SERVICE_PATH/counter/$1)"
 }
 
 # Function to configure write delay on callbacks
 # Delay given in seconds.
-# arg <response-code> <delay-in-sec>
+# arg <response-code> <cr-path-id>  <delay-in-sec>
 # (Function for test scripts)
 cr_delay_callback() {
        __log_conf_start $@
 
-       if [ $# -ne 2 ]; then
-        __print_err "<response-code> <delay-in-sec>]" $@
+       if [ $# -ne 3 ]; then
+        __print_err "<response-code> <cr-path-id> <delay-in-sec>" $@
         return 1
        fi
 
-       res="$(__do_curl_to_api CR POST /forcedelay?delay=$2)"
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       res="$(__do_curl_to_api CR POST /forcedelay?delay=$3)"
        status=${res:${#res}-3}
 
        if [ $status -ne 200 ]; then
@@ -313,7 +428,7 @@ cr_delay_callback() {
 }
 
 # CR API: Check the contents of all current ric sync events for one id from PMS
-# <response-code> <id> [ EMPTY | ( <ric-id> )+ ]
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]
 # (Function for test scripts)
 cr_api_check_all_sync_events() {
        __log_test_start $@
@@ -323,12 +438,19 @@ cr_api_check_all_sync_events() {
                return 1
        fi
 
-    if [ $# -lt 2 ]; then
-        __print_err "<response-code> <id> [ EMPTY | ( <ric-id> )+ ]" $@
+    if [ $# -lt 3 ]; then
+        __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]" $@
         return 1
     fi
 
-       query="/get-all-events/"$2
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -337,15 +459,15 @@ cr_api_check_all_sync_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
-               if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+               if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
                        targetJson="["
                else
                        targetJson="["
-                       arr=(${@:3})
+                       arr=(${@:4})
 
-                       for ((i=0; i<$(($#-2)); i=i+1)); do
+                       for ((i=0; i<$(($#-3)); i=i+1)); do
 
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
@@ -368,17 +490,24 @@ cr_api_check_all_sync_events() {
 }
 
 # CR API: Check the contents of all current status events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <status> )+ ]
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]
 # (Function for test scripts)
 cr_api_check_all_ecs_events() {
        __log_test_start $@
 
-    if [ $# -lt 2 ]; then
-        __print_err "<response-code> <id> [ EMPTY | ( <status> )+ ]" $@
+    if [ $# -lt 3 ]; then
+        __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]" $@
         return 1
     fi
 
-       query="/get-all-events/"$2
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -387,15 +516,15 @@ cr_api_check_all_ecs_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
-               if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+               if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
                        targetJson="["
                else
                        targetJson="["
-                       arr=(${@:3})
+                       arr=(${@:4})
 
-                       for ((i=0; i<$(($#-2)); i=i+1)); do
+                       for ((i=0; i<$(($#-3)); i=i+1)); do
 
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
@@ -418,29 +547,36 @@ cr_api_check_all_ecs_events() {
 }
 
 # CR API: Check the contents of all current type subscription events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
 # (Function for test scripts)
 cr_api_check_all_ecs_subscription_events() {
        __log_test_start $@
 
-       #Valid number of parameter 2,3,7,11
+       #Valid number of parameter 3,4,8,12
        paramError=1
-       if [ $# -eq 2 ]; then
+       if [ $# -eq 3 ]; then
                paramError=0
        fi
-       if [ $# -eq 3 ] && [ "$3" == "EMPTY" ]; then
+       if [ $# -eq 4 ] && [ "$4" == "EMPTY" ]; then
                paramError=0
        fi
-       variablecount=$(($#-2))
-       if [ $# -gt 3 ] && [ $(($variablecount%3)) -eq 0 ]; then
+       variablecount=$(($#-3))
+       if [ $# -gt 4 ] && [ $(($variablecount%3)) -eq 0 ]; then
                paramError=0
        fi
        if [ $paramError -eq 1 ]; then
-               __print_err "<response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
+               __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
                return 1
        fi
 
-       query="/get-all-events/"$2
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -449,12 +585,12 @@ cr_api_check_all_ecs_subscription_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
                targetJson="["
-               if [ $# -gt 3 ]; then
-                       arr=(${@:3})
-                       for ((i=0; i<$(($#-3)); i=i+3)); do
+               if [ $# -gt 4 ]; then
+                       arr=(${@:4})
+                       for ((i=0; i<$(($#-4)); i=i+3)); do
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
                                fi
@@ -484,11 +620,23 @@ cr_api_check_all_ecs_subscription_events() {
 
 
 # CR API: Reset all events and counters
-# Arg: -
+# Arg: <cr-path-id>
 # (Function for test scripts)
 cr_api_reset() {
        __log_conf_start $@
 
+	if [ $# -ne 1 ]; then
+		__print_err "<cr-path-id>" $@
+		return 1
+	fi
+
+	CR_SERVICE_PATH=$(__cr_get_service_path $1)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
        res="$(__do_curl_to_api CR GET /reset)"
        status=${res:${#res}-3}
 
@@ -503,17 +651,24 @@ cr_api_reset() {
 
 
 # CR API: Check the contents of all json events for path
-# <response-code> <topic-url> (EMPTY | <json-msg>+ )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg>+ )
 # (Function for test scripts)
 cr_api_check_all_genric_json_events() {
        __log_test_start $@
 
-       if [ $# -lt 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <json-msg>+ )" $@
+       if [ $# -lt 4 ]; then
+               __print_err "<response-code> <cr-path-id>  <topic-url> (EMPTY | <json-msg>+ )" $@
                return 1
        fi
 
-       query="/get-all-events/"$2
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -524,7 +679,8 @@ cr_api_check_all_genric_json_events() {
        body=${res:0:${#res}-3}
        targetJson="["
 
-       if [ $3 != "EMPTY" ]; then
+	if [ "$4" != "EMPTY" ]; then
+               shift
                shift
                shift
                while [ $# -gt 0 ]; do
@@ -550,19 +706,25 @@ cr_api_check_all_genric_json_events() {
 }
 
 
-
 # CR API: Check a single (oldest) json event (or none if empty) for path
-# <response-code> <topic-url> (EMPTY | <json-msg> )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg> )
 # (Function for test scripts)
 cr_api_check_single_genric_json_event() {
        __log_test_start $@
 
-       if [ $# -ne 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <json-msg> )" $@
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id>  <topic-url> (EMPTY | <json-msg> )" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-event/"$2
+       query="/get-event/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -571,8 +733,12 @@ cr_api_check_single_genric_json_event() {
                return 1
        fi
        body=${res:0:${#res}-3}
-       targetJson=$3
+       targetJson=$4
 
+	if [ "$targetJson" == "EMPTY" ] && [ ${#body} -ne 0 ]; then
+               __log_test_fail_body
+               return 1
+       fi
        echo " TARGET JSON: $targetJson" >> $HTTPLOG
        res=$(python3 ../common/compare_json.py "$targetJson" "$body")
 
@@ -581,6 +747,140 @@ cr_api_check_single_genric_json_event() {
                return 1
        fi
 
+       __log_test_pass
+       return 0
+}
+
+# CR API: Check a single (oldest) json in md5 format (or none if empty) for path.
+# Note that if a json message is given, it shall be compact, no ws except inside string.
+# The MD5 will generate different hash if ws is present or not in otherwise equivalent json
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )
+# (Function for test scripts)
+cr_api_check_single_genric_event_md5() {
+       __log_test_start $@
+
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )" $@
+               return 1
+       fi
+
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-event/"$3
+       res="$(__do_curl_to_api CR GET $query)"
+       status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+       body=${res:0:${#res}-3}
+	if [ "$4" == "EMPTY" ]; then
+               if [ ${#body} -ne 0 ]; then
+                       __log_test_fail_body
+                       return 1
+               else
+                       __log_test_pass
+                       return 0
+               fi
+       fi
+       command -v md5 > /dev/null # Mac
+       if [ $? -eq 0 ]; then
+               targetMd5=$(echo -n "$4" | md5)
+       else
+               command -v md5sum > /dev/null # Linux
+               if [ $? -eq 0 ]; then
+                       targetMd5=$(echo -n "$4" | md5sum | cut -d' ' -f 1)  # Need to cut additional info printed by cmd
+               else
+                       __log_test_fail_general "Command md5 nor md5sum is available"
+                       return 1
+               fi
+       fi
+       targetMd5="\""$targetMd5"\"" #Quotes needed
+
+       echo " TARGET MD5 hash: $targetMd5" >> $HTTPLOG
+
+       if [ "$body" != "$targetMd5" ]; then
+               __log_test_fail_body
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+# CR API: Check a single (oldest) event in md5 format (or none if empty) for path.
+# Note that if a file with json message is given, the json shall be compact, no ws except inside string and not newlines.
+# The MD5 will generate different hash if ws/newlines is present or not in otherwise equivalent json
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )
+# (Function for test scripts)
+cr_api_check_single_genric_event_md5_file() {
+       __log_test_start $@
+
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )" $@
+               return 1
+       fi
+
+	CR_SERVICE_PATH=$(__cr_get_service_path $2)
+	if [ $? -ne 0 ]; then
+		__print_err "<cr-path-id> missing or incorrect" $@
+		return 1
+	fi
+	CR_ADAPTER=$CR_SERVICE_PATH
+
+       query="/get-event/"$3
+       res="$(__do_curl_to_api CR GET $query)"
+       status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+       body=${res:0:${#res}-3}
+	if [ "$4" == "EMPTY" ]; then
+               if [ ${#body} -ne 0 ]; then
+                       __log_test_fail_body
+                       return 1
+               else
+                       __log_test_pass
+                       return 0
+               fi
+       fi
+
+	if [ ! -f "$4" ]; then
+		__log_test_fail_general "File $4 does not exist"
+               return 1
+       fi
+
+       filedata=$(cat $4)
+
+       command -v md5 > /dev/null # Mac
+       if [ $? -eq 0 ]; then
+               targetMd5=$(echo -n "$filedata" | md5)
+       else
+               command -v md5sum > /dev/null # Linux
+               if [ $? -eq 0 ]; then
+                       targetMd5=$(echo -n "$filedata" | md5sum | cut -d' ' -f 1)  # Need to cut additional info printed by cmd
+               else
+                       __log_test_fail_general "Command md5 nor md5sum is available"
+                       return 1
+               fi
+       fi
+       targetMd5="\""$targetMd5"\""   #Quotes needed
+
+       echo " TARGET MD5 hash: $targetMd5" >> $HTTPLOG
+
+       if [ "$body" != "$targetMd5" ]; then
+               __log_test_fail_body
+               return 1
+       fi
+
        __log_test_pass
        return 0
 }
\ No newline at end of file
index 26da2d0..9b7571f 100644 (file)
@@ -92,6 +92,18 @@ __DMAAPADP_initial_setup() {
        use_dmaapadp_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__DMAAPADP_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "DMAAPADP $DMAAP_ADP_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "DMAAPADP $DMAAP_ADP_APP_NAME"
+       fi
+}
+
 #######################################################
 
 # Set http as the protocol to use for all communication to the Dmaap adapter
index 16e1ad7..5188a45 100644 (file)
@@ -92,6 +92,18 @@ __DMAAPMED_initial_setup() {
        use_dmaapmed_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__DMAAPMED_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "DMAAPMED $DMAAP_MED_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "DMAAPMED $DMAAP_MED_APP_NAME"
+       fi
+}
+
 #######################################################
 
 # Set http as the protocol to use for all communication to the Dmaap mediator
diff --git a/test/common/dmaapmr_api_functions.sh b/test/common/dmaapmr_api_functions.sh
new file mode 100644 (file)
index 0000000..d0f3f0c
--- /dev/null
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# Functions for DMAAPMR is included in mr_api_functions.sh
+
+
+
index 2b434f1..b28c061 100644 (file)
@@ -91,6 +91,18 @@ __ECS_initial_setup() {
        use_ecs_rest_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__ECS_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "ECS $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "ECS $ECS_APP_NAME"
+       fi
+}
+
 #######################################################
 
 
diff --git a/test/common/genstat.sh b/test/common/genstat.sh
new file mode 100755 (executable)
index 0000000..e186f09
--- /dev/null
@@ -0,0 +1,134 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2020 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# This script collects container statistics to a file. Data is separated with semicolon.
+# Works for both docker container and kubernetes pods.
+# Relies on 'docker stats' so will not work for other container runtimes.
+# Used by the test env.
+
+# args: docker <start-time-seconds> <log-file> <app-short-name> <app-name> [ <app-short-name> <app-name> ]*
+# or
+# args: kube <start-time-seconds> <log-file> <app-short-name> <app-name> <namespace> [ <app-short-name> <app-name> <namespace> ]*
+
+print_usage() {
+  echo "Usage: genstat.sh DOCKER <start-time-seconds> <log-file> <app-short-name> <app-name> [ <app-short-name> <app-name> ]*"
+  echo "or"
+  echo "Usage: genstat.sh KUBE <start-time-seconds> <log-file> <app-short-name> <app-name> <namespace> [ <app-short-name> <app-name> <namespace> ]*"
+}
+STARTTIME=-1
+
+if [ $# -lt 4 ]; then
+  print_usage
+  exit 1
+fi
+if [ $1 == "DOCKER" ]; then
+  STAT_TYPE=$1
+  shift
+  STARTTIME=$1
+  shift
+  LOGFILE=$1
+  shift
+  if [ $(($#%2)) -ne 0 ]; then
+    print_usage
+    exit 1
+  fi
+elif [ $1 == "KUBE" ]; then
+  STAT_TYPE=$1
+  shift
+  STARTTIME=$1
+  shift
+  LOGFILE=$1
+  shift
+  if [ $(($#%3)) -ne 0 ]; then
+    print_usage
+    exit 1
+  fi
+else
+  print_usage
+  exit 1
+fi
+
+
+echo "Name;Time;PIDS;CPU perc;Mem perc" > $LOGFILE
+
+if [ "$STARTTIME" -ne -1 ]; then
+    STARTTIME=$(($SECONDS-$STARTTIME))
+fi
+
+while [ true ]; do
+  docker stats --no-stream --format "table {{.Name}};{{.PIDs}};{{.CPUPerc}};{{.MemPerc}}" > tmp/.tmp_stat_out.txt
+  if [ "$STARTTIME" -eq -1 ]; then
+    STARTTIME=$SECONDS
+  fi
+  CTIME=$(($SECONDS-$STARTTIME))
+
+  TMP_APPS=""
+
+  while read -r line; do
+    APP_LIST=(${@})
+    if [ $STAT_TYPE == "DOCKER" ]; then
+      for ((i=0; i<$#; i=i+2)); do
+        SAPP=${APP_LIST[$i]}
+        APP=${APP_LIST[$i+1]}
+        d=$(echo $line | grep -v "k8s" | grep $APP)
+        if [ ! -z $d ]; then
+          d=$(echo $d | cut -d';' -f 2- | sed -e 's/%//g' | sed 's/\./,/g')
+          echo "$SAPP;$CTIME;$d" >> $LOGFILE
+          TMP_APPS=$TMP_APPS" $SAPP "
+        fi
+      done
+    else
+      for ((i=0; i<$#; i=i+3)); do
+        SAPP=${APP_LIST[$i]}
+        APP=${APP_LIST[$i+1]}
+        NS=${APP_LIST[$i+2]}
+        d=$(echo "$line" | grep -v "k8s_POD" | grep "k8s" | grep $APP | grep $NS)
+        if [ ! -z "$d" ]; then
+          d=$(echo "$d" | cut -d';' -f 2- | sed -e 's/%//g' | sed 's/\./,/g')
+          data="$SAPP-$NS;$CTIME;$d"
+          echo $data >> $LOGFILE
+          TMP_APPS=$TMP_APPS" $SAPP-$NS "
+        fi
+      done
+    fi
+  done < tmp/.tmp_stat_out.txt
+
+  APP_LIST=(${@})
+  if [ $STAT_TYPE == "DOCKER" ]; then
+    for ((i=0; i<$#; i=i+2)); do
+      SAPP=${APP_LIST[$i]}
+      APP=${APP_LIST[$i+1]}
+      if [[ $TMP_APPS != *" $SAPP "* ]]; then
+        data="$SAPP;$CTIME;0;0,00;0,00"
+        echo $data >> $LOGFILE
+      fi
+    done
+  else
+    for ((i=0; i<$#; i=i+3)); do
+      SAPP=${APP_LIST[$i]}
+      APP=${APP_LIST[$i+1]}
+      NS=${APP_LIST[$i+2]}
+      if [[ $TMP_APPS != *" $SAPP-$NS "* ]]; then
+        data="$SAPP-$NS;$CTIME;0;0,00;0,00"
+        echo $data >> $LOGFILE
+      fi
+    done
+  fi
+  sleep 1
+done
similarity index 95%
rename from test/common/http_proxy_api_functions.sh
rename to test/common/httpproxy_api_functions.sh
index 56ce6d4..3378a1d 100644 (file)
@@ -106,6 +106,18 @@ __HTTPPROXY_initial_setup() {
        :
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__HTTPPROXY_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "HTTPPROXY $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE"
+       else
+               echo "HTTPPROXY $HTTP_PROXY_APP_NAME"
+       fi
+}
+
 #######################################################
 
 
similarity index 96%
rename from test/common/kube_proxy_api_functions.sh
rename to test/common/kubeproxy_api_functions.sh
index dcaaf80..eb4600c 100644 (file)
@@ -107,6 +107,18 @@ __KUBEPROXY_initial_setup() {
        use_kube_proxy_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__KUBEPROXY_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "KUBEPROXXY $KUBE_PROXY_APP_NAME $KUBE_SIM_NAMESPACE"
+       else
+               echo "KUBEPROXXY $KUBE_PROXY_APP_NAME"
+       fi
+}
+
 #######################################################
 
 ## Access to Kube http proxy
index c6a5a2c..c6feb45 100755 (executable)
@@ -193,19 +193,84 @@ __DMAAPMR_initial_setup() {
        :  # handle by __MR_initial_setup
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__MR_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "MR-STUB $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE"
+       else
+               echo "MR-STUB $MR_STUB_APP_NAME"
+       fi
+}
+
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__DMAAPMR_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "KAFKA $MR_KAFKA_APP_NAME $KUBE_ONAP_NAMESPACE MESSAGE-ROUTER $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE ZOOKEEPER $MR_ZOOKEEPER_APP_NAME $KUBE_ONAP_NAMESPACE"
+       else
+               echo "KAFKA $MR_KAFKA_APP_NAME MESSAGE-ROUTER $MR_DMAAP_APP_NAME ZOOKEEPER $MR_ZOOKEEPER_APP_NAME"
+       fi
+}
 
 #######################################################
 
+# Description of port mappings when running MR-STUB only or MR-STUB + MESSAGE-ROUTER
+#
+# 'MR-STUB only' is started when only 'MR' is included in the test script. Both the test scripts and app will then use MR-STUB as a message-router simulator.
+#
+# 'MR-STUB + MESSAGE-ROUTER' is started when 'MR' and 'DMAAPMR' are included in the test scripts. DMAAPMR is the real message router including kafka and zookeeper.
+# In this configuration, MR-STUB is used by the test-script as frontend to the message-router while apps are using the real message-router.
+#
+# DOCKER                                                                      KUBE
+# ---------------------------------------------------------------------------------------------------------------------------------------------------
+
+#                             MR-STUB                                                             MR-STUB
+#                             +++++++                                                             +++++++
+# localhost                               container                           service                                 pod
+# ==============================================================================================================================================
+# 10 MR_STUB_LOCALHOST_PORT          ->   13 MR_INTERNAL_PORT                 15 MR_EXTERNAL_PORT                ->   17 MR_INTERNAL_PORT
+# 12 MR_STUB_LOCALHOST_SECURE_PORT   ->   14 MR_INTERNAL_SECURE_PORT          16 MR_EXTERNAL_SECURE_PORT                ->   18 MR_INTERNAL_SECURE_PORT
+
+
+
+#                             MESSAGE-ROUTER                                                      MESSAGE-ROUTER
+#                             ++++++++++++++                                                      ++++++++++++++
+# localhost                               container                           service                                 pod
+# ===================================================================================================================================================
+# 20 MR_DMAAP_LOCALHOST_PORT         ->   23 MR_INTERNAL_PORT                 25 MR_EXTERNAL_PORT                ->   27 MR_INTERNAL_PORT
+# 22 MR_DMAAP_LOCALHOST_SECURE_PORT  ->   24 MR_INTERNAL_SECURE_PORT          26 MR_EXTERNAL_SECURE_PORT                ->   28 MR_INTERNAL_SECURE_PORT
+
+
+# Running only the MR-STUB - apps using MR-STUB
+# DOCKER                                                                      KUBE
+# localhost:          10 and 12                                                -
+# via proxy (script): 13 and 14                                               via proxy (script): 15 and 16
+# apps:               13 and 14                                               apps:               15 and 16
+
+# Running MR-STUB (as frontend for test script) and MESSAGE-ROUTER - apps using MESSAGE-ROUTER
+# DOCKER                                                                      KUBE
+# localhost:          10 and 12                                                -
+# via proxy (script): 13 and 14                                               via proxy (script): 15 and 16
+# apps:               23 and 24                                               apps:               25 and 26
+#
+
+
+
 use_mr_http() {
-       __mr_set_protocoll "http" $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_SECURE_PORT $MR_EXT_SECURE_PORT
+       __mr_set_protocoll "http" $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_SECURE_PORT $MR_EXTERNAL_SECURE_PORT
 }
 
 use_mr_https() {
-       __mr_set_protocoll "https" $MR_INTERNAL_SECURE_PORT $MR_EXTERNAL_SECURE_PORT
+       __mr_set_protocoll "https" $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_SECURE_PORT $MR_EXTERNAL_SECURE_PORT
 }
 
 # Setup paths to svc/container for internal and external access
-# args: <protocol> <internal-port> <external-port> <mr-stub-internal-port> <mr-stub-external-port> <mr-stub-internal-secure-port> <mr-stub-external-secure-port>
+# args: <protocol> <internal-port> <external-port> <internal-secure-port> <external-secure-port>
 __mr_set_protocoll() {
        echo -e $BOLD"$MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME protocol setting"$EBOLD
        echo -e " Using $BOLD http $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
@@ -214,39 +279,64 @@ __mr_set_protocoll() {
 
        MR_HTTPX=$1
 
+       if [ $MR_HTTPX == "http" ]; then
+               INT_PORT=$2
+               EXT_PORT=$3
+       else
+               INT_PORT=$4
+               EXT_PORT=$5
+       fi
+
        # Access via test script
-       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME":"$2  # access from script via proxy, docker
-       MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME":"$2 # access from script via proxy, docker
+       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME":"$INT_PORT  # access from script via proxy, docker
+       MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME":"$INT_PORT # access from script via proxy, docker
+	MR_DMAAP_ADAPTER_HTTP="" # Access to dmaap mr via proxy - set only if app is included
 
        MR_SERVICE_PATH=$MR_STUB_PATH # access container->container, docker -  access pod->svc, kube
+       MR_KAFKA_SERVICE_PATH=""
+       MR_ZOOKEEPER_SERVICE_PATH=""
        __check_included_image "DMAAPMR"
        if [ $? -eq 0 ]; then
                MR_SERVICE_PATH=$MR_DMAAP_PATH # access container->container, docker -  access pod->svc, kube
+               MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
+
+               MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME":"$MR_KAFKA_PORT
+               MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME":"$MR_ZOOKEEPER_PORT
        fi
 
        # For directing calls from script to e.g.PMS via message rounter
-       # Theses case shall always go though the  mr-stub
-       MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$4
-       MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$6
+	# These cases shall always go through the mr-stub
+       MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$2
+       MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$4
+
+       MR_DMAAP_ADAPTER_TYPE="REST"
+
+
 
        if [ $RUNMODE == "KUBE" ]; then
-               MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
-               MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
+               MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$EXT_PORT # access from script via proxy, kube
+               MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE":"$EXT_PORT # access from script via proxy, kube
 
                MR_SERVICE_PATH=$MR_STUB_PATH
                __check_included_image "DMAAPMR"
                if [ $? -eq 0 ]; then
                        MR_SERVICE_PATH=$MR_DMAAP_PATH
+                       MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
+                       MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+                       MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
                fi
                __check_prestarted_image "DMAAPMR"
                if [ $? -eq 0 ]; then
                        MR_SERVICE_PATH=$MR_DMAAP_PATH
+                       MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
+                       MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+                       MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
                fi
 
                # For directing calls from script to e.g.PMS, via message rounter
                # These calls shall always go though the  mr-stub
-               MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$5
-               MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$7
+               MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3
+               MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$5
        fi
 
        # For calls from script to the mr-stub
@@ -254,6 +344,7 @@ __mr_set_protocoll() {
        MR_STUB_ADAPTER_TYPE="REST"
 
        echo ""
+
 }
 
 # Export env vars for config files, docker compose and kube resources
@@ -272,6 +363,18 @@ __dmaapmr_export_vars() {
        export MR_DMAAP_LOCALHOST_SECURE_PORT
        export MR_INTERNAL_SECURE_PORT
        export MR_DMAAP_HOST_MNT_DIR
+
+       export KUBE_ONAP_NAMESPACE
+       export MR_EXTERNAL_PORT
+       export MR_EXTERNAL_SECURE_PORT
+       export MR_KAFKA_PORT
+       export MR_ZOOKEEPER_PORT
+
+       export MR_KAFKA_SERVICE_PATH
+       export MR_ZOOKEEPER_SERVICE_PATH
+
+       export MR_KAFKA_KUBE_NODE_PORT
+       export MR_KAFKA_DOCKER_LOCALHOST_PORT
 }
 
 # Export env vars for config files, docker compose and kube resources
@@ -283,10 +386,18 @@ __mr_export_vars() {
        export MRSTUB_IMAGE
        export MR_INTERNAL_PORT
        export MR_INTERNAL_SECURE_PORT
+       export MR_EXTERNAL_PORT
+       export MR_EXTERNAL_SECURE_PORT
        export MR_STUB_LOCALHOST_PORT
        export MR_STUB_LOCALHOST_SECURE_PORT
        export MR_STUB_CERT_MOUNT_DIR
        export MR_STUB_DISPLAY_NAME
+
+       export KUBE_ONAP_NAMESPACE
+       export MR_EXTERNAL_PORT
+
+       export MR_KAFKA_SERVICE_PATH
+       export MR_ZOOKEEPER_SERVICE_PATH
 }
 
 
@@ -358,53 +469,33 @@ start_mr() {
 
                        __dmaapmr_export_vars
 
-                       #export MR_DMAAP_APP_NAME
-                       export MR_DMAAP_KUBE_APP_NAME=message-router
-                       MR_DMAAP_APP_NAME=$MR_DMAAP_KUBE_APP_NAME
-                       export KUBE_ONAP_NAMESPACE
-                       export MR_EXTERNAL_PORT
-                       export MR_INTERNAL_PORT
-                       export MR_EXTERNAL_SECURE_PORT
-                       export MR_INTERNAL_SECURE_PORT
-                       export ONAP_DMAAPMR_IMAGE
-
-                       export MR_KAFKA_BWDS_NAME=akfak-bwds
-                       export MR_KAFKA_BWDS_NAME=kaka
-                       export KUBE_ONAP_NAMESPACE
-
-                       export MR_ZOOKEEPER_APP_NAME
-                       export ONAP_ZOOKEEPER_IMAGE
-
                        #Check if onap namespace exists, if not create it
                        __kube_create_namespace $KUBE_ONAP_NAMESPACE
 
-                       # TODO - Fix domain name substitution in the prop file
-                       # Create config maps - dmaapmr app
-                       configfile=$PWD/tmp/MsgRtrApi.properties
-                       cp $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR"$MR_DMAAP_HOST_MNT_DIR"/mr/KUBE-MsgRtrApi.properties $configfile
+                       # copy config files
+                       MR_MNT_CONFIG_BASEPATH=$SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR$MR_DMAAP_HOST_MNT_DIR
+                       cp -r $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR$MR_DMAAP_HOST_CONFIG_DIR/*  $MR_MNT_CONFIG_BASEPATH
 
+                       # Create config maps - dmaapmr app
+                       configfile=$MR_MNT_CONFIG_BASEPATH/mr/MsgRtrApi.properties
                        output_yaml=$PWD/tmp/dmaapmr_msgrtrapi_cfc.yaml
                        __kube_create_configmap dmaapmr-msgrtrapi.properties $KUBE_ONAP_NAMESPACE autotest DMAAPMR $configfile $output_yaml
 
-                       configfile=$PWD/tmp/logback.xml
-                       cp $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR"$MR_DMAAP_HOST_MNT_DIR"/mr/logback.xml $configfile
+                       configfile=$MR_MNT_CONFIG_BASEPATH/mr/logback.xml
                        output_yaml=$PWD/tmp/dmaapmr_logback_cfc.yaml
                        __kube_create_configmap dmaapmr-logback.xml $KUBE_ONAP_NAMESPACE autotest DMAAPMR $configfile $output_yaml
 
-                       configfile=$PWD/tmp/cadi.properties
-                       cp $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR"$MR_DMAAP_HOST_MNT_DIR"/mr/cadi.properties $configfile
+                       configfile=$MR_MNT_CONFIG_BASEPATH/mr/cadi.properties
                        output_yaml=$PWD/tmp/dmaapmr_cadi_cfc.yaml
                        __kube_create_configmap dmaapmr-cadi.properties $KUBE_ONAP_NAMESPACE autotest DMAAPMR $configfile $output_yaml
 
                        # Create config maps - kafka app
-                       configfile=$PWD/tmp/zk_client_jaas.conf
-                       cp $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR"$MR_DMAAP_HOST_MNT_DIR"/kafka/zk_client_jaas.conf $configfile
+                       configfile=$MR_MNT_CONFIG_BASEPATH/kafka/zk_client_jaas.conf
                        output_yaml=$PWD/tmp/dmaapmr_zk_client_cfc.yaml
                        __kube_create_configmap dmaapmr-zk-client-jaas.conf $KUBE_ONAP_NAMESPACE autotest DMAAPMR $configfile $output_yaml
 
                        # Create config maps - zookeeper app
-                       configfile=$PWD/tmp/zk_server_jaas.conf
-                       cp $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR"$MR_DMAAP_HOST_MNT_DIR"/zk/zk_server_jaas.conf $configfile
+                       configfile=$MR_MNT_CONFIG_BASEPATH/zk/zk_server_jaas.conf
                        output_yaml=$PWD/tmp/dmaapmr_zk_server_cfc.yaml
                        __kube_create_configmap dmaapmr-zk-server-jaas.conf $KUBE_ONAP_NAMESPACE autotest DMAAPMR $configfile $output_yaml
 
@@ -419,42 +510,43 @@ start_mr() {
                        __kube_create_instance app $MR_DMAAP_APP_NAME $input_yaml $output_yaml
 
 
-                       echo " Retrieving host and ports for service..."
-                       MR_DMAAP_HOST_NAME=$(__kube_get_service_host $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE)
+                       __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
 
-                       MR_EXT_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "http")
-                       MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "https")
+                       echo " Kafka TCP node port $MR_KAFKA_KUBE_NODE_PORT"
 
-                       echo " Host IP, http port, https port: $MR_DMAAP_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
-                       MR_SERVICE_PATH=""
-                       if [ $MR_HTTPX == "http" ]; then
-                               MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_PORT
-                               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
-                       else
-                               MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_SECURE_PORT
-                               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
+
+                       if [ $# -gt 0 ]; then
+                               if [ $(($#%3)) -eq 0 ]; then
+                                       while [ $# -gt 0 ]; do
+                                               __dmaap_pipeclean "$1" "$2/$1" "$2/$1/$3?timeout=1000&limit=100"
+                                               shift; shift; shift;
+                                       done
+                               else
+                                       echo -e $RED" args: start_mr [<topic-name> <base-url> <group-and-user-url>]*"$ERED
+                                       echo -e $RED" Got: $@"$ERED
+                                       exit 1
+                               fi
                        fi
 
-                               __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
+                       echo " Current topics:"
+                       curlString="$MR_DMAAP_PATH/topics"
+                       result=$(__do_curl "$curlString")
+                       echo $result | indent2
 
                fi
 
                if [ $retcode_included_mr -eq 0 ]; then
-                       #exporting needed var for deployment
-                       export MR_STUB_APP_NAME
-                       export KUBE_ONAP_NAMESPACE
-                       export MRSTUB_IMAGE
-                       export MR_INTERNAL_PORT
-                       export MR_INTERNAL_SECURE_PORT
-                       export MR_EXTERNAL_PORT
-                       export MR_EXTERNAL_SECURE_PORT
+
+                       __mr_export_vars
 
                        if [ $retcode_prestarted_dmaapmr -eq 0 ] || [ $retcode_included_dmaapmr -eq 0 ]; then  # Set topics for dmaap
                                export TOPIC_READ="http://$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE:$MR_INTERNAL_PORT/events/$MR_READ_TOPIC"
                                export TOPIC_WRITE="http://$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE:$MR_INTERNAL_PORT/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=15000&limit=100"
+                               export GENERIC_TOPICS_UPLOAD_BASEURL="http://$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE:$MR_INTERNAL_PORT"
                        else
                                export TOPIC_READ=""
                                export TOPIC_WRITE=""
+                               export GENERIC_TOPICS_UPLOAD_BASEURL=""
                        fi
 
                        #Check if onap namespace exists, if not create it
@@ -473,42 +565,8 @@ start_mr() {
 
                fi
 
-
-               echo " Retrieving host and ports for service..."
-               MR_STUB_HOST_NAME=$(__kube_get_service_host $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE)
-
-               MR_EXT_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "http")
-               MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "https")
-
-               echo " Host IP, http port, https port: $MR_STUB_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
-               if [ $MR_HTTPX == "http" ]; then
-                       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
-                       if [ -z "$MR_SERVICE_PATH" ]; then
-                               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
-                       fi
-               else
-                       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
-                       if [ -z "$MR_SERVICE_PATH" ]; then
-                               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
-                       fi
-               fi
-               MR_ADAPTER_HTTP="http://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
-               MR_ADAPTER_HTTPS="https://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
-
-               MR_STUB_ADAPTER=$MR_STUB_PATH
-               MR_STUB_ADAPTER_TYPE="REST"
-
                __check_service_start $MR_STUB_APP_NAME $MR_STUB_PATH$MR_STUB_ALIVE_URL
 
-               echo -ne " Service $MR_STUB_APP_NAME - reset  "$SAMELINE
-               result=$(__do_curl $MR_STUB_PATH/reset)
-               if [ $? -ne 0 ]; then
-                       echo -e " Service $MR_STUB_APP_NAME - reset  $RED Failed $ERED - will continue"
-               else
-                       echo -e " Service $MR_STUB_APP_NAME - reset  $GREEN OK $EGREEN"
-               fi
-
-
        else
 
                __check_included_image 'DMAAPMR'
@@ -532,26 +590,44 @@ start_mr() {
 
                export TOPIC_READ=""
         export TOPIC_WRITE=""
+               export GENERIC_TOPICS_UPLOAD_BASEURL=""
                if [ $retcode_dmaapmr -eq 0 ]; then  # Set topics for dmaap
                        export TOPIC_READ="http://$MR_DMAAP_APP_NAME:$MR_INTERNAL_PORT/events/$MR_READ_TOPIC"
                        export TOPIC_WRITE="http://$MR_DMAAP_APP_NAME:$MR_INTERNAL_PORT/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=15000&limit=100"
+                       export GENERIC_TOPICS_UPLOAD_BASEURL="http://$MR_DMAAP_APP_NAME:$MR_INTERNAL_PORT"
                fi
 
                __dmaapmr_export_vars
 
                if [ $retcode_dmaapmr -eq 0 ]; then
-                       __start_container $MR_DMAAP_COMPOSE_DIR "" NODOCKERARGS 1 $MR_DMAAP_APP_NAME
 
-                       __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
+                       # copy config files
+                       MR_MNT_CONFIG_BASEPATH=$SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR$MR_DMAAP_HOST_MNT_DIR
+                       cp -r $SIM_GROUP"/"$MR_DMAAP_COMPOSE_DIR$MR_DMAAP_HOST_CONFIG_DIR/*  $MR_MNT_CONFIG_BASEPATH
 
+                       # substitute vars
+                       configfile=$MR_MNT_CONFIG_BASEPATH/mr/MsgRtrApi.properties
+                       cp $configfile $configfile"_tmp"
+                       envsubst < $configfile"_tmp" > $configfile
 
-                       __create_topic $MR_READ_TOPIC "Topic for reading policy messages"
+                       __start_container $MR_DMAAP_COMPOSE_DIR "" NODOCKERARGS 1 $MR_DMAAP_APP_NAME
 
-                       __create_topic $MR_WRITE_TOPIC "Topic for writing policy messages"
+                       __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
 
-                       __dmaap_pipeclean $MR_READ_TOPIC "/events/$MR_READ_TOPIC" "/events/$MR_READ_TOPIC/users/policy-agent?timeout=1000&limit=100"
+                       echo " Kafka TCP node port $MR_KAFKA_DOCKER_LOCALHOST_PORT"
 
-                       __dmaap_pipeclean $MR_WRITE_TOPIC "/events/$MR_WRITE_TOPIC" "/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=1000&limit=100"
+                       if [ $# -gt 0 ]; then
+                               if [ $(($#%3)) -eq 0 ]; then
+                                       while [ $# -gt 0 ]; do
+                                               __dmaap_pipeclean "$1" "$2/$1" "$2/$1/$3?timeout=1000&limit=100"
+                                               shift; shift; shift;
+                                       done
+                               else
+                                       echo -e $RED" args: start_mr [<topic-name> <base-url> <group-and-user-url>]*"$ERED
+                                       echo -e $RED" Got: $@"$ERED
+                                       exit 1
+                               fi
+                       fi
 
                        echo " Current topics:"
                        curlString="$MR_DMAAP_PATH/topics"
@@ -575,23 +651,25 @@ start_mr() {
 # Create a dmaap mr topic
 # args: <topic name> <topic-description>
 __create_topic() {
-       echo -ne " Creating read topic: $1"$SAMELINE
+       echo -ne " Creating topic: $1"$SAMELINE
 
        json_topic="{\"topicName\":\"$1\",\"partitionCount\":\"2\", \"replicationCount\":\"3\", \"transactionEnabled\":\"false\",\"topicDescription\":\"$2\"}"
-       echo $json_topic > ./tmp/$1.json
+       fname="./tmp/$1.json"
+       echo $json_topic > $fname
 
-       curlString="$MR_DMAAP_PATH/topics/create -X POST  -H Content-Type:application/json -d@./tmp/$1.json"
-       topic_retries=5
+       query="/topics/create"
+       topic_retries=10
        while [ $topic_retries -gt 0 ]; do
                let topic_retries=topic_retries-1
-               result=$(__do_curl "$curlString")
-               if [ $? -eq 0 ]; then
+               res="$(__do_curl_to_api DMAAPMR POST $query $fname)"
+               status=${res:${#res}-3}
+
+               if [[ $status == "2"* ]]; then
                        topic_retries=0
-                       echo -e " Creating read topic: $1 $GREEN OK $EGREEN"
-               fi
-               if [ $? -ne 0 ]; then
+                       echo -e " Creating topic: $1 $GREEN OK $EGREEN"
+               else
                        if [ $topic_retries -eq 0 ]; then
-                               echo -e " Creating read topic: $1 $RED Failed $ERED"
+                               echo -e " Creating topic: $1 $RED Failed $ERED"
                                ((RES_CONF_FAIL++))
                                return 1
                        else
@@ -599,18 +677,27 @@ __create_topic() {
                        fi
                fi
        done
+       echo
        return 0
 }
 
 # Do a pipeclean of a topic - to overcome dmaap mr bug...
-# args: <topic> <post-url> <read-url>
+# args: <topic> <post-url> <read-url> [<num-retries>]
 __dmaap_pipeclean() {
        pipeclean_retries=50
+       if [ $# -eq 4 ]; then
+               pipeclean_retries=$4
+       fi
        echo -ne " Doing dmaap-mr pipe cleaning on topic: $1"$SAMELINE
        while [ $pipeclean_retries -gt 0 ]; do
-               echo "{\"pipeclean-$1\":$pipeclean_retries}" > ./tmp/pipeclean.json
+               if [[ $1 == *".text" ]]; then
+                       echo "pipeclean-$1:$pipeclean_retries" > ./tmp/__dmaap_pipeclean.txt
+                       curlString="$MR_DMAAP_PATH$2 -X POST  -H Content-Type:text/plain -d@./tmp/__dmaap_pipeclean.txt"
+               else
+                       echo "{\"pipeclean-$1\":$pipeclean_retries}" > ./tmp/__dmaap_pipeclean.json
+                       curlString="$MR_DMAAP_PATH$2 -X POST  -H Content-Type:application/json -d@./tmp/__dmaap_pipeclean.json"
+               fi
                let pipeclean_retries=pipeclean_retries-1
-               curlString="$MR_DMAAP_PATH$2 -X POST  -H Content-Type:application/json -d@./tmp/pipeclean.json"
                result=$(__do_curl "$curlString")
                if [ $? -ne 0 ]; then
                        sleep 1
@@ -688,7 +775,7 @@ mr_print() {
 # arg: <topic-url> <json-msg>
 # (Function for test scripts)
 mr_api_send_json() {
-       __log_test_start $@
+       __log_conf_start $@
     if [ $# -ne 2 ]; then
         __print_err "<topic-url> <json-msg>" $@
         return 1
@@ -700,10 +787,139 @@ mr_api_send_json() {
 
        status=${res:${#res}-3}
        if [ $status -ne 200 ]; then
-               __log_test_fail_status_code 200 $status
+               __log_conf_fail_status_code 200 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
+
+# Send text to topic in mr-stub.
+# arg: <topic-url> <text-msg>
+# (Function for test scripts)
+mr_api_send_text() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<topic-url> <text-msg>" $@
+        return 1
+    fi
+       query=$1
+       fname=$PWD/tmp/text_payload_to_mr.txt
+       echo $2 > $fname
+       res="$(__do_curl_to_api MRSTUB POST $query $fname text/plain)"
+
+       status=${res:${#res}-3}
+       if [ $status -ne 200 ]; then
+               __log_conf_fail_status_code 200 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
+
+# Send json file to topic in mr-stub.
+# arg: <topic-url> <json-file>
+# (Function for test scripts)
+mr_api_send_json_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<topic-url> <json-file>" $@
+        return 1
+    fi
+       query=$1
+       if [ ! -f $2 ]; then
+               __log_test_fail_general "File $2 does not exist"
+               return 1
+       fi
+       #Create json array for mr
+       datafile="tmp/mr_api_send_json_file.json"
+       { echo -n "[" ; cat $2 ; echo -n "]" ;} > $datafile
+
+       res="$(__do_curl_to_api MRSTUB POST $query $datafile)"
+
+       status=${res:${#res}-3}
+       if [ $status -ne 200 ]; then
+               __log_conf_fail_status_code 200 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
+
+# Send text file to topic in mr-stub.
+# arg: <topic-url> <text-file>
+# (Function for test scripts)
+mr_api_send_text_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<topic-url> <text-file>" $@
+        return 1
+    fi
+       query=$1
+       if [ ! -f $2 ]; then
+               __log_test_fail_general "File $2 does not exist"
+               return 1
+       fi
+
+       res="$(__do_curl_to_api MRSTUB POST $query $2 text/plain)"
+
+       status=${res:${#res}-3}
+       if [ $status -ne 200 ]; then
+               __log_conf_fail_status_code 200 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
+
+# Create json file for payload
+# arg: <size-in-kb> <filename>
+mr_api_generate_json_payload_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<topic-url> <json-file>" $@
+        return 1
+    fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+               __log_conf_fail_general "Only size between 1k and 10000k supported"
                return 1
        fi
+       echo -n "{\"abcdefghijklmno\":[" > $2
+       LEN=$(($1*100-2))
+       echo -n "\""ABCDEFG"\"" >> $2
+       for ((idx=1; idx<$LEN; idx++))
+       do
+               echo -n ",\"ABCDEFG\"" >> $2
+       done
+       echo -n "]}" >> $2
+
+       __log_conf_ok
+       return 0
+}
+
+# Create text file for payload
+# arg: <size-in-kb> <filename>
+mr_api_generate_text_payload_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<topic-url> <text-file>" $@
+        return 1
+    fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+               __log_conf_fail_general "Only size between 1k and 10000k supported"
+               return 1
+       fi
+       echo -n "" > $2
+       LEN=$(($1*100))
+       for ((idx=0; idx<$LEN; idx++))
+       do
+               echo -n "ABCDEFGHIJ" >> $2
+       done
 
-       __log_test_pass
+       __log_conf_ok
        return 0
 }
similarity index 96%
rename from test/common/gateway_api_functions.sh
rename to test/common/ngw_api_functions.sh
index ee617ef..d8f1707 100644 (file)
@@ -92,6 +92,18 @@ __NGW_initial_setup() {
        use_gateway_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__NGW_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "NGW $NRT_GATEWAY_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "NGW $NRT_GATEWAY_APP_NAME"
+       fi
+}
+
 #######################################################
 
 
similarity index 92%
rename from test/common/agent_api_functions.sh
rename to test/common/pa_api_functions.sh
index a1fd657..219aedb 100644 (file)
@@ -91,6 +91,19 @@ __PA_initial_setup() {
        use_agent_rest_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__PA_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "PA $POLICY_AGENT_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "PA $POLICY_AGENT_APP_NAME"
+       fi
+}
+
+
 #######################################################
 
 ###########################
@@ -431,6 +444,126 @@ start_stopped_policy_agent() {
 }
 
 
+# Function to prepare the consul configuration according to the current simulator configuration
+# args: SDNC|NOSDNC <output-file>
+# (Function for test scripts)
+prepare_consul_config() {
+       echo -e $BOLD"Prepare Consul config"$EBOLD
+
+       echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
+
+       if [ $# != 2 ];  then
+               ((RES_CONF_FAIL++))
+       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
+               exit 1
+       fi
+
+       if [ $1 == "SDNC" ]; then
+               echo -e " Config$BOLD including SDNC$EBOLD configuration"
+       elif [ $1 == "NOSDNC" ];  then
+               echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
+       else
+               ((RES_CONF_FAIL++))
+       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
+               exit 1
+       fi
+
+       config_json="\n            {"
+       if [ $1 == "SDNC" ]; then
+               config_json=$config_json"\n   \"controller\": ["
+               config_json=$config_json"\n                     {"
+               config_json=$config_json"\n                       \"name\": \"$SDNC_APP_NAME\","
+               config_json=$config_json"\n                       \"baseUrl\": \"$SDNC_SERVICE_PATH\","
+               config_json=$config_json"\n                       \"userName\": \"$SDNC_USER\","
+               config_json=$config_json"\n                       \"password\": \"$SDNC_PWD\""
+               config_json=$config_json"\n                     }"
+               config_json=$config_json"\n   ],"
+       fi
+
+       config_json=$config_json"\n   \"streams_publishes\": {"
+       config_json=$config_json"\n                            \"dmaap_publisher\": {"
+       config_json=$config_json"\n                              \"type\": \"message-router\","
+       config_json=$config_json"\n                              \"dmaap_info\": {"
+       config_json=$config_json"\n                                \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
+       config_json=$config_json"\n                              }"
+       config_json=$config_json"\n                            }"
+       config_json=$config_json"\n   },"
+       config_json=$config_json"\n   \"streams_subscribes\": {"
+       config_json=$config_json"\n                             \"dmaap_subscriber\": {"
+       config_json=$config_json"\n                               \"type\": \"message-router\","
+       config_json=$config_json"\n                               \"dmaap_info\": {"
+       config_json=$config_json"\n                                   \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
+       config_json=$config_json"\n                                 }"
+       config_json=$config_json"\n                               }"
+       config_json=$config_json"\n   },"
+
+       config_json=$config_json"\n   \"ric\": ["
+
+       if [ $RUNMODE == "KUBE" ]; then
+               result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
+               rics=""
+               ric_cntr=0
+               if [ $? -eq 0 ] && [ ! -z "$result" ]; then
+                       for im in $result; do
+                               if [[ $im != *"-0" ]]; then
+                                       ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
+                                       rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
+                                       let ric_cntr=ric_cntr+1
+                               fi
+                       done
+               fi
+               if [ $ric_cntr -eq 0 ]; then
+                       echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
+               fi
+       else
+               rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+               if [ $? -ne 0 ] || [ -z "$rics" ]; then
+                       echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
+                       ((RES_CONF_FAIL++))
+                       return 1
+               fi
+       fi
+       cntr=0
+       for ric in $rics; do
+               if [ $cntr -gt 0 ]; then
+                       config_json=$config_json"\n          ,"
+               fi
+               config_json=$config_json"\n          {"
+               if [ $RUNMODE == "KUBE" ]; then
+                       ric_id=${ric%.*.*} #extract pod id from full hostname
+                       ric_id=$(echo "$ric_id" | tr '-' '_')
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               ric_id=$ric
+                       else
+                               ric_id=$(echo "$ric" | tr '-' '_')  #ric id still needs underscore as it is different from the container name
+                       fi
+               fi
+               echo " Found a1 sim: "$ric_id
+               config_json=$config_json"\n            \"name\": \"$ric_id\","
+               config_json=$config_json"\n            \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
+               if [ $1 == "SDNC" ]; then
+                       config_json=$config_json"\n            \"controller\": \"$SDNC_APP_NAME\","
+               fi
+               config_json=$config_json"\n            \"managedElementIds\": ["
+               config_json=$config_json"\n              \"me1_$ric_id\","
+               config_json=$config_json"\n              \"me2_$ric_id\""
+               config_json=$config_json"\n            ]"
+               config_json=$config_json"\n          }"
+               let cntr=cntr+1
+       done
+
+       config_json=$config_json"\n           ]"
+       config_json=$config_json"\n}"
+
+       if [ $RUNMODE == "KUBE" ]; then
+               config_json="{\"config\":"$config_json"}"
+       fi
+
+       printf "$config_json">$2
+
+       echo ""
+}
 
 # Load the the appl config for the agent into a config map
 agent_load_config() {
index bb4ccf5..17f987a 100644 (file)
@@ -107,6 +107,18 @@ __PRODSTUB_initial_setup() {
        use_prod_stub_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__PRODSTUB_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "PRODSTUB $PROD_STUB_APP_NAME $KUBE_SIM_NAMESPACE"
+       else
+               echo "PRODSTUB $PROD_STUB_APP_NAME"
+       fi
+}
+
 #######################################################
 
 # Set http as the protocol to use for all communication to the Prod stub sim
@@ -149,7 +161,6 @@ __prod_stub_set_protocoll() {
 # args:
 __prodstub_export_vars() {
        export PROD_STUB_APP_NAME
-       export PROD_STUB_APP_NAME_ALIAS
        export PROD_STUB_DISPLAY_NAME
 
        export DOCKER_SIM_NWNAME
index 62c2d43..5d37bd0 100644 (file)
@@ -90,6 +90,14 @@ __PVCCLEANER_initial_setup() {
        :
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__PVCCLEANER_statisics_setup() {
+       echo ""
+}
+
 #######################################################
 
 # This is a system app, all usage in testcase_common.sh
\ No newline at end of file
similarity index 96%
rename from test/common/rapp_catalogue_api_functions.sh
rename to test/common/rc_api_functions.sh
index 52416d3..537bc0c 100644 (file)
@@ -84,6 +84,18 @@ __RC_initial_setup() {
        use_rapp_catalogue_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__RC_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "RC $RAPP_CAT_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+       else
+               echo "RC $RAPP_CAT_APP_NAME"
+       fi
+}
+
 #######################################################
 
 # Set http as the protocol to use for all communication to the Rapp catalogue
similarity index 86%
rename from test/common/ricsimulator_api_functions.sh
rename to test/common/ricsim_api_functions.sh
index f760313..435c208 100644 (file)
@@ -91,20 +91,39 @@ __RICSIM_initial_setup() {
        use_simulator_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__RICSIM_statisics_setup() {
+       for ((RICSIM_INSTANCE=10; RICSIM_INSTANCE>0; RICSIM_INSTANCE-- )); do
+               if [ $RUNMODE == "KUBE" ]; then
+                       RICSIM_INSTANCE_KUBE=$(($RICSIM_INSTANCE-1))
+                       echo -n " RICSIM_G1_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+                       echo -n " RICSIM_G2_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+                       echo -n " RICSIM_G3_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g1_$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g2_$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g3_$RICSIM_INSTANCE "
+                       else
+                               echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE "
+                       fi
+               fi
+       done
+}
+
 #######################################################
 
 
 RIC_SIM_HTTPX="http"
-RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
 RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
 
 
-#Vars for A1 interface version and container count
-G1_A1_VERSION=""
-G2_A1_VERSION=""
-G3_A1_VERSION=""
-G4_A1_VERSION=""
-G5_A1_VERSION=""
+#Vars for container count
 G1_COUNT=0
 G2_COUNT=0
 G3_COUNT=0
@@ -120,7 +139,6 @@ use_simulator_http() {
        echo -e $BOLD"RICSIM protocol setting"$EBOLD
        echo -e " Using $BOLD http $EBOLD towards the simulators"
        RIC_SIM_HTTPX="http"
-       RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
        RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
        echo ""
 }
@@ -129,7 +147,6 @@ use_simulator_https() {
        echo -e $BOLD"RICSIM protocol setting"$EBOLD
        echo -e " Using $BOLD https $EBOLD towards the simulators"
        RIC_SIM_HTTPX="https"
-       RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
        RIC_SIM_PORT=$RIC_SIM_INTERNAL_SECURE_PORT
        echo ""
 }
@@ -189,19 +206,14 @@ start_ric_simulators() {
        #Set env var for simulator count and A1 interface vesion for the given group
        if [ $1 == "$RIC1" ]; then
                G1_COUNT=$2
-               G1_A1_VERSION=$3
        elif [ $1 == "$RIC2" ]; then
                G2_COUNT=$2
-               G2_A1_VERSION=$3
        elif [ $1 == "$RIC3" ]; then
                G3_COUNT=$2
-               G3_A1_VERSION=$3
        elif [ $1 == "$RIC4" ]; then
                G4_COUNT=$2
-               G4_A1_VERSION=$3
        elif [ $1 == "$RIC5" ]; then
                G5_COUNT=$2
-               G5_A1_VERSION=$3
        else
                ((RES_CONF_FAIL++))
                __print_err "need three args, $RIC1|$RIC2|$RIC3|$RIC4|$RIC5 <count> <interface-id>" $@
@@ -256,22 +268,34 @@ start_ric_simulators() {
                # Create .env file to compose project, all ric container will get this prefix
                echo "COMPOSE_PROJECT_NAME="$RIC_SIM_PREFIX > $SIM_GROUP/$RIC_SIM_COMPOSE_DIR/.env
 
-               export G1_A1_VERSION
-               export G2_A1_VERSION
-               export G3_A1_VERSION
-               export G4_A1_VERSION
-               export G5_A1_VERSION
+               #extract service name (group), g1, g2, g3, g4 or g5 from var $1
+               #E.g. ricsim_g1 -> g1 is the service name
+               TMP_GRP=$1
+               RICSIM_COMPOSE_SERVICE_NAME=$(echo "${TMP_GRP##*_}")
+
+               export RICSIM_COMPOSE_A1_VERSION=$3
+               export RICSIM_COMPOSE_SERVICE_NAME
                export RIC_SIM_INTERNAL_PORT
                export RIC_SIM_INTERNAL_SECURE_PORT
                export RIC_SIM_CERT_MOUNT_DIR
                export DOCKER_SIM_NWNAME
                export RIC_SIM_DISPLAY_NAME
 
-               docker_args="--scale g1=$G1_COUNT --scale g2=$G2_COUNT --scale g3=$G3_COUNT --scale g4=$G4_COUNT --scale g5=$G5_COUNT"
+               docker_args="--no-recreate --scale $RICSIM_COMPOSE_SERVICE_NAME=$2"
+
+               #Create a list of container names
+               #Will be <ricsim-prefix>_<service-name>_<index>
+               # or
+               # <ricsim-prefix>-<service-name>-<index>
                app_data=""
                cntr=1
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       app_name_prefix=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"
+               else
+                       app_name_prefix=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"
+               fi
                while [ $cntr -le $2 ]; do
-                       app=$1"_"$cntr
+                       app=$app_name_prefix$cntr
                        app_data="$app_data $app"
                        let cntr=cntr+1
                done
@@ -280,7 +304,11 @@ start_ric_simulators() {
 
                cntr=1
                while [ $cntr -le $2 ]; do
-                       app=$1"_"$cntr
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"$cntr
+                       else
+                               app=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"$cntr
+                       fi
                        __check_service_start $app $RIC_SIM_HTTPX"://"$app:$RIC_SIM_PORT$RIC_SIM_ALIVE_URL
                        let cntr=cntr+1
                done
@@ -310,7 +338,12 @@ __find_sim_host() {
                ric_setname="${ricname%-*}"  #Extract the stateful set name
                echo $RIC_SIM_HTTPX"://"$ricname.$ric_setname.$KUBE_A1SIM_NAMESPACE":"$RIC_SIM_PORT
        else
-               echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+               else
+                       ricname=$(echo "$1" | tr '_' '-')
+                       echo $RIC_SIM_HTTPX"://"$ricname":"$RIC_SIM_PORT
+               fi
 
        fi
 }
similarity index 95%
rename from test/common/controller_api_functions.sh
rename to test/common/sdnc_api_functions.sh
index 4027f30..b3ef07b 100644 (file)
@@ -73,7 +73,7 @@ __SDNC_image_data() {
 # All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
 # This function is called for apps fully managed by the test script
 __SDNC_kube_scale_zero() {
-       __kube_scale_all_resources $KUBE_SNDC_NAMESPACE autotest SDNC
+       __kube_scale_all_resources $KUBE_SDNC_NAMESPACE autotest SDNC
 }
 
 # Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
@@ -85,7 +85,7 @@ __SDNC_kube_scale_zero_and_wait() {
 # Delete all kube resouces for the app
 # This function is called for apps managed by the test script.
 __SDNC_kube_delete_all() {
-       __kube_delete_all_resources $KUBE_SNDC_NAMESPACE autotest SDNC
+       __kube_delete_all_resources $KUBE_SDNC_NAMESPACE autotest SDNC
 }
 
 # Store docker logs
@@ -93,9 +93,9 @@ __SDNC_kube_delete_all() {
 # args: <log-dir> <file-prexix>
 __SDNC_store_docker_logs() {
        if [ $RUNMODE == "KUBE" ]; then
-               kubectl  logs -l "autotest=SDNC" -n $KUBE_SNDC_NAMESPACE --tail=-1 > $1$2_SDNC.log 2>&1
-               podname=$(kubectl get pods -n $KUBE_SNDC_NAMESPACE -l "autotest=SDNC" -o custom-columns=":metadata.name")
-               kubectl exec -t -n $KUBE_SNDC_NAMESPACE $podname -- cat $SDNC_KARAF_LOG> $1$2_SDNC_karaf.log 2>&1
+               kubectl  logs -l "autotest=SDNC" -n $KUBE_SDNC_NAMESPACE --tail=-1 > $1$2_SDNC.log 2>&1
+               podname=$(kubectl get pods -n $KUBE_SDNC_NAMESPACE -l "autotest=SDNC" -o custom-columns=":metadata.name")
+               kubectl exec -t -n $KUBE_SDNC_NAMESPACE $podname -- cat $SDNC_KARAF_LOG> $1$2_SDNC_karaf.log 2>&1
        else
                docker exec -t $SDNC_APP_NAME cat $SDNC_KARAF_LOG> $1$2_SDNC_karaf.log 2>&1
        fi
@@ -108,6 +108,18 @@ __SDNC_initial_setup() {
        use_sdnc_http
 }
 
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__SDNC_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "SDNC $SDNC_APP_NAME $KUBE_SDNC_NAMESPACE"
+       else
+               echo "SDNC $SDNC_APP_NAME"
+       fi
+}
+
 #######################################################
 
 # Set http as the protocol to use for all communication to SDNC
@@ -135,8 +147,8 @@ __sdnc_set_protocoll() {
        SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME":"$2  # docker access, container->container and script->container via proxy
        SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME":"$1$SDNC_API_URL
        if [ $RUNMODE == "KUBE" ]; then
-               SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME.$KUBE_SNDC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
-               SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.KUBE_SNDC_NAMESPACE":"$1$SDNC_API_URL
+               SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME.$KUBE_SDNC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+               SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.KUBE_SDNC_NAMESPACE":"$1$SDNC_API_URL
        fi
        echo ""
 
@@ -145,7 +157,7 @@ __sdnc_set_protocoll() {
 # Export env vars for config files, docker compose and kube resources
 # args:
 __sdnc_export_vars() {
-       export KUBE_SNDC_NAMESPACE
+       export KUBE_SDNC_NAMESPACE
        export DOCKER_SIM_NWNAME
 
        export SDNC_APP_NAME
@@ -199,7 +211,7 @@ start_sdnc() {
                if [ $retcode_p -eq 0 ]; then
                        echo -e " Using existing $SDNC_APP_NAME deployment and service"
                        echo " Setting SDNC replicas=1"
-                       __kube_scale deployment $SDNC_APP_NAME $KUBE_SNDC_NAMESPACE 1
+                       __kube_scale deployment $SDNC_APP_NAME $KUBE_SDNC_NAMESPACE 1
                fi
 
                                # Check if app shall be fully managed by the test script
@@ -208,7 +220,7 @@ start_sdnc() {
                        echo -e " Creating $SDNC_APP_NAME app and expose service"
 
                        #Check if namespace exists, if not create it
-                       __kube_create_namespace $KUBE_SNDC_NAMESPACE
+                       __kube_create_namespace $KUBE_SDNC_NAMESPACE
 
                        __sdnc_export_vars
 
index 8344f38..3254e06 100755 (executable)
@@ -161,9 +161,9 @@ DOCKER_SIM_NWNAME="nonrtric-docker-net"                  # Name of docker privat
 
 KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all nonrtric components
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim"                          # Namespace for a1-p simulators (RICSIM)
+KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -189,7 +189,7 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -210,10 +210,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt"                              # Config files dir on localhost
-
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -222,7 +226,9 @@ CR_INTERNAL_PORT=8090                                    # Callback receiver con
 CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver container external secure port (host -> container)
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 CONSUL_HOST="consul-server"                              # Host name of consul
@@ -310,6 +316,12 @@ KUBE_PROXY_WEB_EXTERNAL_PORT=8731                        # Kube Http Proxy conta
 KUBE_PROXY_WEB_INTERNAL_PORT=8081                        # Kube Http Proxy container internal port (container -> container)
 KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783                 # Kube Proxy container external secure port (host -> container)
 KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container
+
+KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
+
 KUBE_PROXY_PATH=""                                       # Proxy url path, will be set if proxy is started
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
index 00e5d4b..f82d54f 100755 (executable)
@@ -185,9 +185,9 @@ DOCKER_SIM_NWNAME="nonrtric-docker-net"                  # Name of docker privat
 
 KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all nonrtric components
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim"                          # Namespace for a1-p simulators (RICSIM)
+KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -233,7 +233,7 @@ ECS_CONFIG_FILE=application.yaml                         # Config file name
 ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
 ECS_FEATURE_LEVEL=""                                     # Space separated list of features
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -254,9 +254,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt"                              # Config files dir on localhost
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -266,7 +271,9 @@ CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -378,6 +385,12 @@ KUBE_PROXY_WEB_EXTERNAL_PORT=8731                        # Kube Http Proxy conta
 KUBE_PROXY_WEB_INTERNAL_PORT=8081                        # Kube Http Proxy container internal port (container -> container)
 KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783                 # Kube Proxy container external secure port (host -> container)
 KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container
+
+KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
+
 KUBE_PROXY_PATH=""                                       # Proxy url path, will be set if proxy is started
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
index f8c411f..b2b0d2e 100644 (file)
@@ -69,10 +69,10 @@ NEXUS_RELEASE_REPO_ONAP=$NEXUS_RELEASE_REPO
 
 # Policy Agent image and tags
 POLICY_AGENT_IMAGE_BASE="onap/ccsdk-oran-a1policymanagementservice"
-POLICY_AGENT_IMAGE_TAG_LOCAL="1.3.0-SNAPSHOT"
-POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="1.3.0-SNAPSHOT"
-POLICY_AGENT_IMAGE_TAG_REMOTE="1.3.0-STAGING-latest" #Will use snapshot repo
-POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="1.3.0"
+POLICY_AGENT_IMAGE_TAG_LOCAL="1.2.4-SNAPSHOT"
+POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.4-SNAPSHOT"
+POLICY_AGENT_IMAGE_TAG_REMOTE="1.2.4-STAGING-latest" #Will use snapshot repo
+POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="1.2.3"
 
 # SDNC A1 Controller remote image and tag
 SDNC_A1_CONTROLLER_IMAGE_BASE="onap/sdnc-image"
@@ -146,17 +146,17 @@ HTTP_PROXY_IMAGE_TAG_LOCAL="latest"
 
 #ONAP Zookeeper remote image and tag
 ONAP_ZOOKEEPER_IMAGE_BASE="onap/dmaap/zookeeper"
-ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.0.3"
+ONAP_ZOOKEEPER_IMAGE_TAG_REMOTE_RELEASE_ONAP="6.1.0"
 #No local image for ONAP Zookeeper, remote image always used
 
 #ONAP Kafka remote image and tag
 ONAP_KAFKA_IMAGE_BASE="onap/dmaap/kafka111"
-ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.0.4"
+ONAP_KAFKA_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.1"
 #No local image for ONAP Kafka, remote image always used
 
 #ONAP DMAAP-MR remote image and tag
 ONAP_DMAAPMR_IMAGE_BASE="onap/dmaap/dmaap-mr"
-ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.1.18"
+ONAP_DMAAPMR_IMAGE_TAG_REMOTE_RELEASE_ONAP="1.3.0"
 #No local image for ONAP DMAAP-MR, remote image always used
 
 #Kube proxy remote image and tag
@@ -188,9 +188,9 @@ DOCKER_SIM_NWNAME="nonrtric-docker-net"                  # Name of docker privat
 
 KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all nonrtric components
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim"                          # Namespace for a1-p simulators (RICSIM)
+KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -236,7 +236,7 @@ ECS_CONFIG_FILE=application.yaml                         # Config file name
 ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
 ECS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -257,9 +257,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt2"                             # Config files dir on localhost
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs1"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -269,7 +274,9 @@ CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -397,6 +404,12 @@ KUBE_PROXY_WEB_EXTERNAL_PORT=8731                        # Kube Http Proxy conta
 KUBE_PROXY_WEB_INTERNAL_PORT=8081                        # Kube Http Proxy container internal port (container -> container)
 KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783                 # Kube Proxy container external secure port (host -> container)
 KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container
+
+KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
+
 KUBE_PROXY_PATH=""                                       # Proxy url path, will be set if proxy is started
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
index 43077ea..b1204cc 100755 (executable)
@@ -188,9 +188,9 @@ DOCKER_SIM_NWNAME="nonrtric-docker-net"                  # Name of docker privat
 
 KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all nonrtric components
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim"                          # Namespace for a1-p simulators (RICSIM)
+KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -236,7 +236,7 @@ ECS_CONFIG_FILE=application.yaml                         # Config file name
 ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
 ECS_FEATURE_LEVEL=""                                     # Space separated list of features
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -257,10 +257,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt"                              # Config files dir on localhost
-
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -269,7 +273,9 @@ CR_INTERNAL_PORT=8090                                    # Callback receiver con
 CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver container external secure port (host -> container)
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -378,6 +384,12 @@ KUBE_PROXY_WEB_EXTERNAL_PORT=8731                        # Kube Http Proxy conta
 KUBE_PROXY_WEB_INTERNAL_PORT=8081                        # Kube Http Proxy container internal port (container -> container)
 KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783                 # Kube Proxy container external secure port (host -> container)
 KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container
+
+KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
+
 KUBE_PROXY_PATH=""                                       # Proxy url path, will be set if proxy is started
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
index cc510d5..cde9849 100755 (executable)
@@ -207,9 +207,9 @@ DOCKER_SIM_NWNAME="nonrtric-docker-net"                  # Name of docker privat
 
 KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all nonrtric components
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
-KUBE_A1SIM_NAMESPACE="a1-sim"                          # Namespace for a1-p simulators (RICSIM)
+KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -255,7 +255,7 @@ ECS_CONFIG_FILE=application.yaml                         # Config file name
 ECS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
 ECS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -276,10 +276,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt"                              # Config files dir on localhost
-
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback receiver"
@@ -288,7 +292,9 @@ CR_INTERNAL_PORT=8090                                    # Callback receiver con
 CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver container external secure port (host -> container)
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -441,6 +447,12 @@ KUBE_PROXY_WEB_EXTERNAL_PORT=8731                        # Kube Http Proxy conta
 KUBE_PROXY_WEB_INTERNAL_PORT=8081                        # Kube Http Proxy container internal port (container -> container)
 KUBE_PROXY_WEB_EXTERNAL_SECURE_PORT=8783                 # Kube Proxy container external secure port (host -> container)
 KUBE_PROXY_WEB_INTERNAL_SECURE_PORT=8434                 # Kube Proxy container internal secure port (container -> container
+
+KUBE_PROXY_DOCKER_EXTERNAL_PORT=8732                     # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_DOCKER_EXTERNAL_SECURE_PORT=8784              # Kube Proxy container external secure port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_PORT=8733                 # Kube Http Proxy container external port, docker (host -> container)
+KUBE_PROXY_WEB_DOCKER_EXTERNAL_SECURE_PORT=8785          # Kube Proxy container external secure port, docker (host -> container)
+
 KUBE_PROXY_PATH=""                                       # Proxy url path, will be set if proxy is started
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
index e2b53da..cd0d040 100755 (executable)
@@ -235,7 +235,7 @@ KUBE_NONRTRIC_NAMESPACE="nonrtric"                       # Namespace for all non
 KUBE_SIM_NAMESPACE="nonrtric-ft"                         # Namespace for simulators (except MR and RICSIM)
 KUBE_A1SIM_NAMESPACE="a1-sim"                            # Namespace for a1-p simulators (RICSIM)
 KUBE_ONAP_NAMESPACE="onap"                               # Namespace for onap (only message router)
-KUBE_SNDC_NAMESPACE="onap"                               # Namespace for sdnc
+KUBE_SDNC_NAMESPACE="onap"                               # Namespace for sdnc
 
 POLICY_AGENT_EXTERNAL_PORT=8081                          # Policy Agent container external port (host -> container)
 POLICY_AGENT_INTERNAL_PORT=8081                          # Policy Agent container internal port (container -> container)
@@ -281,7 +281,7 @@ ECS_CONFIG_FILE=application.yaml                         # Config file name
 ECS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
 ECS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO"  # Space separated list of features
 
-MR_DMAAP_APP_NAME="dmaap-mr"                             # Name for the Dmaap MR
+MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
 MR_DMAAP_DISPLAY_NAME="DMAAP Message Router"
 MR_STUB_DISPLAY_NAME="Message Router stub"
@@ -302,10 +302,14 @@ MR_STUB_ALIVE_URL="/"                                    # Base path for mr stub
 MR_DMAAP_ALIVE_URL="/topics"                             # Base path for dmaap-mr alive check
 MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_group for dmaap mr for - docker-compose
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
-MR_KAFKA_APP_NAME="kafka"                                # Kafka app name
+MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
+MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
-MR_DMAAP_HOST_MNT_DIR="/mnt2"                            # Config files dir on localhost
-
+MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
+MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
+MR_DMAAP_HOST_CONFIG_DIR="/configs1"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback receiver"
@@ -315,7 +319,8 @@ CR_EXTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver container internal secure port (container -> container)
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -478,6 +483,10 @@ KUBE_PROXY_PATH=""                                       # Proxy url path, will
 KUBE_PROXY_ALIVE_URL="/"                                 # Base path for alive check
 KUBE_PROXY_COMPOSE_DIR="kubeproxy"                       # Dir in simulator_group for docker-compose
 
+PVC_CLEANER_APP_NAME="pvc-cleaner"                      # Name for Persistent Volume Cleaner container
+PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner"    # Display name for Persistent Volume Cleaner
+PVC_CLEANER_COMPOSE_DIR="pvc-cleaner"                   # Dir in simulator_group for yamls
+
 DMAAP_ADP_APP_NAME="dmaapadapterservice"                 # Name for Dmaap Adapter container
 DMAAP_ADP_DISPLAY_NAME="Dmaap Adapter Service"           # Display name for Dmaap Adapter container
 DMAAP_ADP_EXTERNAL_PORT=9087                             # Dmaap Adapter container external port (host -> container)
@@ -511,18 +520,13 @@ DMAAP_MED_HOST_MNT_DIR="./mnt"                          # Mounted db dir, relati
 #DMAAP_MED_CERT_MOUNT_DIR="./cert"
 DMAAP_MED_ALIVE_URL="/status"                            # Base path for alive check
 DMAAP_MED_COMPOSE_DIR="dmaapmed"                         # Dir in simulator_group for docker-compose
-#MAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration
-DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file
-DMAAP_MED_DATA_FILE="type_config.json"  # Container data file name
-#DMAAP_MED_CONFIG_FILE=application.yaml                   # Config file name
-
-PVC_CLEANER_APP_NAME="pvc-cleaner"                      # Name for Persistent Volume Cleaner container
-PVC_CLEANER_DISPLAY_NAME="Persistent Volume Cleaner"    # Display name for Persistent Volume Cleaner
-PVC_CLEANER_COMPOSE_DIR="pvc-cleaner"                   # Dir in simulator_group for yamls
+#DMAAP_MED_CONFIG_MOUNT_PATH="/app"                      # Internal container path for configuration
+DMAAP_MED_DATA_MOUNT_PATH="/configs"                     # Path in container for data file
+DMAAP_MED_DATA_FILE="type_config.json"                   # Container data file name
 
 ########################################
 # Setting for common curl-base function
 ########################################
 
-UUID=""                                                   # UUID used as prefix to the policy id to simulate a real UUID
-                                                          # Testscript need to set the UUID otherwise this empty prefix is used
+UUID=""                                                  # UUID used as prefix to the policy id to simulate a real UUID
+                                                         # Testscript need to set the UUID otherwise this empty prefix is used
index 8d832d7..e1c90d6 100755 (executable)
@@ -28,7 +28,7 @@ __print_args() {
        echo "      [--ricsim-prefix <prefix> ] [--use-local-image <app-nam>+]  [--use-snapshot-image <app-nam>+]"
        echo "      [--use-staging-image <app-nam>+] [--use-release-image <app-nam>+] [--image-repo <repo-address]"
        echo "      [--repo-policy local|remote] [--cluster-timeout <timeout-in seconds>] [--print-stats]"
-       echo "      [--override <override-environment-filename> --pre-clean]"
+       echo "      [--override <override-environment-filename> --pre-clean --gen-stats]"
 }
 
 if [ $# -eq 1 ] && [ "$1" == "help" ]; then
@@ -59,6 +59,7 @@ if [ $# -eq 1 ] && [ "$1" == "help" ]; then
        echo "--print-stats         -  Print current test stats after each test."
        echo "--override <file>     -  Override setting from the file supplied by --env-file"
        echo "--pre-clean           -  Will clean kube resouces when running docker and vice versa"
+       echo "--gen-stats           -  Collect container/pod runtime statistics"
 
        echo ""
        echo "List of app short names supported: "$APP_SHORT_NAMES
@@ -127,6 +128,11 @@ STOP_AT_ERROR=0
 # Applies only to images defined in the test-env files with image names and tags defined as XXXX_RELEASE
 IMAGE_CATEGORY="DEV"
 
+#Var to indicate docker-compose version, V1 or V2
+#V1 names replicated containers <proj-name>_<service-name>_<index>
+#V2 names replicated containers <proj-name>-<service-name>-<index>
+DOCKER_COMPOSE_VERION="V1"
+
 # Function to indent cmd output with one space
 indent1() { sed 's/^/ /'; }
 
@@ -207,6 +213,9 @@ RES_DEVIATION=0
 #Var to control if current stats shall be printed
 PRINT_CURRENT_STATS=0
 
+#Var to control if container/pod runtime statistics shall be collected
+COLLECT_RUNTIME_STATS=0
+
 #File to keep deviation messages
 DEVIATION_FILE=".tmp_deviations"
 rm $DEVIATION_FILE &> /dev/null
@@ -222,6 +231,9 @@ trap_fnc() {
 }
 trap trap_fnc ERR
 
+# Trap to kill subprocesses
+trap "kill 0" EXIT
+
 # Counter for tests
 TEST_SEQUENCE_NR=1
 
@@ -338,9 +350,16 @@ __log_conf_ok() {
 #Var for measuring execution time
 TCTEST_START=$SECONDS
 
+#Vars to hold the start time and timer text for a custom timer
+TC_TIMER_STARTTIME=""
+TC_TIMER_TIMER_TEXT=""
+TC_TIMER_CURRENT_FAILS="" # The number of failed tests when timer starts.
+                          # Compared with the current number of fails at timer stop
+                                                 # to judge the measurement reliability
+
 #File to save timer measurement results
 TIMER_MEASUREMENTS=".timer_measurement.txt"
-echo -e "Activity \t Duration" > $TIMER_MEASUREMENTS
+echo -e "Activity \t Duration \t Info" > $TIMER_MEASUREMENTS
 
 # If this is set, some images (control by the parameter repo-polcy) will be re-tagged and pushed to this repo before any
 IMAGE_REPO_ADR=""
@@ -652,6 +671,15 @@ while [ $paramerror -eq 0 ] && [ $foundparm -eq 0 ]; do
                        foundparm=0
                fi
        fi
+       if [ $paramerror -eq 0 ]; then
+               if [ "$1" == "--gen-stats" ]; then
+                       COLLECT_RUNTIME_STATS=1
+                       echo "Option set - Collect runtime statistics"
+                       shift;
+                       foundparm=0
+               fi
+       fi
+
 done
 echo ""
 
@@ -746,35 +774,58 @@ if [ ! -z "$TMP_APPS" ]; then
 else
        echo " None"
 fi
+
+echo -e $BOLD"Auto adding included apps"$EBOLD
+       for iapp in $INCLUDED_IMAGES; do
+               file_pointer=$(echo $iapp | tr '[:upper:]' '[:lower:]')
+               file_pointer="../common/"$file_pointer"_api_functions.sh"
+               echo " Auto-adding included app $iapp.  Sourcing $file_pointer"
+               . $file_pointer
+               if [ ! -f "$file_pointer" ]; then
+                       echo " Include file $file_pointer for app $iapp does not exist"
+                       exit 1
+               fi
+       done
 echo ""
 
+echo -e $BOLD"Test environment info"$EBOLD
+
 # Check needed installed sw
+
+tmp=$(which bash)
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
+       echo -e $RED"bash is required to run the test environment, pls install"$ERED
+       exit 1
+fi
+echo " bash is installed and using version:"
+echo "$(bash --version)" | indent2
+
 tmp=$(which python3)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        echo -e $RED"python3 is required to run the test environment, pls install"$ERED
        exit 1
 fi
+echo " python3 is installed and using version: $(python3 --version)"
+
 tmp=$(which docker)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        echo -e $RED"docker is required to run the test environment, pls install"$ERED
        exit 1
 fi
+echo " docker is installed and using versions:"
+echo  "  $(docker version --format 'Client version {{.Client.Version}} Server version {{.Server.Version}}')"
 
 tmp=$(which docker-compose)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        if [ $RUNMODE == "DOCKER" ]; then
                echo -e $RED"docker-compose is required to run the test environment, pls install"$ERED
                exit 1
        fi
 fi
-if [ $RUNMODE == "DOCKER" ]; then
-       tmp=$(docker-compose version | grep -i 'Docker Compose version')
-       if [[ "$tmp" == *'v2'* ]]; then
-               echo -e $RED"docker-compose is using docker-compose version 2"$ERED
-               echo -e $RED"The test environment only support version 1"$ERED
-               echo -e $RED"Disable version 2 by cmd 'docker-compose disable-v2' and re-run the script "$ERED
-               exit 1
-       fi
+tmp=$(docker-compose version --short)
+echo " docker-compose installed and using version $tmp"
+if [[ "$tmp" == *'v2'* ]]; then
+       DOCKER_COMPOSE_VERION="V2"
 fi
 
 tmp=$(which kubectl)
@@ -785,6 +836,8 @@ if [ $? -ne 0 ] || [ -z tmp ]; then
        fi
 else
        if [ $RUNMODE == "KUBE" ]; then
+               echo " kubectl is installed and using versions:"
+               echo $(kubectl version --short=true) | indent2
                res=$(kubectl cluster-info 2>&1)
                if [ $? -ne 0 ]; then
                        echo -e "$BOLD$RED############################################# $ERED$EBOLD"
@@ -816,6 +869,8 @@ else
        fi
 fi
 
+echo ""
+
 echo -e $BOLD"Checking configured image setting for this test case"$EBOLD
 
 #Temp var to check for image variable name errors
@@ -1449,6 +1504,8 @@ setup_testenvironment() {
        echo -e $BOLD"======================================================="$EBOLD
        echo ""
 
+       LOG_STAT_ARGS=""
+
        for imagename in $APP_SHORT_NAMES; do
                __check_included_image $imagename
                retcode_i=$?
@@ -1464,9 +1521,16 @@ setup_testenvironment() {
 
                        function_pointer="__"$imagename"_initial_setup"
                        $function_pointer
+
+                       function_pointer="__"$imagename"_statisics_setup"
+                       LOG_STAT_ARGS=$LOG_STAT_ARGS" "$($function_pointer)
                fi
        done
 
+       if [ $COLLECT_RUNTIME_STATS -eq 1 ]; then
+               ../common/genstat.sh $RUNMODE $SECONDS $TESTLOGS/$ATC/stat_data.csv $LOG_STAT_ARGS &
+       fi
+
 }
 
 # Function to print the test result, shall be the last cmd in a test script
@@ -1498,8 +1562,16 @@ print_result() {
        echo "Timer measurement in the test script"
        echo "===================================="
        column -t -s $'\t' $TIMER_MEASUREMENTS
+       if [ $RES_PASS != $RES_TEST ]; then
+               echo -e $RED"Measurement may not be reliable when there are failed test - failures may cause long measurement values due to timeouts etc."$ERED
+       fi
        echo ""
 
+       if [ $COLLECT_RUNTIME_STATS -eq 1 ]; then
+               echo "Runtime statistics collected in file: "$TESTLOGS/$ATC/stat_data.csv
+               echo ""
+       fi
+
        total=$((RES_PASS+RES_FAIL))
        if [ $RES_TEST -eq 0 ]; then
                echo -e "\033[1mNo tests seem to have been executed. Check the script....\033[0m"
@@ -1574,57 +1646,44 @@ print_result() {
 #####################################################################
 
 # Start timer for time measurement
-# args - (any args will be printed though)
+# args:  <timer message to print>  -  timer value and message will be printed both on screen
+#                                     and in the timer measurement report - if at least one "print_timer is called"
 start_timer() {
        echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       TC_TIMER=$SECONDS
+       TC_TIMER_STARTTIME=$SECONDS
+       TC_TIMER_TIMER_TEXT="${@:1}"
+       if [ $# -ne 1 ]; then
+               __print_err "need 1 arg,  <timer message to print>" $@
+               TC_TIMER_TIMER_TEXT=${FUNCNAME[0]}":"${BASH_LINENO[0]}
+               echo " Assigning timer name: "$TC_TIMER_TIMER_TEXT
+       fi
+       TC_TIMER_CURRENT_FAILS=$(($RES_FAIL+$RES_CONF_FAIL))
        echo " Timer started: $(date)"
 }
 
-# Print the value of the time (in seconds)
-# args - <timer message to print>  -  timer value and message will be printed both on screen
-#                                     and in the timer measurement report
+# Print the running timer  the value of the time (in seconds)
+# Timer value and message will be printed both on screen and in the timer measurement report
 print_timer() {
-       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       if [ $# -lt 1 ]; then
-               ((RES_CONF_FAIL++))
-       __print_err "need 1 or more args,  <timer message to print>" $@
-               exit 1
+       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $TC_TIMER_TIMER_TEXT $EBOLD
+       if [ -z  "$TC_TIMER_STARTTIME" ]; then
+               __print_err "timer not started" $@
+               return 1
        fi
-       duration=$(($SECONDS-$TC_TIMER))
+       duration=$(($SECONDS-$TC_TIMER_STARTTIME))
        if [ $duration -eq 0 ]; then
                duration="<1 second"
        else
                duration=$duration" seconds"
        fi
        echo " Timer duration :" $duration
-
-       echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
-}
-
-# Print the value of the time (in seconds) and reset the timer
-# args - <timer message to print>  -  timer value and message will be printed both on screen
-#                                     and in the timer measurement report
-print_and_reset_timer() {
-       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       if [ $# -lt 1 ]; then
-               ((RES_CONF_FAIL++))
-       __print_err "need 1 or more args,  <timer message to print>" $@
-               exit 1
+       res="-"
+       if [ $(($RES_FAIL+$RES_CONF_FAIL)) -ne $TC_TIMER_CURRENT_FAILS ]; then
+               res="Failures occured during test - timer not reliabled"
        fi
-       duration=$(($SECONDS-$TC_TIMER))" seconds"
-       if [ $duration -eq 0 ]; then
-               duration="<1 second"
-       else
-               duration=$duration" seconds"
-       fi
-       echo " Timer duration :" $duration
-       TC_TIMER=$SECONDS
-       echo " Timer reset"
-
-       echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
 
+       echo -e "$TC_TIMER_TIMER_TEXT \t $duration \t $res" >> $TIMER_MEASUREMENTS
 }
+
 # Print info about a deviations from intended tests
 # Each deviation counted is also printed in the testreport
 # args <deviation message to print>
@@ -1667,6 +1726,10 @@ __clean_containers() {
        for imagename in $APP_SHORT_NAMES; do
                docker ps -a --filter "label=nrttest_app=$imagename"  --filter "network=$DOCKER_SIM_NWNAME" --format ' {{.Label "nrttest_dp"}}\n{{.Label "nrttest_app"}}\n{{.Names}}' >> $running_contr_file
        done
+       running_contr_file_empty="No docker containers running, started by previous test execution"
+       if [ -s $running_contr_file ]; then
+               running_contr_file_empty=""
+       fi
 
        # Kill all containers started by the test env - to speed up shut down
     docker kill $(docker ps -a  --filter "label=nrttest_app" --format '{{.Names}}') &> /dev/null
@@ -1714,37 +1777,41 @@ __clean_containers() {
                tab_heading3="$tab_heading3"" "
        done
 
-       echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
-       cntr=0
-       while read p; do
-               if (( $cntr % 3 == 0 ));then
-                       row=""
-                       heading=$p
-                       heading_len=$tab_heading1_len
-               fi
-               if (( $cntr % 3 == 1));then
-                       heading=$p
-                       heading_len=$tab_heading2_len
-               fi
-               if (( $cntr % 3 == 2));then
-                       contr=$p
-                       heading=$p
-                       heading_len=$tab_heading3_len
-               fi
-               while (( ${#heading} < $heading_len)); do
-                       heading="$heading"" "
-               done
-               row=$row$heading
-               if (( $cntr % 3 == 2));then
-                       echo -ne $row$SAMELINE
-                       echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
-                       docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
-                       echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
-                       docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
-                       echo -e  " $row ${GREEN}stopped removed     ${EGREEN}"
-               fi
-               let cntr=cntr+1
-       done <$running_contr_file
+       if [ ! -z "$running_contr_file_empty" ]; then
+               echo $running_contr_file_empty | indent1
+       else
+               echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
+               cntr=0
+               while read p; do
+                       if (( $cntr % 3 == 0 ));then
+                               row=""
+                               heading=$p
+                               heading_len=$tab_heading1_len
+                       fi
+                       if (( $cntr % 3 == 1));then
+                               heading=$p
+                               heading_len=$tab_heading2_len
+                       fi
+                       if (( $cntr % 3 == 2));then
+                               contr=$p
+                               heading=$p
+                               heading_len=$tab_heading3_len
+                       fi
+                       while (( ${#heading} < $heading_len)); do
+                               heading="$heading"" "
+                       done
+                       row=$row$heading
+                       if (( $cntr % 3 == 2));then
+                               echo -ne $row$SAMELINE
+                               echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
+                               docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+                               echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
+                               docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+                               echo -e  " $row ${GREEN}stopped removed     ${EGREEN}"
+                       fi
+                       let cntr=cntr+1
+               done <$running_contr_file
+       fi
 
        echo ""
 
@@ -1956,7 +2023,7 @@ __kube_delete_all_resources() {
                                        echo -e "  Scaled $restype $resid $ns_text with label $labelname=$labelid to 0, current count=$count $GREEN OK $EGREEN"
                                fi
                                echo -ne "  Deleting $restype $resid $ns_text with label $labelname=$labelid "$SAMELINE
-                               kubectl delete $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
+                               kubectl delete --grace-period=1 $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
                                if [ $? -eq 0 ]; then
                                        echo -e "  Deleted $restype $resid $ns_text with label $labelname=$labelid $GREEN OK $EGREEN"
                                else
@@ -2142,41 +2209,6 @@ __kube_create_configmap() {
        return 0
 }
 
-# Function to create a configmap in kubernetes
-# args: <configmap-name> <namespace> <labelname> <labelid> <path-to-data-file> <path-to-output-yaml>
-# (Not for test scripts)
-__kube_create_configmapXXXXXXXXXXXXX() {
-       echo -ne " Creating configmap $1 "$SAMELINE
-       #envsubst < $5 > $5"_tmp"
-       #cp $5"_tmp" $5  #Need to copy back to orig file name since create configmap neeed the original file name
-       kubectl create configmap $1  -n $2 --from-file=$5 --dry-run=client -o yaml > $6
-       if [ $? -ne 0 ]; then
-               echo -e " Creating configmap $1 $RED Failed $ERED"
-               ((RES_CONF_FAIL++))
-               return 1
-       fi
-
-       kubectl apply -f $6 1> /dev/null 2> ./tmp/kubeerr
-       if [ $? -ne 0 ]; then
-               echo -e " Creating configmap $1 $RED Apply failed $ERED"
-               echo "  Message: $(<./tmp/kubeerr)"
-               ((RES_CONF_FAIL++))
-               return 1
-       fi
-       kubectl label configmap $1 -n $2 $3"="$4 --overwrite 1> /dev/null 2> ./tmp/kubeerr
-       if [ $? -ne 0 ]; then
-               echo -e " Creating configmap $1 $RED Labeling failed $ERED"
-               echo "  Message: $(<./tmp/kubeerr)"
-               ((RES_CONF_FAIL++))
-               return 1
-       fi
-       # Log the resulting map
-       kubectl get configmap $1 -n $2 -o yaml > $6
-
-       echo -e " Creating configmap $1 $GREEN OK $EGREEN"
-       return 0
-}
-
 # This function runs a kubectl cmd where a single output value is expected, for example get ip with jsonpath filter.
 # The function retries up to the timeout given in the cmd flag '--cluster-timeout'
 # args: <full kubectl cmd with parameters>
@@ -2292,14 +2324,16 @@ clean_environment() {
        if [ $RUNMODE == "KUBE" ]; then
                __clean_kube
                if [ $PRE_CLEAN -eq 1 ]; then
-                       echo " Clean docker resouces to free up resources, may take time..."
+                       echo " Cleaning docker resouces to free up resources, may take time..."
                        ../common/clean_docker.sh 2&>1 /dev/null
+                       echo ""
                fi
        else
                __clean_containers
                if [ $PRE_CLEAN -eq 1 ]; then
-                       echo " Clean kubernetes resouces to free up resources, may take time..."
+                       echo " Cleaning kubernetes resouces to free up resources, may take time..."
                        ../common/clean_kube.sh 2&>1 /dev/null
+                       echo ""
                fi
        fi
 }
@@ -2399,6 +2433,9 @@ __start_container() {
        appcount=$1
        shift
 
+       envsubst < $compose_file > "gen_"$compose_file
+       compose_file="gen_"$compose_file
+
        if [ "$compose_args" == "NODOCKERARGS" ]; then
                docker-compose -f $compose_file up -d &> .dockererr
                if [ $? -ne 0 ]; then
index 4aa3a7a..6409fa2 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+.env
\ No newline at end of file
index 4b4d8da..94ef606 100644 (file)
@@ -25,6 +25,7 @@ import traceback
 import logging
 import socket
 from threading import RLock
+from hashlib import md5
 
 # Disable all logging of GET on reading counters and db
 class AjaxFilter(logging.Filter):
@@ -54,6 +55,7 @@ hosts_set=set()
 # Request and response constants
 CALLBACK_URL="/callbacks/<string:id>"
 CALLBACK_MR_URL="/callbacks-mr/<string:id>" #Json list with string encoded items
+CALLBACK_TEXT_URL="/callbacks-text/<string:id>" # Callback for string of text
 APP_READ_URL="/get-event/<string:id>"
 APP_READ_ALL_URL="/get-all-events/<string:id>"
 DUMP_ALL_URL="/db"
@@ -111,7 +113,14 @@ def receiveresponse(id):
                 cntr_callbacks[id][1]+=1
                 msg=msg_callbacks[id][0]
                 print("Fetching msg for id: "+id+", msg="+str(msg))
-                del msg[TIME_STAMP]
+
+                if (isinstance(msg,dict)):
+                    del msg[TIME_STAMP]
+                    if ("md5" in msg.keys()):
+                        print("EXTRACTED MD5")
+                        msg=msg["md5"]
+                        print("MD5: "+str(msg))
+
                 del msg_callbacks[id][0]
                 return json.dumps(msg),200
             print("No messages for id: "+id)
@@ -139,7 +148,8 @@ def receiveresponse_all(id):
                 msg=msg_callbacks[id]
                 print("Fetching all msgs for id: "+id+", msg="+str(msg))
                 for sub_msg in msg:
-                    del sub_msg[TIME_STAMP]
+                    if (isinstance(sub_msg, dict)):
+                        del sub_msg[TIME_STAMP]
                 del msg_callbacks[id]
                 return json.dumps(msg),200
             print("No messages for id: "+id)
@@ -180,7 +190,8 @@ def events_write(id):
 
         with lock:
             cntr_msg_callbacks += 1
-            msg[TIME_STAMP]=str(datetime.now())
+            if (isinstance(msg, dict)):
+                msg[TIME_STAMP]=str(datetime.now())
             if (id in msg_callbacks.keys()):
                 msg_callbacks[id].append(msg)
             else:
@@ -202,8 +213,9 @@ def events_write(id):
     return 'OK',200
 
 
-# Receive a json callback message with payload fromatted accoirding to output frm the message router
-# URI and payload, (PUT or POST): /callbacks/<id> <json messages>
+# Receive a json callback message with payload formatted according to output from the message router
+# Array of stringified json objects
+# URI and payload, (PUT or POST): /callbacks-mr/<id> <json messages>
 # json is a list of string encoded json items
 # response: OK 200 or 500 for other errors
 @app.route(CALLBACK_MR_URL,
@@ -212,17 +224,21 @@ def events_write_mr(id):
     global msg_callbacks
     global cntr_msg_callbacks
 
+    storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
+                                        #Large payloads will otherwise overload the server
     try:
         print("Received callback (mr) for id: "+id +", content-type="+request.content_type)
-        remote_host_logging(request)
         print("raw data: str(request.data): "+str(request.data))
+        if (storeas is None):
+            print("raw data: str(request.data): "+str(request.data))
         do_delay()
         try:
             #if (request.content_type == MIME_JSON):
             if (MIME_JSON in request.content_type):
                 data = request.data
                 msg_list = json.loads(data)
-                print("Payload(json): "+str(msg_list))
+                if (storeas is None):
+                    print("Payload(json): "+str(msg_list))
             else:
                 msg_list=[]
                 print("Payload(content-type="+request.content_type+"). Setting empty json as payload")
@@ -234,11 +250,21 @@ def events_write_mr(id):
         with lock:
             remote_host_logging(request)
             for msg in msg_list:
-                print("msg (str): "+str(msg))
-                msg=json.loads(msg)
-                print("msg (json): "+str(msg))
+                if (storeas is None):
+                    msg=json.loads(msg)
+                else:
+                    #Convert to compact json without ws between parameter and value...
+                    #It seem that ws is added somewhere along to way to this server
+                    msg=json.loads(msg)
+                    msg=json.dumps(msg, separators=(',', ':'))
+
+                    md5msg={}
+                    md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
+                    msg=md5msg
+                    print("msg (json converted to md5 hash): "+str(msg["md5"]))
                 cntr_msg_callbacks += 1
-                msg[TIME_STAMP]=str(datetime.now())
+                if (isinstance(msg, dict)):
+                    msg[TIME_STAMP]=str(datetime.now())
                 if (id in msg_callbacks.keys()):
                     msg_callbacks[id].append(msg)
                 else:
@@ -259,6 +285,73 @@ def events_write_mr(id):
 
     return 'OK',200
 
+# Receive a callback message of a single text message (content type ignored)
+# or a json array of strings (content type json)
+# URI and payload, (PUT or POST): /callbacks-text/<id> <text message>
+# response: OK 200 or 500 for other errors
+@app.route(CALLBACK_TEXT_URL,
+    methods=['PUT','POST'])
+def events_write_text(id):
+    global msg_callbacks
+    global cntr_msg_callbacks
+
+    storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
+                                        #Large payloads will otherwise overload the server
+    try:
+        print("Received callback for id: "+id +", content-type="+request.content_type)
+        remote_host_logging(request)
+        if (storeas is None):
+            print("raw data: str(request.data): "+str(request.data))
+        do_delay()
+
+        try:
+            msg_list=None
+            if (MIME_JSON in request.content_type):  #Json array of strings
+                msg_list=json.loads(request.data)
+            else:
+                data=request.data.decode("utf-8")    #Assuming string
+                msg_list=[]
+                msg_list.append(data)
+
+            for msg in msg_list:
+                if (storeas == "md5"):
+                    md5msg={}
+                    print("msg: "+str(msg))
+                    print("msg (endcode str): "+str(msg.encode('utf-8')))
+                    md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
+                    msg=md5msg
+                    print("msg (data converted to md5 hash): "+str(msg["md5"]))
+
+                if (isinstance(msg, dict)):
+                    msg[TIME_STAMP]=str(datetime.now())
+
+                with lock:
+                    cntr_msg_callbacks += 1
+                    if (id in msg_callbacks.keys()):
+                        msg_callbacks[id].append(msg)
+                    else:
+                        msg_callbacks[id]=[]
+                        msg_callbacks[id].append(msg)
+
+                    if (id in cntr_callbacks.keys()):
+                        cntr_callbacks[id][0] += 1
+                    else:
+                        cntr_callbacks[id]=[]
+                        cntr_callbacks[id].append(1)
+                        cntr_callbacks[id].append(0)
+        except Exception as e:
+            print(CAUGHT_EXCEPTION+str(e))
+            traceback.print_exc()
+            return 'NOTOK',500
+
+
+    except Exception as e:
+        print(CAUGHT_EXCEPTION+str(e))
+        traceback.print_exc()
+        return 'NOTOK',500
+
+    return 'OK',200
+
 ### Functions for test ###
 
 # Dump the whole db of current callbacks
index e1b9ff9..32beca1 100644 (file)
@@ -43,7 +43,10 @@ http {
            proxy_set_header   X-Real-IP            $remote_addr;
            proxy_set_header   X-Forwarded-For      $proxy_add_x_forwarded_for;
            proxy_pass      http://localhost:2222;
+
+           client_max_body_size 0;
         }
+
     }
     ##
     # SSL Settings
index fb6d674..4b1913f 100644 (file)
@@ -69,11 +69,13 @@ SERVER_ERROR="Server error :"
 
 topic_write=""
 topic_read=""
+generic_topics_upload_baseurl=""
 
 uploader_thread=None
 downloader_thread=None
+generic_uploader_thread=None
 
-# Function to download messages from dmaap
+# Function to upload PMS messages to dmaap
 def dmaap_uploader():
     global msg_requests
     global cntr_msg_requests_fetched
@@ -107,7 +109,7 @@ def dmaap_uploader():
         sleep(0.01)
 
 
-# Function to upload messages to dmaap
+# Function to download PMS messages from dmaap
 def dmaap_downloader():
     global msg_responses
     global cntr_msg_responses_submitted
@@ -150,6 +152,48 @@ def dmaap_downloader():
         except Exception as e:
             sleep(1)
 
+# Function to upload generic messages to dmaap
+def dmaap_generic_uploader():
+    global msg_requests
+    global cntr_msg_requests_fetched
+
+    print("Starting generic uploader")
+
+    headers_json = {'Content-type': 'application/json', 'Accept': '*/*'}
+    headers_text = {'Content-type': 'text/plain', 'Accept': '*/*'}
+
+    while True:
+        if (len(generic_messages)):
+            for topicname in generic_messages.keys():    #topicname contains the path of the topics, eg. "/event/<topic>"
+                topic_queue=generic_messages[topicname]
+                if (len(topic_queue)>0):
+                    if (topicname.endswith(".text")):
+                        msg=topic_queue[0]
+                        headers=headers_text
+                    else:
+                        msg=topic_queue[0]
+                        msg=json.dumps(msg)
+                        headers=headers_json
+                    url=generic_topics_upload_baseurl+topicname
+                    print("Sending to dmaap : "+ url)
+                    print("Sending to dmaap : "+ msg)
+                    print("Sending to dmaap : "+ str(headers))
+                    try:
+                        resp=requests.post(url, data=msg, headers=headers, timeout=10)
+                        if (resp.status_code<199 & resp.status_code > 299):
+                            print("Failed, response code: " + str(resp.status_code))
+                            sleep(1)
+                        else:
+                            print("Dmaap response code: " + str(resp.status_code))
+                            print("Dmaap response text: " + str(resp.text))
+                            with lock:
+                                topic_queue.pop(0)
+                                cntr_msg_requests_fetched += 1
+                    except Exception as e:
+                        print("Failed, exception: "+ str(e))
+                        sleep(1)
+        sleep(0.01)
+
 #I'm alive function
 @app.route('/',
     methods=['GET'])
@@ -157,7 +201,7 @@ def index():
     return 'OK', 200
 
 
-# Helper function to create a Dmaap request message
+# Helper function to create a Dmaap PMS request message
 # args : <GET|PUT|DELETE> <correlation-id> <json-string-payload - may be None> <url>
 # response: json formatted string of a complete Dmaap message
 def create_message(operation, correlation_id, payload, url):
@@ -171,7 +215,7 @@ def create_message(operation, correlation_id, payload, url):
 
 ### MR-stub interface, for MR control
 
-# Send a message to MR
+# Send a PMS message to MR
 # URI and parameters (PUT or POST): /send-request?operation=<GET|PUT|POST|DELETE>&url=<url>
 # response: <correlation-id> (http 200) o4 400 for parameter error or 500 for other errors
 @app.route(APP_WRITE_URL,
@@ -212,7 +256,7 @@ def sendrequest():
             print(APP_WRITE_URL+"-"+CAUGHT_EXCEPTION+" "+str(e) + " "+traceback.format_exc())
             return Response(SERVER_ERROR+" "+str(e), status=500, mimetype=MIME_TEXT)
 
-# Receive a message response for MR for the included correlation id
+# Receive a PMS message response for MR for the included correlation id
 # URI and parameter, (GET): /receive-response?correlationid=<correlation-id>
 # response: <json-array of 1 response> 200 or empty 204 or other errors 500
 @app.route(APP_READ_URL,
@@ -243,7 +287,7 @@ def receiveresponse():
 
 ### Dmaap interface ###
 
-# Read messages stream. URI according to agent configuration.
+# Read PMS messages stream. URI according to agent configuration.
 # URI, (GET): /events/A1-POLICY-AGENT-READ/users/policy-agent
 # response: 200 <json array of request messages>, or 500 for other errors
 @app.route(AGENT_READ_URL,
@@ -299,7 +343,7 @@ def events_read():
     print("timeout: "+str(timeout)+", start_time: "+str(start_time)+", current_time: "+str(current_time))
     return Response("[]", status=200, mimetype=MIME_JSON)
 
-# Write messages stream. URI according to agent configuration.
+# Write PMS messages stream. URI according to agent configuration.
 # URI and payload, (PUT or POST): /events/A1-POLICY-AGENT-WRITE <json array of response messages>
 # response: OK 200 or 400 for missing json parameters, 500 for other errors
 @app.route(AGENT_WRITE_URL,
@@ -367,10 +411,10 @@ def oru_read():
         return Response(json.dumps(res), status=200, mimetype=MIME_JSON)
     return Response("[]", status=200, mimetype=MIME_JSON)
 
-# Generic POST/PUT catching all urls starting with /events/<topic>.
+# Generic POST catching all urls starting with /events/<topic>.
 # Writes the message in a que for that topic
 @app.route("/events/<path>",
-    methods=['PUT','POST'])
+    methods=['POST'])
 def generic_write(path):
     global generic_messages
     global cntr_msg_responses_submitted
@@ -378,8 +422,12 @@ def generic_write(path):
     write_method=str(request.method)
     with lock:
         try:
-            payload=request.json
-            print(write_method+" on "+urlkey+" json=" + json.dumps(payload))
+            if (urlkey.endswith(".text")):
+                payload=str(request.data.decode('UTF-8'))
+                print(write_method+" on "+urlkey+" text=" + payload)
+            else:
+                payload=request.json
+                print(write_method+" on "+urlkey+" json=" + json.dumps(payload))
             topicmsgs=[]
             if (urlkey in generic_messages.keys()):
                 topicmsgs=generic_messages[urlkey]
@@ -407,6 +455,9 @@ def generic_read(path):
     global generic_messages
     global cntr_msg_requests_fetched
 
+    if generic_topics_upload_baseurl:
+        return Response('Url not available when running as mrstub frontend', status=404, mimetype=MIME_TEXT)
+
     urlpath="/events/"+str(path)
     urlkey="/events/"+str(path).split("/")[0] #Extract topic
     print("GET on topic"+urlkey)
@@ -530,7 +581,14 @@ if os.getenv("TOPIC_READ") is not None:
         uploader_thread=Thread(target=dmaap_uploader)
         uploader_thread.start()
 
-else:
+if os.environ['GENERIC_TOPICS_UPLOAD_BASEURL'] is not None:
+    print("GENERIC_TOPICS_UPLOAD_BASEURL:"+os.environ['GENERIC_TOPICS_UPLOAD_BASEURL'])
+    generic_topics_upload_baseurl=os.environ['GENERIC_TOPICS_UPLOAD_BASEURL']
+    if generic_topics_upload_baseurl and generic_uploader_thread is None:
+        generic_uploader_thread=Thread(target=dmaap_generic_uploader)
+        generic_uploader_thread.start()
+
+if os.getenv("TOPIC_READ") is None or os.environ['GENERIC_TOPICS_UPLOAD_BASEURL'] is None:
     print("No env variables - OK")
 
 if __name__ == "__main__":
index c548e56..35b5ba0 100644 (file)
@@ -39,7 +39,8 @@ http {
 
         # serve dynamic requests
         location / {
-        proxy_pass      http://localhost:2222;
+            proxy_pass      http://localhost:2222;
+            client_max_body_size 0;
         }
     }
     ##
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 105c875..003dbfa 100644 (file)
@@ -19,9 +19,8 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
 
   consul-server:
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 94628fa..b860a89 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   control-panel:
     image: ${CONTROL_PANEL_IMAGE}
diff --git a/test/simulator-group/cr/.env b/test/simulator-group/cr/.env
new file mode 100644 (file)
index 0000000..a64de54
--- /dev/null
@@ -0,0 +1 @@
+COMPOSE_PROJECT_NAME=callback-receiver
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 0cb36d2..7953662 100644 (file)
@@ -1,5 +1,5 @@
 apiVersion: apps/v1
-kind: Deployment
+kind: StatefulSet
 metadata:
   name: $CR_APP_NAME
   namespace: $KUBE_SIM_NAMESPACE
@@ -7,7 +7,8 @@ metadata:
     run: $CR_APP_NAME
     autotest: CR
 spec:
-  replicas: 1
+  replicas: $CR_APP_COUNT
+  serviceName: $CR_APP_NAME
   selector:
     matchLabels:
       run: $CR_APP_NAME
index 634a464..047ec45 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
-  callback-receiver:
+  cr:
+    scale: $CR_APP_COUNT
     networks:
       - default
-    container_name: ${CR_APP_NAME}
     image: ${CR_IMAGE}
     ports:
-      - ${CR_EXTERNAL_PORT}:${CR_INTERNAL_PORT}
-      - ${CR_EXTERNAL_SECURE_PORT}:${CR_INTERNAL_SECURE_PORT}
+      - ${CR_INTERNAL_PORT}/tcp
+      - ${CR_INTERNAL_SECURE_PORT}/tcp
     labels:
       - "nrttest_app=CR"
       - "nrttest_dp=${CR_DISPLAY_NAME}"
index 43e532b..0367643 100644 (file)
@@ -7,15 +7,9 @@ metadata:
     run: $CR_APP_NAME
     autotest: CR
 spec:
-  type: ClusterIP
   ports:
-  - port: $CR_EXTERNAL_PORT
-    targetPort: $CR_INTERNAL_PORT
-    protocol: TCP
+  - port: 80
     name: http
-  - port: $CR_EXTERNAL_SECURE_PORT
-    targetPort: $CR_INTERNAL_SECURE_PORT
-    protocol: TCP
-    name: https
+  clusterIP: None
   selector:
-    run: $CR_APP_NAME
\ No newline at end of file
+    run: $CR_APP_NAME
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index b20a9d7..f96db09 100644 (file)
@@ -68,4 +68,7 @@ app:
   configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
   dmaap-base-url: $MR_SERVICE_PATH
   # The url used to adress this component. This is used as a callback url sent to other components.
-  dmaap-adapter-base-url: $DMAAP_ADP_SERVICE_PATH
\ No newline at end of file
+  dmaap-adapter-base-url: $DMAAP_ADP_SERVICE_PATH
+  # KAFKA boostrap server. This is only needed if there are Information Types that uses a kafkaInputTopic
+  kafka:
+    bootstrap-servers: $MR_KAFKA_SERVICE_PATH
index b6605e3..e36d910 100644 (file)
@@ -2,8 +2,13 @@
   "types": [
      {
         "id": "ExampleInformationType",
-        "dmaapTopicUrl": "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs",
+        "dmaapTopicUrl": "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs?timeout=15000&limit=100",
         "useHttpProxy": ${DMMAAP_ADP_PROXY_FLAG}
-     }
+     },
+     {
+      "id": "ExampleInformationTypeKafka",
+      "kafkaInputTopic": "unauthenticated.dmaapadp_kafka.text",
+      "useHttpProxy": ${DMMAAP_ADP_PROXY_FLAG}
+   }
   ]
 }
\ No newline at end of file
index f9dee41..cea605a 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   dmaap-adapter-service:
     image: ${DMAAP_ADP_IMAGE}
diff --git a/test/simulator-group/dmaapadp/mnt/.gitignore b/test/simulator-group/dmaapadp/mnt/.gitignore
new file mode 100644 (file)
index 0000000..cdf0793
--- /dev/null
@@ -0,0 +1,17 @@
+################################################################################
+#   Copyright (c) 2021 Nordix Foundation.                                      #
+#                                                                              #
+#   Licensed under the Apache License, Version 2.0 (the "License");            #
+#   you may not use this file except in compliance with the License.           #
+#   You may obtain a copy of the License at                                    #
+#                                                                              #
+#       http://www.apache.org/licenses/LICENSE-2.0                             #
+#                                                                              #
+#   Unless required by applicable law or agreed to in writing, software        #
+#   distributed under the License is distributed on an "AS IS" BASIS,          #
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
+#   See the License for the specific language governing permissions and        #
+#   limitations under the License.                                             #
+################################################################################
+*
+!.gitignore
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index e0296fa..aa8a0f1 100644 (file)
@@ -40,7 +40,7 @@ spec:
         - name: DMAAP_MR_ADDR
           value: "$MR_SERVICE_PATH"
         - name: LOG_LEVEL
-          value: "Debug"
+          value: Debug
       volumes:
       - configMap:
           defaultMode: 420
index 21fe551..e4c9a00 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   dmaap-mediator-service:
     image: ${DMAAP_MED_IMAGE}
@@ -32,7 +32,7 @@ services:
       - INFO_PRODUCER_PORT=${DMAAP_MED_CONF_SELF_PORT}
       - INFO_COORD_ADDR=${ECS_SERVICE_PATH}
       - DMAAP_MR_ADDR=${MR_SERVICE_PATH}
-      - LOG_LEVEL="Debug"
+      - LOG_LEVEL=Debug
     volumes:
     - ${DMAAP_MED_HOST_MNT_DIR}/$DMAAP_MED_DATA_FILE:${DMAAP_MED_DATA_MOUNT_PATH}/$DMAAP_MED_DATA_FILE
     labels:
diff --git a/test/simulator-group/dmaapmed/mnt/.gitignore b/test/simulator-group/dmaapmed/mnt/.gitignore
new file mode 100644 (file)
index 0000000..b94353c
--- /dev/null
@@ -0,0 +1,17 @@
+################################################################################
+#   Copyright (c) 2021 Nordix Foundation.                                      #
+#                                                                              #
+#   Licensed under the Apache License, Version 2.0 (the "License");            #
+#   you may not use this file except in compliance with the License.           #
+#   You may obtain a copy of the License at                                    #
+#                                                                              #
+#       http://www.apache.org/licenses/LICENSE-2.0                             #
+#                                                                              #
+#   Unless required by applicable law or agreed to in writing, software        #
+#   distributed under the License is distributed on an "AS IS" BASIS,          #
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
+#   See the License for the specific language governing permissions and        #
+#   limitations under the License.                                             #
+################################################################################
+*
+!.gitignore
\ No newline at end of file
index 8a67226..ddb776f 100644 (file)
@@ -3,7 +3,7 @@
      [
        {
          "id": "STD_Fault_Messages",
-         "dmaapTopicUrl": "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages"
+         "dmaapTopicUrl": "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=15000&limit=100"
        }
    ]
  }
\ No newline at end of file
diff --git a/test/simulator-group/dmaapmr/.gitignore b/test/simulator-group/dmaapmr/.gitignore
new file mode 100644 (file)
index 0000000..7dc00c5
--- /dev/null
@@ -0,0 +1,3 @@
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 2b39d15..1a9d40a 100644 (file)
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: $MR_DMAAP_KUBE_APP_NAME
+  name: $MR_ZOOKEEPER_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_DMAAP_KUBE_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
     autotest: DMAAPMR
 spec:
-  replicas: 1
   selector:
     matchLabels:
-      run: $MR_DMAAP_KUBE_APP_NAME
+      run: $MR_ZOOKEEPER_APP_NAME
   template:
     metadata:
       labels:
-        run: $MR_DMAAP_KUBE_APP_NAME
+        run: $MR_ZOOKEEPER_APP_NAME
         autotest: DMAAPMR
     spec:
       containers:
-      - name: $MR_DMAAP_KUBE_APP_NAME
-        image: $ONAP_DMAAPMR_IMAGE
+      - name: $MR_ZOOKEEPER_APP_NAME
+        image: $ONAP_ZOOKEEPER_IMAGE
         imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
         ports:
         - name: http
-          containerPort: $MR_INTERNAL_PORT
-        - name: https
-          containerPort: $MR_INTERNAL_SECURE_PORT
+          containerPort: $MR_ZOOKEEPER_PORT
         env:
-        - name: enableCadi
-          value: 'false'
-        volumeMounts:
-        - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
-          subPath: MsgRtrApi.properties
-          name: dmaapmr-msg-rtr-api
-        volumeMounts:
-        - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
-          subPath: logback.xml
-          name: dmaapmr-log-back
+        - name: ZOOKEEPER_REPLICAS
+          value: '1'
+        - name: ZOOKEEPER_TICK_TIME
+          value: '2000'
+        - name: ZOOKEEPER_SYNC_LIMIT
+          value: '5'
+        - name: ZOOKEEPER_INIT_LIMIT
+          value: '10'
+        - name: ZOOKEEPER_MAX_CLIENT_CNXNS
+          value: '200'
+        - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
+          value: '3'
+        - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
+          value: '24'
+        - name: ZOOKEEPER_CLIENT_PORT
+          value: '$MR_ZOOKEEPER_PORT'
+        - name: KAFKA_OPTS
+          value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
+        - name: ZOOKEEPER_SERVER_ID
+          value: '1'
         volumeMounts:
-        - mountPath: /appl/dmaapMR1/etc/cadi.properties
-          subPath: cadi.properties
-          name: dmaapmr-cadi
+        - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
+          subPath: zk_server_jaas.conf
+          name: dmaapmr-zk-server-jaas
       volumes:
       - configMap:
           defaultMode: 420
-          name: dmaapmr-msgrtrapi.properties
-        name: dmaapmr-msg-rtr-api
-      - configMap:
-          defaultMode: 420
-          name: dmaapmr-logback.xml
-        name: dmaapmr-log-back
-      - configMap:
-          defaultMode: 420
-          name: dmaapmr-cadi.properties
-        name: dmaapmr-cadi
+          name: dmaapmr-zk-server-jaas.conf
+        name: dmaapmr-zk-server-jaas
 ---
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: $MR_KAFKA_BWDS_NAME
+  name: $MR_KAFKA_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_KAFKA_BWDS_NAME
+    run: $MR_KAFKA_APP_NAME
     autotest: DMAAPMR
 spec:
   replicas: 1
   selector:
     matchLabels:
-      run: $MR_KAFKA_BWDS_NAME
+      run: $MR_KAFKA_APP_NAME
   template:
     metadata:
       labels:
-        run: $MR_KAFKA_BWDS_NAME
+        run: $MR_KAFKA_APP_NAME
         autotest: DMAAPMR
     spec:
       containers:
-      - name: $MR_KAFKA_BWDS_NAME
+      - name: $MR_KAFKA_APP_NAME
         image: $ONAP_KAFKA_IMAGE
         imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
         ports:
         - name: http
-          containerPort: 9095
+          containerPort: $MR_KAFKA_PORT
+        - name: http-external
+          containerPort: $MR_KAFKA_KUBE_NODE_PORT
         env:
         - name: enableCadi
           value: 'false'
         - name: KAFKA_ZOOKEEPER_CONNECT
-          value: 'zookeeper.onap:2181'
+          value: '$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT'
         - name: KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS
           value: '40000'
         - name: KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS
           value: '40000'
         - name: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP
-          value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT'
+          value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
         - name: KAFKA_ADVERTISED_LISTENERS
-          value: 'INTERNAL_PLAINTEXT://kaka:9092'
-#        - name: KAFKA_ADVERTISED_LISTENERS
-#          value: 'INTERNAL_PLAINTEXT://localhost:9092'
+          value: 'INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_KUBE_NODE_PORT'
         - name: KAFKA_LISTENERS
-          value: 'EXTERNAL_PLAINTEXT://0.0.0.0:9095,INTERNAL_PLAINTEXT://0.0.0.0:9092'
+          value: 'INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_KUBE_NODE_PORT'
         - name: KAFKA_INTER_BROKER_LISTENER_NAME
           value: INTERNAL_PLAINTEXT
         - name: KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE
@@ -110,7 +109,6 @@ spec:
           value: '1'
         - name: KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS
           value: '1'
-
         volumeMounts:
         - mountPath: /etc/kafka/secrets/jaas/zk_client_jaas.conf
           subPath: zk_client_jaas.conf
@@ -124,58 +122,55 @@ spec:
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: $MR_ZOOKEEPER_APP_NAME
+  name: $MR_DMAAP_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
     autotest: DMAAPMR
 spec:
   replicas: 1
   selector:
     matchLabels:
-      run: $MR_ZOOKEEPER_APP_NAME
+      run: $MR_DMAAP_APP_NAME
   template:
     metadata:
       labels:
-        run: $MR_ZOOKEEPER_APP_NAME
+        run: $MR_DMAAP_APP_NAME
         autotest: DMAAPMR
     spec:
       containers:
-      - name: $MR_ZOOKEEPER_APP_NAME
-        image: $ONAP_ZOOKEEPER_IMAGE
+      - name: $MR_DMAAP_APP_NAME
+        image: $ONAP_DMAAPMR_IMAGE
         imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
         ports:
         - name: http
-          containerPort: 2181
+          containerPort: $MR_INTERNAL_PORT
+        - name: https
+          containerPort: $MR_INTERNAL_SECURE_PORT
         env:
-        - name: ZOOKEEPER_REPLICAS
-          value: '1'
-        - name: ZOOKEEPER_TICK_TIME
-          value: '2000'
-        - name: ZOOKEEPER_SYNC_LIMIT
-          value: '5'
-        - name: ZOOKEEPER_INIT_LIMIT
-          value: '10'
-        - name: ZOOKEEPER_MAX_CLIENT_CNXNS
-          value: '200'
-        - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
-          value: '3'
-        - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
-          value: '24'
-        - name: ZOOKEEPER_CLIENT_PORT
-          value: '2181'
-        - name: KAFKA_OPTS
-          value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
-        - name: ZOOKEEPER_SERVER_ID
-          value: '1'
-        - name: ZOOKEEPER_SASL_ENABLED
+        - name: enableCadi
           value: 'false'
         volumeMounts:
-        - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
-          subPath: zk_server_jaas.conf
-          name: dmaapmr-zk-server-jaas
+        - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+          subPath: MsgRtrApi.properties
+          name: dmaapmr-msg-rtr-api
+        - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
+          subPath: logback.xml
+          name: dmaapmr-log-back
+        - mountPath: /appl/dmaapMR1/etc/cadi.properties
+          subPath: cadi.properties
+          name: dmaapmr-cadi
       volumes:
       - configMap:
           defaultMode: 420
-          name: dmaapmr-zk-server-jaas.conf
-        name: dmaapmr-zk-server-jaas
\ No newline at end of file
+          name: dmaapmr-msgrtrapi.properties
+        name: dmaapmr-msg-rtr-api
+      - configMap:
+          defaultMode: 420
+          name: dmaapmr-logback.xml
+        name: dmaapmr-log-back
+      - configMap:
+          defaultMode: 420
+          name: dmaapmr-cadi.properties
+        name: dmaapmr-cadi
+
@@ -37,7 +37,7 @@
 ##
 #config.zk.servers=172.18.1.1
 #config.zk.servers={{.Values.zookeeper.name}}:{{.Values.zookeeper.port}}
-config.zk.servers=zookeeper:2181
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
 
 #config.zk.root=/fe3c/cambria/config
 
@@ -51,7 +51,7 @@ config.zk.servers=zookeeper:2181
 ##        if you want to change request.required.acks it can take this one value
 #kafka.metadata.broker.list=localhost:9092,localhost:9093
 #kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=kafka:9092
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
 ##kafka.request.required.acks=-1
 #kafka.client.zookeeper=${config.zk.servers}
 consumer.timeout.ms=100
@@ -1,6 +1,7 @@
 # LICENSE_START=======================================================
 #  org.onap.dmaap
 #  ================================================================================
+#  Copyright Ã‚© 2021 Nordix Foundation. All rights reserved.
 #  Copyright Â© 2017 AT&T Intellectual Property. All rights reserved.
 #  ================================================================================
 #  Licensed under the Apache License, Version 2.0 (the "License");
@@ -34,7 +35,8 @@
 ##
 ## Both Cambria and Kafka make use of Zookeeper.
 ##
-config.zk.servers=zookeeper:2181
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
+#$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
 
 ###############################################################################
 ##
@@ -45,7 +47,8 @@ config.zk.servers=zookeeper:2181
 ##        if you want to change request.required.acks it can take this one value
 #kafka.metadata.broker.list=localhost:9092,localhost:9093
 #kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=kafka:9092
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
+#$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
 ##kafka.request.required.acks=-1
 #kafka.client.zookeeper=${config.zk.servers}
 consumer.timeout.ms=100
@@ -60,7 +63,6 @@ auto.commit.enable=false
 kafka.rebalance.backoff.ms=10000
 kafka.rebalance.max.retries=6
 
-
 ###############################################################################
 ##
 ##        Secured Config
@@ -135,7 +137,7 @@ cambria.consumer.cache.touchFreqMs=120000
 cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
 consumer.timeout=17
 default.partitions=3
-default.replicas=3
+default.replicas=1
 ##############################################################################
 #100mb
 maxcontentlength=10000
diff --git a/test/simulator-group/dmaapmr/configs1/mr/cadi.properties b/test/simulator-group/dmaapmr/configs1/mr/cadi.properties
new file mode 100644 (file)
index 0000000..6178e42
--- /dev/null
@@ -0,0 +1,38 @@
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+#Removed to be disable aaf in test env
+#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095\
+aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
+aaf_env=DEV
+aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
+
+#Removed to be disable aaf in test env
+# cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
+# cadi_truststore_password=8FyfX+ar;0$uZQ0h9*oXchNX
+
+cadi_keyfile=/appl/dmaapMR1/etc/org.onap.dmaap.mr.keyfile
+
+cadi_alias=dmaapmr@mr.dmaap.onap.org
+cadi_keystore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.p12
+cadi_keystore_password=GDQttV7)BlOvWMf6F7tz&cjy
+cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
+
+cadi_loglevel=INFO
+cadi_protocols=TLSv1.1,TLSv1.2
+cadi_latitude=37.78187
+cadi_longitude=-122.26147
\ No newline at end of file
@@ -1,5 +1,6 @@
 <!--
      ============LICENSE_START=======================================================
+     Copyright Ã‚© 2021 Nordix Foundation. All rights reserved.
      Copyright Â© 2019 AT&T Intellectual Property. All rights reserved.
      ================================================================================
      Licensed under the Apache License, Version 2.0 (the "License");
index 6b5c9c2..492fab1 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   zookeeper:
     image: $ONAP_ZOOKEEPER_IMAGE
     container_name: $MR_ZOOKEEPER_APP_NAME
     ports:
-      - "2181:2181"
+      - "$MR_ZOOKEEPER_PORT:$MR_ZOOKEEPER_PORT"
     environment:
      ZOOKEEPER_REPLICAS: 1
      ZOOKEEPER_TICK_TIME: 2000
@@ -35,7 +34,7 @@ services:
      ZOOKEEPER_MAX_CLIENT_CNXNS: 200
      ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT: 3
      ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL: 24
-     ZOOKEEPER_CLIENT_PORT: 2181
+     ZOOKEEPER_CLIENT_PORT: $MR_ZOOKEEPER_PORT
      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl -Dzookeeper.4lw.commands.whitelist=*
      ZOOKEEPER_SERVER_ID: 1
     volumes:
@@ -50,15 +49,16 @@ services:
    image: $ONAP_KAFKA_IMAGE
    container_name: $MR_KAFKA_APP_NAME
    ports:
-    - "9092:9092"
+    - "$MR_KAFKA_PORT:$MR_KAFKA_PORT"
+    - "$MR_KAFKA_DOCKER_LOCALHOST_PORT:$MR_KAFKA_DOCKER_LOCALHOST_PORT"
    environment:
     enableCadi: 'false'
-    KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
+    KAFKA_ZOOKEEPER_CONNECT: $MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
     KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 40000
     KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS: 40000
-    KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT
-    KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://kafka:9092
-    KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:9092
+    KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+    KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_DOCKER_LOCALHOST_PORT
+    KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_DOCKER_LOCALHOST_PORT
     KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_PLAINTEXT
     KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE: 'false'
     KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf
diff --git a/test/simulator-group/dmaapmr/mnt/.gitignore b/test/simulator-group/dmaapmr/mnt/.gitignore
new file mode 100644 (file)
index 0000000..b94353c
--- /dev/null
@@ -0,0 +1,17 @@
+################################################################################
+#   Copyright (c) 2021 Nordix Foundation.                                      #
+#                                                                              #
+#   Licensed under the Apache License, Version 2.0 (the "License");            #
+#   you may not use this file except in compliance with the License.           #
+#   You may obtain a copy of the License at                                    #
+#                                                                              #
+#       http://www.apache.org/licenses/LICENSE-2.0                             #
+#                                                                              #
+#   Unless required by applicable law or agreed to in writing, software        #
+#   distributed under the License is distributed on an "AS IS" BASIS,          #
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
+#   See the License for the specific language governing permissions and        #
+#   limitations under the License.                                             #
+################################################################################
+*
+!.gitignore
\ No newline at end of file
diff --git a/test/simulator-group/dmaapmr/mnt/mr/KUBE-MsgRtrApi.properties b/test/simulator-group/dmaapmr/mnt/mr/KUBE-MsgRtrApi.properties
deleted file mode 100644 (file)
index e174b6f..0000000
+++ /dev/null
@@ -1,174 +0,0 @@
-# LICENSE_START=======================================================
-#  org.onap.dmaap
-#  ================================================================================
-#  Copyright Ã‚© 2020 Nordix Foundation. All rights reserved.
-#  Copyright Ã‚© 2017 AT&T Intellectual Property. All rights reserved.
-#  ================================================================================
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#  ============LICENSE_END=========================================================
-#
-#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-###############################################################################
-##
-## Cambria API Server config
-##
-## Default values are shown as commented settings.
-##
-###############################################################################
-##
-## HTTP service
-##
-## 3904 is standard as of 7/29/14.
-#
-## Zookeeper Connection
-##
-## Both Cambria and Kafka make use of Zookeeper.
-##
-#config.zk.servers=172.18.1.1
-#config.zk.servers={{.Values.zookeeper.name}}:{{.Values.zookeeper.port}}
-config.zk.servers=zookeeper.onap:2181
-
-#config.zk.root=/fe3c/cambria/config
-
-
-###############################################################################
-##
-## Kafka Connection
-##
-##        Items below are passed through to Kafka's producer and consumer
-##        configurations (after removing "kafka.")
-##        if you want to change request.required.acks it can take this one value
-#kafka.metadata.broker.list=localhost:9092,localhost:9093
-#kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=akfak-bwds.onap:9092
-##kafka.request.required.acks=-1
-#kafka.client.zookeeper=${config.zk.servers}
-consumer.timeout.ms=100
-zookeeper.connection.timeout.ms=6000
-zookeeper.session.timeout.ms=20000
-zookeeper.sync.time.ms=2000
-auto.commit.interval.ms=1000
-fetch.message.max.bytes =1000000
-auto.commit.enable=false
-
-#(backoff*retries > zksessiontimeout)
-kafka.rebalance.backoff.ms=10000
-kafka.rebalance.max.retries=6
-
-
-###############################################################################
-##
-##        Secured Config
-##
-##        Some data stored in the config system is sensitive -- API keys and secrets,
-##        for example. to protect it, we use an encryption layer for this section
-##        of the config.
-##
-## The key is a base64 encode AES key. This must be created/configured for
-## each installation.
-#cambria.secureConfig.key=
-##
-## The initialization vector is a 16 byte value specific to the secured store.
-## This must be created/configured for each installation.
-#cambria.secureConfig.iv=
-
-## Southfield Sandbox
-cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
-cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
-authentication.adminSecret=fe3cCompound
-#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
-#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
-
-
-###############################################################################
-##
-## Consumer Caching
-##
-##        Kafka expects live connections from the consumer to the broker, which
-##        obviously doesn't work over connectionless HTTP requests. The Cambria
-##        server proxies HTTP requests into Kafka consumer sessions that are kept
-##        around for later re-use. Not doing so is costly for setup per request,
-##        which would substantially impact a high volume consumer's performance.
-##
-##        This complicates Cambria server failover, because we often need server
-##        A to close its connection before server B brings up the replacement.
-##
-
-## The consumer cache is normally enabled.
-#cambria.consumer.cache.enabled=true
-
-## Cached consumers are cleaned up after a period of disuse. The server inspects
-## consumers every sweepFreqSeconds and will clean up any connections that are
-## dormant for touchFreqMs.
-#cambria.consumer.cache.sweepFreqSeconds=15
-cambria.consumer.cache.touchFreqMs=120000
-##stickforallconsumerrequests=false
-## The cache is managed through ZK. The default value for the ZK connection
-## string is the same as config.zk.servers.
-#cambria.consumer.cache.zkConnect=${config.zk.servers}
-
-##
-## Shared cache information is associated with this node's name. The default
-## name is the hostname plus the HTTP service port this host runs on. (The
-## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
-## which is not always adequate.) You can set this value explicitly here.
-##
-#cambria.api.node.identifier=<use-something-unique-to-this-instance>
-
-#cambria.rateLimit.maxEmptyPollsPerMinute=30
-#cambria.rateLimitActual.delay.ms=10
-
-###############################################################################
-##
-## Metrics Reporting
-##
-##        This server can report its metrics periodically on a topic.
-##
-#metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics                                  #msgrtr.apinode.metrics.dmaap
-#metrics.send.cambria.sendEverySeconds=60
-
-cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
-consumer.timeout=17
-default.partitions=3
-default.replicas=3
-##############################################################################
-#100mb
-maxcontentlength=10000
-
-
-##############################################################################
-#AAF Properties
-msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
-msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
-enforced.topic.name.AAF=org.onap.dmaap.mr
-forceAAF=false
-transidUEBtopicreqd=false
-defaultNSforUEB=org.onap.dmaap.mr
-##############################################################################
-#Mirror Maker Agent
-
-msgRtr.mirrormakeradmin.aaf=org.onap.dmaap.mr.mirrormaker|*|admin
-msgRtr.mirrormakeruser.aaf=org.onap.dmaap.mr.mirrormaker|*|user
-msgRtr.mirrormakeruser.aaf.create=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
-msgRtr.mirrormaker.timeout=15000
-msgRtr.mirrormaker.topic=org.onap.dmaap.mr.mirrormakeragent
-msgRtr.mirrormaker.consumergroup=mmagentserver
-msgRtr.mirrormaker.consumerid=1
-
-kafka.max.poll.interval.ms=300000
-kafka.heartbeat.interval.ms=60000
-kafka.session.timeout.ms=240000
-kafka.max.poll.records=1000
-
diff --git a/test/simulator-group/dmaapmr/mnt2/mr/KUBE-MsgRtrApi.properties b/test/simulator-group/dmaapmr/mnt2/mr/KUBE-MsgRtrApi.properties
deleted file mode 100644 (file)
index 7f7bc41..0000000
+++ /dev/null
@@ -1,166 +0,0 @@
-# LICENSE_START=======================================================
-#  org.onap.dmaap
-#  ================================================================================
-#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
-#  ================================================================================
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#        http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#  ============LICENSE_END=========================================================
-#
-#  ECOMP is a trademark and service mark of AT&T Intellectual Property.
-#
-###############################################################################
-###############################################################################
-##
-## Cambria API Server config
-##
-## Default values are shown as commented settings.
-##
-###############################################################################
-##
-## HTTP service
-##
-## 3904 is standard as of 7/29/14.
-#
-## Zookeeper Connection
-##
-## Both Cambria and Kafka make use of Zookeeper.
-##
-config.zk.servers=zookeeper:2181
-
-###############################################################################
-##
-## Kafka Connection
-##
-##        Items below are passed through to Kafka's producer and consumer
-##        configurations (after removing "kafka.")
-##        if you want to change request.required.acks it can take this one value
-#kafka.metadata.broker.list=localhost:9092,localhost:9093
-#kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=kaka:9092
-##kafka.request.required.acks=-1
-#kafka.client.zookeeper=${config.zk.servers}
-consumer.timeout.ms=100
-zookeeper.connection.timeout.ms=6000
-zookeeper.session.timeout.ms=20000
-zookeeper.sync.time.ms=2000
-auto.commit.interval.ms=1000
-fetch.message.max.bytes =1000000
-auto.commit.enable=false
-
-#(backoff*retries > zksessiontimeout)
-kafka.rebalance.backoff.ms=10000
-kafka.rebalance.max.retries=6
-
-
-###############################################################################
-##
-##        Secured Config
-##
-##        Some data stored in the config system is sensitive -- API keys and secrets,
-##        for example. to protect it, we use an encryption layer for this section
-##        of the config.
-##
-## The key is a base64 encode AES key. This must be created/configured for
-## each installation.
-#cambria.secureConfig.key=
-##
-## The initialization vector is a 16 byte value specific to the secured store.
-## This must be created/configured for each installation.
-#cambria.secureConfig.iv=
-
-## Southfield Sandbox
-cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
-cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
-authentication.adminSecret=fe3cCompound
-
-
-###############################################################################
-##
-## Consumer Caching
-##
-##        Kafka expects live connections from the consumer to the broker, which
-##        obviously doesn't work over connectionless HTTP requests. The Cambria
-##        server proxies HTTP requests into Kafka consumer sessions that are kept
-##        around for later re-use. Not doing so is costly for setup per request,
-##        which would substantially impact a high volume consumer's performance.
-##
-##        This complicates Cambria server failover, because we often need server
-##        A to close its connection before server B brings up the replacement.
-##
-
-## The consumer cache is normally enabled.
-#cambria.consumer.cache.enabled=true
-
-## Cached consumers are cleaned up after a period of disuse. The server inspects
-## consumers every sweepFreqSeconds and will clean up any connections that are
-## dormant for touchFreqMs.
-#cambria.consumer.cache.sweepFreqSeconds=15
-cambria.consumer.cache.touchFreqMs=120000
-##stickforallconsumerrequests=false
-## The cache is managed through ZK. The default value for the ZK connection
-## string is the same as config.zk.servers.
-#cambria.consumer.cache.zkConnect=${config.zk.servers}
-
-##
-## Shared cache information is associated with this node's name. The default
-## name is the hostname plus the HTTP service port this host runs on. (The
-## hostname is determined via InetAddress.getLocalHost ().getCanonicalHostName(),
-## which is not always adequate.) You can set this value explicitly here.
-##
-#cambria.api.node.identifier=<use-something-unique-to-this-instance>
-
-#cambria.rateLimit.maxEmptyPollsPerMinute=30
-#cambria.rateLimitActual.delay.ms=10
-
-###############################################################################
-##
-## Metrics Reporting
-##
-##        This server can report its metrics periodically on a topic.
-##
-#metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics
-#msgrtr.apinode.metrics.dmaap
-#metrics.send.cambria.sendEverySeconds=60
-
-cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
-consumer.timeout=17
-default.partitions=3
-default.replicas=3
-##############################################################################
-#100mb
-maxcontentlength=10000
-
-
-##############################################################################
-#AAF Properties
-msgRtr.namespace.aaf=org.onap.dmaap.mr.topic
-msgRtr.topicfactory.aaf=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
-enforced.topic.name.AAF=org.onap.dmaap.mr
-forceAAF=false
-transidUEBtopicreqd=false
-defaultNSforUEB=org.onap.dmaap.mr
-##############################################################################
-#Mirror Maker Agent
-
-msgRtr.mirrormakeradmin.aaf=org.onap.dmaap.mr.mirrormaker|*|admin
-msgRtr.mirrormakeruser.aaf=org.onap.dmaap.mr.mirrormaker|*|user
-msgRtr.mirrormakeruser.aaf.create=org.onap.dmaap.mr.topicFactory|:org.onap.dmaap.mr.topic:
-msgRtr.mirrormaker.timeout=15000
-msgRtr.mirrormaker.topic=org.onap.dmaap.mr.mirrormakeragent
-msgRtr.mirrormaker.consumergroup=mmagentserver
-msgRtr.mirrormaker.consumerid=1
-
-kafka.max.poll.interval.ms=300000
-kafka.heartbeat.interval.ms=60000
-kafka.session.timeout.ms=240000
-kafka.max.poll.records=1000
\ No newline at end of file
diff --git a/test/simulator-group/dmaapmr/mnt2/mr/cadi.properties b/test/simulator-group/dmaapmr/mnt2/mr/cadi.properties
deleted file mode 100644 (file)
index 3cd26ad..0000000
+++ /dev/null
@@ -1,19 +0,0 @@
-#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095\
-aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
-aaf_env=DEV
-aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-
-cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
-cadi_truststore_password=8FyfX+ar;0$uZQ0h9*oXchNX
-
-cadi_keyfile=/appl/dmaapMR1/etc/org.onap.dmaap.mr.keyfile
-
-cadi_alias=dmaapmr@mr.dmaap.onap.org
-cadi_keystore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.p12
-cadi_keystore_password=GDQttV7)BlOvWMf6F7tz&cjy
-cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
-
-cadi_loglevel=INFO
-cadi_protocols=TLSv1.1,TLSv1.2
-cadi_latitude=37.78187
-cadi_longitude=-122.26147
\ No newline at end of file
index e5d5d8e..913b13e 100644 (file)
 apiVersion: v1
 kind: Service
 metadata:
-  name: $MR_DMAAP_KUBE_APP_NAME
+  name: $MR_ZOOKEEPER_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_DMAAP_KUBE_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
     autotest: DMAAPMR
 spec:
   type: ClusterIP
   ports:
-  - port: $MR_EXTERNAL_PORT
-    targetPort: $MR_INTERNAL_PORT
+  - port: $MR_ZOOKEEPER_PORT
+    targetPort: $MR_ZOOKEEPER_PORT
     protocol: TCP
     name: http
-  - port: $MR_EXTERNAL_SECURE_PORT
-    targetPort: $MR_INTERNAL_SECURE_PORT
-    protocol: TCP
-    name: https
   selector:
-    run: $MR_DMAAP_KUBE_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
 ---
 apiVersion: v1
 kind: Service
 metadata:
-  name: $MR_KAFKA_BWDS_NAME
+  name: $MR_KAFKA_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_KAFKA_BWDS_NAME
+    run: $MR_KAFKA_APP_NAME
     autotest: DMAAPMR
 spec:
-  type: ClusterIP
+  type: NodePort
   ports:
-  - port: 9092
-    targetPort: 9095
+  - port: $MR_KAFKA_PORT
+    targetPort: $MR_KAFKA_PORT
     protocol: TCP
     name: http
+  - port: $MR_KAFKA_KUBE_NODE_PORT
+    targetPort: $MR_KAFKA_KUBE_NODE_PORT
+    protocol: TCP
+    name: http-external
+    nodePort: $MR_KAFKA_KUBE_NODE_PORT
   selector:
-    run: $MR_KAFKA_BWDS_NAME
+    run: $MR_KAFKA_APP_NAME
 ---
 apiVersion: v1
 kind: Service
 metadata:
-  name: $MR_ZOOKEEPER_APP_NAME
+  name: $MR_DMAAP_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
     autotest: DMAAPMR
 spec:
   type: ClusterIP
   ports:
-  - port: 2181
-    targetPort: 2181
+  - port: $MR_EXTERNAL_PORT
+    targetPort: $MR_INTERNAL_PORT
     protocol: TCP
     name: http
+  - port: $MR_EXTERNAL_SECURE_PORT
+    targetPort: $MR_INTERNAL_SECURE_PORT
+    protocol: TCP
+    name: https
   selector:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
 
 
-# ---
-# apiVersion: v1
-# kind: Service
-# metadata:
-#   name: dmaap-mr
-#   namespace: $KUBE_ONAP_NAMESPACE
-#   labels:
-#     run: $MR_DMAAP_KUBE_APP_NAME
-#     autotest: DMAAPMR
-# spec:
-#   type: ClusterIP
-#   ports:
-#   - port: $MR_EXTERNAL_PORT
-#     targetPort: $MR_INTERNAL_PORT
-#     protocol: TCP
-#     name: http
-#   - port: $MR_EXTERNAL_SECURE_PORT
-#     targetPort: $MR_INTERNAL_SECURE_PORT
-#     protocol: TCP
-#     name: https
-#   selector:
-#     run: $MR_DMAAP_KUBE_APP_NAME
-# ---
-# apiVersion: v1
-# kind: Service
-# metadata:
-#   name: dmaap-kafka
-#   namespace: $KUBE_ONAP_NAMESPACE
-#   labels:
-#     run: $MR_KAFKA_BWDS_NAME
-#     autotest: DMAAPMR
-# spec:
-#   type: ClusterIP
-#   ports:
-#   - port: 9092
-#     targetPort: 9092
-#     protocol: TCP
-#     name: http
-#   selector:
-#     run: $MR_KAFKA_BWDS_NAME
-# ---
-# apiVersion: v1
-# kind: Service
-# metadata:
-#   name: kafka
-#   namespace: $KUBE_ONAP_NAMESPACE
-#   labels:
-#     run: $MR_KAFKA_BWDS_NAME
-#     autotest: DMAAPMR
-# spec:
-#   type: ClusterIP
-#   ports:
-#   - port: 9092
-#     targetPort: 9092
-#     protocol: TCP
-#     name: http
-#   selector:
-#     run: $MR_KAFKA_BWDS_NAME
-# ---
-# apiVersion: v1
-# kind: Service
-# metadata:
-#   name: dmaap-zookeeper
-#   namespace: $KUBE_ONAP_NAMESPACE
-#   labels:
-#     run: $MR_ZOOKEEPER_APP_NAME
-#     autotest: DMAAPMR
-# spec:
-#   type: ClusterIP
-#   ports:
-#   - port: 2181
-#     targetPort: 2181
-#     protocol: TCP
-#     name: http
-#   selector:
-    run: $MR_ZOOKEEPER_APP_NAME
\ No newline at end of file
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index b057753..588f8c3 100644 (file)
@@ -16,8 +16,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   ecs:
     image: ${ECS_IMAGE}
index 3384196..edfe9f8 100644 (file)
@@ -1 +1,2 @@
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index c27ba5e..2c8c61b 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   httpproxy:
     networks:
index 7f0f349..2e644e0 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   kubeproxy:
     image: ${KUBE_PROXY_IMAGE}
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 0cf0f51..696af4e 100644 (file)
@@ -30,4 +30,6 @@ spec:
         - name: TOPIC_READ
           value: $TOPIC_READ
         - name: TOPIC_WRITE
-          value: $TOPIC_WRITE
\ No newline at end of file
+          value: $TOPIC_WRITE
+        - name: GENERIC_TOPICS_UPLOAD_BASEURL
+          value: $GENERIC_TOPICS_UPLOAD_BASEURL
\ No newline at end of file
index 9101b5b..608f5c8 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   mr-stub:
     networks:
@@ -34,6 +34,7 @@ services:
     environment:
       - TOPIC_READ=${TOPIC_READ}
       - TOPIC_WRITE=${TOPIC_WRITE}
+      - GENERIC_TOPICS_UPLOAD_BASEURL=${GENERIC_TOPICS_UPLOAD_BASEURL}
     labels:
       - "nrttest_app=MR"
       - "nrttest_dp=${MR_STUB_DISPLAY_NAME}"
index c897ba7..7002eaf 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   nonrtric-gateway:
     image: ${NRT_GATEWAY_IMAGE}
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 2261151..4b42b42 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   policy-agent:
     image: ${POLICY_AGENT_IMAGE}
index 4aa3a7a..b676cf8 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
index f771352..e05d3ad 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   producer-stub:
     networks:
-      default:
-        aliases:
-          - ${PROD_STUB_APP_NAME_ALIAS}
+      - default
     container_name: ${PROD_STUB_APP_NAME}
     image: ${PROD_STUB_IMAGE}
     ports:
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 3dbd45b..7c644b9 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   rapp-catalogue:
     image: ${RAPP_CAT_IMAGE}
index c0c4339..1f9d6f9 100644 (file)
@@ -1,4 +1,5 @@
 .tmp.json
 .dockererr
 .env
-fakedir
\ No newline at end of file
+fakedir
+gen_docker-compose*
\ No newline at end of file
index a6358c7..3515973 100644 (file)
@@ -21,11 +21,11 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 
 services:
-  g1:
+  ${RICSIM_COMPOSE_SERVICE_NAME}:
     image: ${RIC_SIM_IMAGE}
     networks:
       - default
@@ -33,7 +33,7 @@ services:
       - ${RIC_SIM_INTERNAL_PORT}/tcp
       - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
     environment:
-      - A1_VERSION=${G1_A1_VERSION}
+      - A1_VERSION=${RICSIM_COMPOSE_A1_VERSION}
       - REMOTE_HOSTS_LOGGING=1
       - ALLOW_HTTP=true
       - DUPLICATE_CHECK=1
@@ -42,75 +42,3 @@ services:
     labels:
       - "nrttest_app=RICSIM"
       - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g2:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G2_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g3:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G3_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g4:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G4_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g5:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G5_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
\ No newline at end of file
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 45f0f08..c794e67 100644 (file)
@@ -2,7 +2,7 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   name: $SDNC_APP_NAME
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_APP_NAME
     autotest: SDNC
@@ -46,7 +46,7 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   name: $SDNC_DB_APP_NAME
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_DB_APP_NAME
     autotest: SDNC
index 8861fe0..1824cd1 100644 (file)
@@ -2,7 +2,7 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   name: $SDNC_APP_NAME
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_APP_NAME
     autotest: SDNC
@@ -62,7 +62,7 @@ apiVersion: apps/v1
 kind: Deployment
 metadata:
   name: $SDNC_DB_APP_NAME
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_DB_APP_NAME
     autotest: SDNC
index 03483a0..37c99b6 100644 (file)
@@ -17,9 +17,8 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   db:
     image: ${SDNC_DB_IMAGE}
index 0b6f357..505a54c 100644 (file)
@@ -17,9 +17,8 @@ version: '3'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   db:
     image: ${SDNC_DB_IMAGE}
index 45af8b6..f172c1c 100644 (file)
@@ -2,7 +2,7 @@ apiVersion: v1
 kind: Service
 metadata:
   name: $SDNC_APP_NAME
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_APP_NAME
     autotest: SDNC
@@ -24,7 +24,7 @@ apiVersion: v1
 kind: Service
 metadata:
   name: dbhost
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_DB_APP_NAME
     autotest: SDNC
@@ -42,7 +42,7 @@ apiVersion: v1
 kind: Service
 metadata:
   name: sdnctldb01
-  namespace: $KUBE_SNDC_NAMESPACE
+  namespace: $KUBE_SDNC_NAMESPACE
   labels:
     run: $SDNC_DB_APP_NAME
     autotest: SDNC
diff --git a/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/OscDefinedEvent.json b/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/OscDefinedEvent.json
new file mode 100644 (file)
index 0000000..080b59b
--- /dev/null
@@ -0,0 +1,116 @@
+{
+    "event":{
+        "commonEventHeader":{
+            "domain":"measurement",
+            "eventId":"_1634181300_PM15min",
+            "eventName":"measurement_O_RAN_COMPONENT_PM15min",
+            "eventType":"O_RAN_COMPONENT_PM15min",
+            "internalHeaderFields":{
+                "intervalEndTime":"Thu, 14 Oct 2021 03:15:00 +0000",
+                "intervalStartTime":"Thu, 14 Oct 2021 03:00:00 +0000"
+            },
+            "lastEpochMicrosec":1634181300000000,
+            "priority":"Low",
+            "reportingEntityId":"",
+            "reportingEntityName":"ORAN-DEV",
+            "sequence":0,
+            "sourceId":"",
+            "sourceName":"",
+            "startEpochMicrosec":1634180400000000,
+            "version":"4.1",
+            "vesEventListenerVersion":"7.2.1"
+        },
+        "measurementFields":{
+            "additionalFields":{
+
+            },
+            "additionalMeasurements":[
+                {
+                    "hashMap":{
+                        "cses":"0",
+                        "es":"0",
+                        "ses":"1",
+                        "unavailability":"0"
+                    },
+                    "name":"-1"
+                },
+                {
+                    "hashMap":{
+                        "cses":"0",
+                        "es":"0",
+                        "ses":"1",
+                        "unavailability":"0"
+                    },
+                    "name":"-2"
+                }
+            ],
+            "additionalObjects":[
+
+            ],
+            "codecUsageArray":[
+
+            ],
+            "concurrentSessions":2,
+            "configuredEntities":2,
+            "cpuUsageArray":[
+
+            ],
+            "diskUsageArray":[
+
+            ],
+            "featureUsageArray":{
+                "https://www.itu.int/rec/T-REC-G.841":"true"
+            },
+            "filesystemUsageArray":[
+
+            ],
+            "hugePagesArray":[
+
+            ],
+            "ipmi":{
+
+            },
+            "latencyDistribution":[
+
+            ],
+            "loadArray":[
+
+            ],
+            "machineCheckExceptionArray":[
+
+            ],
+            "meanRequestLatency":1000,
+            "measurementFieldsVersion":"4.0",
+            "measurementInterval":234,
+            "memoryUsageArray":[
+
+            ],
+            "networkSliceArray":[
+                {
+                    "DRB.UEThDl.SNSSAI":300,
+                    "DRB.UEThUl.SNSSAI":400,
+                    "networkSliceIdentifier":"SD0001"
+                },
+                {
+                    "DRB.UEThDl.SNSSAI":150,
+                    "DRB.UEThUl.SNSSAI":250,
+                    "networkSliceIdentifier":"SD0002"
+                },
+                {
+                    "DRB.UEThDl.SNSSAI":350,
+                    "DRB.UEThUl.SNSSAI":450,
+                    "networkSliceIdentifier":"SD0003"
+                }
+            ],
+            "nfcScalingMetric":3,
+            "nicPerformanceArray":[
+
+            ],
+            "numberOfMediaPortsInUse":234,
+            "processStatsArray":[
+
+            ],
+            "requestRate":23
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/StdDefinedEvent.json b/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/events/StdDefinedEvent.json
new file mode 100644 (file)
index 0000000..243540a
--- /dev/null
@@ -0,0 +1,49 @@
+{
+    "event":{
+        "commonEventHeader":{
+            "domain":"@domain@",
+            "eventId":"@eventId@",
+            "eventName":"@domain@_@eventType@",
+            "eventType":"@eventType@",
+            "sequence":0,
+            "priority":"Low",
+            "reportingEntityId":"",
+            "reportingEntityName":"@controllerName@",
+            "sourceId":"",
+            "sourceName":"@controllerName@",
+            "startEpochMicrosec":"@timestamp@",
+            "lastEpochMicrosec":"@timestamp@",
+            "nfNamingCode":"@type@",
+            "nfVendorName":"@vendor@",
+            "stndDefinedNamespace":"o-ran-sc-du-hello-world-pm-streaming-oas3",
+            "timeZoneOffset":"+00:00",
+            "version":"4.1",
+            "vesEventListenerVersion":"7.2.1"
+        },
+        "stndDefinedFields":{
+            "stndDefinedFieldsVersion":"1.0",
+            "schemaReference":"https://gerrit.o-ran-sc.org/r/gitweb?p=scp/oam/modeling.git;a=blob_plain;f=data-model/oas3/experimental/o-ran-sc-du-hello-world-oas3.json;hb=refs/heads/master",
+            "data":{
+                "id":"string",
+                "start-time":"@collectionStartTime@",
+                "administrative-state":"locked",
+                "operational-state":"disabled",
+                "user-label":"string",
+                "job-tag":"string",
+                "granularity-period":0,
+                "measurements":[
+                    {
+                        "measurement-type-instance-reference":"/network-function/distributed-unit-functions[id='%duId%']/cell[id='%cellId%']/supported-measurements/performance-measurement-type[.='user-equipment-average-throughput-downlink']/supported-snssai-subcounter-instances/slice-differentiator[.=%sd%]",
+                        "value":300,
+                        "unit":"kbit/s"
+                    },
+                    {
+                        "measurement-type-instance-reference":"/network-function/distributed-unit-functions[id='%duId%']/cell[id='%cellId%']/supported-measurements/performance-measurement-type[.='user-equipment-average-throughput-uplink']/supported-snssai-subcounter-instances/slice-differentiator[.=%sd%]",
+                        "value":100,
+                        "unit":"kbit/s"
+                    }
+                ]
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/OscDefinedInputSchema.avsc b/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/OscDefinedInputSchema.avsc
new file mode 100644 (file)
index 0000000..076bf93
--- /dev/null
@@ -0,0 +1,364 @@
+{
+    "type": "record",
+    "name": "Osc_Defined_Input",
+    "fields": [
+        {
+            "name": "event",
+            "type": {
+                "type": "record",
+                "name": "Event_Type",
+                "fields": [
+                    {
+                        "name": "commonEventHeader",
+                        "type": {
+                            "type": "record",
+                            "name": "Common_Event_Header_Type",
+                            "fields": [
+                                {
+                                    "name": "domain",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventType",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "internalHeaderFields",
+                                    "type": {
+                                        "type": "record",
+                                        "name": "Internal_Header_Fields_Type",
+                                        "fields": [
+                                            {
+                                                "name": "intervalEndTime",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "intervalStartTime",
+                                                "type": "string"
+                                            }
+                                        ]
+                                    }
+                                },
+                                {
+                                    "name": "lastEpochMicrosec",
+                                    "type": "long"
+                                },
+                                {
+                                    "name": "priority",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "reportingEntityId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "reportingEntityName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "sequence",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "sourceId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "sourceName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "startEpochMicrosec",
+                                    "type": "long"
+                                },
+                                {
+                                    "name": "version",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "vesEventListenerVersion",
+                                    "type": "string"
+                                }
+                            ]
+                        }
+                    },
+                    {
+                        "name": "measurementFields",
+                        "type": {
+                            "type": "record",
+                            "name": "Measurement_Fields_Type",
+                            "fields": [
+                                {
+                                    "name": "additionalFields",
+                                    "type": {
+                                        "type": "record",
+                                        "name": "Additional_Fields_Type",
+                                        "fields": []
+                                    }
+                                },
+                                {
+                                    "name": "additionalMeasurements",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Additional_Measurements_Array",
+                                            "type": "record",
+                                            "fields": [
+                                                {
+                                                    "name": "hashMap",
+                                                    "type": {
+                                                        "type": "record",
+                                                        "name": "Hash_Map_Type",
+                                                        "fields": [
+                                                            {
+                                                                "name": "cses",
+                                                                "type": "string"
+                                                            },
+                                                            {
+                                                                "name": "es",
+                                                                "type": "string"
+                                                            },
+                                                            {
+                                                                "name": "ses",
+                                                                "type": "string"
+                                                            },
+                                                            {
+                                                                "name": "unavailability",
+                                                                "type": "string"
+                                                            }
+                                                        ]
+                                                    }
+                                                },
+                                                {
+                                                    "name": "name",
+                                                    "type": "string"
+                                                }
+                                            ]
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "additionalObjects",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Additional_Objects_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "codecUsageArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Codec_Usage_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "concurrentSessions",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "configuredEntities",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "cpuUsageArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Cpu_Usage_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "diskUsageArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Disk_Usage_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "featureUsageArray",
+                                    "type": {
+                                        "type": "record",
+                                        "name": "Feature_Usage_Array_Type",
+                                        "fields": [
+                                            {
+                                                "name": "https_ColoN__SlasH__SlasH_www_DoT_itu_DoT_int_SlasH_rec_SlasH_T_DasH_REC_DasH_G_DoT_841",
+                                                "type": "string"
+                                            }
+                                        ]
+                                    }
+                                },
+                                {
+                                    "name": "filesystemUsageArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Filesystem_Usage_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "hugePagesArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Huge_Pages_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "ipmi",
+                                    "type": {
+                                        "type": "record",
+                                        "name": "ipmi_Type",
+                                        "fields": []
+                                    }
+                                },
+                                {
+                                    "name": "latencyDistribution",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Latency_Distribution_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "loadArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Load_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "machineCheckExceptionArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Machine_Check_Exception_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "meanRequestLatency",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "measurementFieldsVersion",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "measurementInterval",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "memoryUsageArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Memory_Usage_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "networkSliceArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Network_Slice_Array",
+                                            "type": "record",
+                                            "fields": [
+                                                {
+                                                    "name": "DRB_DoT_UEThDl_DoT_SNSSAI",
+                                                    "type": "int"
+                                                },
+                                                {
+                                                    "name": "DRB_DoT_UEThUl_DoT_SNSSAI",
+                                                    "type": "int"
+                                                },
+                                                {
+                                                    "name": "networkSliceIdentifier",
+                                                    "type": "string"
+                                                }
+                                            ]
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "nfcScalingMetric",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "nicPerformanceArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Nic_Performance_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "numberOfMediaPortsInUse",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "processStatsArray",
+                                    "type": {
+                                        "type": "array",
+                                        "items": {
+                                            "name": "Process_Stats_Array",
+                                            "type": "record",
+                                            "fields": []
+                                        }
+                                    }
+                                },
+                                {
+                                    "name": "requestRate",
+                                    "type": "int"
+                                }
+                            ]
+                        }
+                    }
+                ]
+            }
+        }
+    ]
+}
\ No newline at end of file
diff --git a/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedInputSchema.avsc b/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedInputSchema.avsc
new file mode 100644 (file)
index 0000000..155ae1d
--- /dev/null
@@ -0,0 +1,174 @@
+{
+    "type": "record",
+    "name": "Std_Defined_Input",
+    "fields": [
+        {
+            "name": "event",
+            "type": {
+                "type": "record",
+                "name": "Event_Type",
+                "fields": [
+                    {
+                        "name": "commonEventHeader",
+                        "type": {
+                            "type": "record",
+                            "name": "Common_Event_Header_Type",
+                            "fields": [
+                                {
+                                    "name": "domain",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "eventType",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "sequence",
+                                    "type": "int"
+                                },
+                                {
+                                    "name": "priority",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "reportingEntityId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "reportingEntityName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "sourceId",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "sourceName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "startEpochMicrosec",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "lastEpochMicrosec",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "nfNamingCode",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "nfVendorName",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "stndDefinedNamespace",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "timeZoneOffset",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "version",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "vesEventListenerVersion",
+                                    "type": "string"
+                                }
+                            ]
+                        }
+                    },
+                    {
+                        "name": "stndDefinedFields",
+                        "type": {
+                            "type": "record",
+                            "name": "Stnd_Defined_Fields_Type",
+                            "fields": [
+                                {
+                                    "name": "stndDefinedFieldsVersion",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "schemaReference",
+                                    "type": "string"
+                                },
+                                {
+                                    "name": "data",
+                                    "type": {
+                                        "type": "record",
+                                        "name": "Data_Type",
+                                        "fields": [
+                                            {
+                                                "name": "id",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "start_DasH_time",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "administrative_DasH_state",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "operational_DasH_state",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "user_DasH_label",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "job_DasH_tag",
+                                                "type": "string"
+                                            },
+                                            {
+                                                "name": "granularity_DasH_period",
+                                                "type": "int"
+                                            },
+                                            {
+                                                "name": "measurements",
+                                                "type": {
+                                                    "type": "array",
+                                                    "items": {
+                                                        "name": "Measurements_Array",
+                                                        "type": "record",
+                                                        "fields": [
+                                                            {
+                                                                "name": "measurement_DasH_type_DasH_instance_DasH_reference",
+                                                                "type": "string"
+                                                            },
+                                                            {
+                                                                "name": "value",
+                                                                "type": "int"
+                                                            },
+                                                            {
+                                                                "name": "unit",
+                                                                "type": "string"
+                                                            }
+                                                        ]
+                                                    }
+                                                }
+                                            }
+                                        ]
+                                    }
+                                }
+                            ]
+                        }
+                    }
+                ]
+            }
+        }
+    ]
+}
\ No newline at end of file