Merge "First version of ODU slice assurance usecase"
author    Henrik Andersson <henrik.b.andersson@est.tech>
          Wed, 8 Dec 2021 11:45:27 +0000 (11:45 +0000)
committer Gerrit Code Review <gerrit@o-ran-sc.org>
          Wed, 8 Dec 2021 11:45:27 +0000 (11:45 +0000)
293 files changed:
a1-policy-management-service/.gitignore [moved from policy-agent/.gitignore with 100% similarity]
a1-policy-management-service/Dockerfile [moved from policy-agent/Dockerfile with 88% similarity]
a1-policy-management-service/README.md [moved from policy-agent/README.md with 100% similarity]
a1-policy-management-service/dpo/blueprints/k8s-policy-agent.yaml [moved from policy-agent/dpo/blueprints/k8s-policy-agent.yaml with 100% similarity]
a1-policy-management-service/eclipse-formatter.xml [moved from enrichment-coordinator-service/eclipse-formatter.xml with 100% similarity]
a1-policy-management-service/pom.xml [moved from policy-agent/pom.xml with 100% similarity]
a1-policy-management-service/src [moved from policy-agent/src with 100% similarity]
dmaap-adaptor-java/Dockerfile
dmaap-adaptor-java/README.md
dmaap-adaptor-java/api/api.json
dmaap-adaptor-java/api/api.yaml
dmaap-adaptor-java/config/application.yaml
dmaap-adaptor-java/config/application_configuration.json
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/Application.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/SwaggerConfig.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/clients/AsyncRestClient.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/configuration/ApplicationConfig.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/controllers/ProducerCallbacksController.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/exceptions/ServiceException.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/r1/ConsumerJobInfo.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/repository/InfoTypes.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/repository/Jobs.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/repository/MultiMap.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/DmaapTopicConsumer.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaJobDataConsumer.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaTopicConsumers.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/KafkaTopicListener.java
dmaap-adaptor-java/src/main/java/org/oran/dmaapadapter/tasks/ProducerRegstrationTask.java
dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json [new file with mode: 0644]
dmaap-adaptor-java/src/main/resources/typeSchemaKafka.json
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/ApplicationTest.java
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IcsSimulatorController.java [moved from dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/EcsSimulatorController.java with 96% similarity]
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithIcs.java [moved from dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithEcs.java with 74% similarity]
dmaap-adaptor-java/src/test/java/org/oran/dmaapadapter/IntegrationWithKafka.java
dmaap-adaptor-java/src/test/resources/test_application_configuration.json
dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json [deleted file]
dmaap-mediator-producer/README.md
dmaap-mediator-producer/build_and_test.sh [new file with mode: 0755]
dmaap-mediator-producer/container-tag.yaml [deleted file]
dmaap-mediator-producer/go.mod
dmaap-mediator-producer/go.sum
dmaap-mediator-producer/internal/config/config.go
dmaap-mediator-producer/internal/config/config_test.go
dmaap-mediator-producer/internal/jobs/jobs.go
dmaap-mediator-producer/internal/jobs/jobs_test.go
dmaap-mediator-producer/internal/restclient/HTTPClient.go
dmaap-mediator-producer/internal/server/server.go
dmaap-mediator-producer/internal/server/server_test.go
dmaap-mediator-producer/main.go
dmaap-mediator-producer/pom.xml [new file with mode: 0644]
dmaap-mediator-producer/stub/consumer/consumerstub.go
dmaap-mediator-producer/stub/dmaap/mrstub.go
docker-compose/.env
docker-compose/README.md
docker-compose/data/prepareIcsData.sh [moved from docker-compose/data/prepareEcsData.sh with 55% similarity]
docker-compose/data/sendMsgToMediator.sh
docker-compose/data/testdata/ICS/InfoJob.json [moved from docker-compose/data/testdata/ECS/EiJob.json with 100% similarity]
docker-compose/data/testdata/ICS/InfoProducer.json [moved from docker-compose/data/testdata/ECS/EiProducer.json with 100% similarity]
docker-compose/data/testdata/ICS/InfoType.json [moved from docker-compose/data/testdata/ECS/EiType.json with 100% similarity]
docker-compose/data/testdata/dmaap-mediator-java/flow.puml
docker-compose/dmaap-mediator-go/docker-compose.yaml
docker-compose/dmaap-mediator-java/config/application.yaml
docker-compose/ics/docker-compose.yaml [moved from docker-compose/ecs/docker-compose.yaml with 90% similarity]
docs/api-docs.rst
docs/conf.py
docs/developer-guide.rst
docs/installation-guide.rst
docs/overview.rst
enrichment-coordinator-service/Dockerfile [deleted file]
information-coordinator-service/.gitignore [moved from enrichment-coordinator-service/.gitignore with 100% similarity]
information-coordinator-service/Dockerfile [new file with mode: 0644]
information-coordinator-service/api/ics-api.json [moved from enrichment-coordinator-service/api/ecs-api.json with 90% similarity]
information-coordinator-service/api/ics-api.yaml [moved from enrichment-coordinator-service/api/ecs-api.yaml with 89% similarity]
information-coordinator-service/config/README [moved from enrichment-coordinator-service/config/README with 100% similarity]
information-coordinator-service/config/application.yaml [moved from enrichment-coordinator-service/config/application.yaml with 80% similarity]
information-coordinator-service/config/keystore.jks [moved from enrichment-coordinator-service/config/keystore.jks with 100% similarity]
information-coordinator-service/config/truststore.jks [moved from enrichment-coordinator-service/config/truststore.jks with 100% similarity]
information-coordinator-service/eclipse-formatter.xml [moved from policy-agent/eclipse-formatter.xml with 100% similarity]
information-coordinator-service/pom.xml [moved from enrichment-coordinator-service/pom.xml with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/Application.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/Application.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/BeanFactory.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/BeanFactory.java with 93% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/SwaggerConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/SwaggerConfig.java with 94% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/clients/AsyncRestClient.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/clients/AsyncRestClient.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/clients/AsyncRestClientFactory.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/clients/AsyncRestClientFactory.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/configuration/ApplicationConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/configuration/ApplicationConfig.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/configuration/WebClientConfig.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/configuration/WebClientConfig.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/ErrorResponse.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/ErrorResponse.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/StatusController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/StatusController.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/VoidResponse.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/VoidResponse.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eCallbacks.java with 88% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eConsts.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eController.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiJobInfo.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiJobStatus.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiJobStatus.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/a1e/A1eEiTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/a1e/A1eEiTypeInfo.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerCallbacks.java with 88% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerConsts.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerController.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerInfoTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerInfoTypeInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerJobInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerJobStatus.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerJobStatus.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerTypeRegistrationInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerTypeRegistrationInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1consumer/ConsumerTypeSubscriptionInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1consumer/ConsumerTypeSubscriptionInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerCallbacks.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerCallbacks.java with 91% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerConsts.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerConsts.java with 95% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerController.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerController.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerInfoTypeInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerInfoTypeInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerJobInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerJobInfo.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerRegistrationInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerRegistrationInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/controllers/r1producer/ProducerStatusInfo.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/controllers/r1producer/ProducerStatusInfo.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/exceptions/ServiceException.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/exceptions/ServiceException.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoJob.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoJob.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoJobs.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoJobs.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoProducer.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoProducer.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoProducers.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoProducers.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoType.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoType.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoTypeSubscriptions.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoTypeSubscriptions.java with 98% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/InfoTypes.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/InfoTypes.java with 96% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/repository/MultiMap.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/repository/MultiMap.java with 97% similarity]
information-coordinator-service/src/main/java/org/oransc/ics/tasks/ProducerSupervision.java [moved from enrichment-coordinator-service/src/main/java/org/oransc/enrichment/tasks/ProducerSupervision.java with 90% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/ApplicationTest.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/ApplicationTest.java with 95% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/MockInformationService.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/MockEnrichmentService.java with 97% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/clients/AsyncRestClientTest.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/clients/AsyncRestClientTest.java with 99% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/controller/ConsumerSimulatorController.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/controller/ConsumerSimulatorController.java with 93% similarity]
information-coordinator-service/src/test/java/org/oransc/ics/controller/ProducerSimulatorController.java [moved from enrichment-coordinator-service/src/test/java/org/oransc/enrichment/controller/ProducerSimulatorController.java with 96% similarity]
onap/oran
pom.xml
r-app-catalogue/Dockerfile
test/auto-test/FTC1.sh
test/auto-test/FTC10.sh
test/auto-test/FTC100.sh
test/auto-test/FTC110.sh
test/auto-test/FTC1100.sh
test/auto-test/FTC150.sh
test/auto-test/FTC1800.sh
test/auto-test/FTC2001.sh
test/auto-test/FTC2002.sh
test/auto-test/FTC2003.sh
test/auto-test/FTC300.sh
test/auto-test/FTC3000.sh
test/auto-test/FTC310.sh
test/auto-test/FTC350.sh
test/auto-test/FTC800.sh
test/auto-test/FTC805.sh
test/auto-test/FTC810.sh
test/auto-test/FTC850.sh
test/auto-test/FTC900.sh
test/auto-test/FTC_HELM_E_RELEASE.sh
test/auto-test/ONAP_UC.sh
test/auto-test/PM_DEMO.sh
test/auto-test/PM_EI_DEMO.sh
test/auto-test/README.md
test/auto-test/startMR.sh
test/auto-test/testdata/ics/ei-type-1.json [moved from test/auto-test/testdata/ecs/ei-type-1.json with 100% similarity]
test/auto-test/testdata/ics/ei-type-2.json [moved from test/auto-test/testdata/ecs/ei-type-2.json with 100% similarity]
test/auto-test/testdata/ics/ei-type-3.json [moved from test/auto-test/testdata/ecs/ei-type-3.json with 100% similarity]
test/auto-test/testdata/ics/ei-type-4.json [moved from test/auto-test/testdata/ecs/ei-type-4.json with 100% similarity]
test/auto-test/testdata/ics/ei-type-5.json [moved from test/auto-test/testdata/ecs/ei-type-5.json with 100% similarity]
test/auto-test/testdata/ics/ei-type-6.json [moved from test/auto-test/testdata/ecs/ei-type-6.json with 100% similarity]
test/auto-test/testdata/ics/empty-type.json [moved from test/auto-test/testdata/ecs/empty-type.json with 100% similarity]
test/auto-test/testdata/ics/info-type-1.json [moved from test/auto-test/testdata/ecs/info-type-1.json with 100% similarity]
test/auto-test/testdata/ics/info-type-2.json [moved from test/auto-test/testdata/ecs/info-type-2.json with 100% similarity]
test/auto-test/testdata/ics/info-type-3.json [moved from test/auto-test/testdata/ecs/info-type-3.json with 100% similarity]
test/auto-test/testdata/ics/info-type-4.json [moved from test/auto-test/testdata/ecs/info-type-4.json with 100% similarity]
test/auto-test/testdata/ics/info-type-5.json [moved from test/auto-test/testdata/ecs/info-type-5.json with 100% similarity]
test/auto-test/testdata/ics/info-type-50.json [moved from test/auto-test/testdata/ecs/info-type-50.json with 100% similarity]
test/auto-test/testdata/ics/info-type-6.json [moved from test/auto-test/testdata/ecs/info-type-6.json with 100% similarity]
test/auto-test/testdata/ics/info-type-60.json [moved from test/auto-test/testdata/ecs/info-type-60.json with 100% similarity]
test/auto-test/testdata/ics/info-type-info.json [moved from test/auto-test/testdata/ecs/info-type-info.json with 100% similarity]
test/auto-test/testdata/ics/job-template.json [moved from test/auto-test/testdata/ecs/job-template.json with 100% similarity]
test/auto-test/testdata/ics/job-template2.json [moved from test/auto-test/testdata/ecs/job-template2.json with 100% similarity]
test/common/README.md
test/common/api_curl.sh
test/common/cbs_api_functions.sh [new file with mode: 0644]
test/common/clean_kube.sh
test/common/consul_api_functions.sh [moved from test/common/consul_cbs_functions.sh with 68% similarity]
test/common/cp_api_functions.sh [moved from test/common/control_panel_api_functions.sh with 95% similarity]
test/common/cr_api_functions.sh
test/common/dmaapadp_api_functions.sh
test/common/dmaapmed_api_functions.sh
test/common/dmaapmr_api_functions.sh [new file with mode: 0644]
test/common/do_curl_function.sh
test/common/genstat.sh
test/common/httpproxy_api_functions.sh [moved from test/common/http_proxy_api_functions.sh with 66% similarity]
test/common/ics_api_functions.sh [moved from test/common/ecs_api_functions.sh with 82% similarity]
test/common/kafkapc_api_functions.sh [new file with mode: 0644]
test/common/kubeproxy_api_functions.sh [moved from test/common/kube_proxy_api_functions.sh with 100% similarity]
test/common/mr_api_functions.sh
test/common/ngw_api_functions.sh [moved from test/common/gateway_api_functions.sh with 97% similarity]
test/common/pa_api_functions.sh [moved from test/common/agent_api_functions.sh with 92% similarity]
test/common/prodstub_api_functions.sh
test/common/rc_api_functions.sh [moved from test/common/rapp_catalogue_api_functions.sh with 98% similarity]
test/common/ricsim_api_functions.sh [moved from test/common/ricsimulator_api_functions.sh with 88% similarity]
test/common/sdnc_api_functions.sh [moved from test/common/controller_api_functions.sh with 93% similarity]
test/common/test_env-onap-guilin.sh
test/common/test_env-onap-honolulu.sh
test/common/test_env-onap-istanbul.sh
test/common/test_env-oran-cherry.sh
test/common/test_env-oran-d-release.sh
test/common/test_env-oran-e-release.sh
test/common/testcase_common.sh
test/common/testengine_config.sh
test/cr/.gitignore
test/cr/Dockerfile
test/cr/app/nginx.conf
test/http-https-proxy/Dockerfile
test/kafka-procon/.gitignore [new file with mode: 0644]
test/kafka-procon/Dockerfile [new file with mode: 0644]
test/kafka-procon/basic_test.sh [new file with mode: 0755]
test/kafka-procon/build-and-start.sh [new file with mode: 0755]
test/kafka-procon/go.mod [new file with mode: 0644]
test/kafka-procon/go.sum [new file with mode: 0644]
test/kafka-procon/main.go [new file with mode: 0644]
test/kafka-procon/start_local.sh [new file with mode: 0755]
test/mrstub/Dockerfile
test/mrstub/app/nginx.conf
test/prodstub/Dockerfile
test/prodstub/app/nginx.conf
test/simulator-group/consul_cbs/.gitignore
test/simulator-group/consul_cbs/docker-compose.yml
test/simulator-group/control_panel/.gitignore
test/simulator-group/control_panel/application.properties
test/simulator-group/control_panel/docker-compose.yml
test/simulator-group/control_panel/nginx.conf
test/simulator-group/cr/.env [new file with mode: 0644]
test/simulator-group/cr/.gitignore
test/simulator-group/cr/app.yaml
test/simulator-group/cr/docker-compose.yml
test/simulator-group/cr/svc.yaml
test/simulator-group/dmaapadp/.gitignore
test/simulator-group/dmaapadp/application.yaml
test/simulator-group/dmaapadp/docker-compose.yml
test/simulator-group/dmaapmed/.gitignore
test/simulator-group/dmaapmed/app.yaml
test/simulator-group/dmaapmed/docker-compose.yml
test/simulator-group/dmaapmr/.gitignore [new file with mode: 0644]
test/simulator-group/dmaapmr/app.yaml
test/simulator-group/dmaapmr/configs0/kafka/zk_client_jaas.conf [new file with mode: 0644]
test/simulator-group/dmaapmr/configs0/mr/MsgRtrApi.properties [moved from test/simulator-group/dmaapmr/mnt/mr/MsgRtrApi.properties with 90% similarity]
test/simulator-group/dmaapmr/configs0/mr/cadi.properties [moved from test/simulator-group/dmaapmr/mnt/mr/cadi.properties with 58% similarity]
test/simulator-group/dmaapmr/configs0/mr/logback.xml [moved from test/simulator-group/dmaapmr/mnt/mr/logback.xml with 98% similarity]
test/simulator-group/dmaapmr/configs0/zk/zk_server_jaas.conf [moved from test/simulator-group/dmaapmr/mnt/zk/zk_server_jaas.conf with 67% similarity]
test/simulator-group/dmaapmr/configs1/kafka/zk_client_jaas.conf [moved from test/simulator-group/dmaapmr/configs/kafka/zk_client_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/configs1/mr/MsgRtrApi.properties [moved from test/simulator-group/dmaapmr/configs/mr/MsgRtrApi.properties with 96% similarity]
test/simulator-group/dmaapmr/configs1/mr/cadi.properties [moved from test/simulator-group/dmaapmr/configs/mr/cadi.properties with 100% similarity]
test/simulator-group/dmaapmr/configs1/mr/logback.xml [moved from test/simulator-group/dmaapmr/configs/mr/logback.xml with 100% similarity]
test/simulator-group/dmaapmr/configs1/zk/zk_server_jaas.conf [moved from test/simulator-group/dmaapmr/configs/zk/zk_server_jaas.conf with 100% similarity]
test/simulator-group/dmaapmr/docker-compose.yml
test/simulator-group/dmaapmr/mnt/kafka/zk_client_jaas.conf [deleted file]
test/simulator-group/dmaapmr/svc.yaml
test/simulator-group/ecs/.gitignore [deleted file]
test/simulator-group/ecs/app.yaml [deleted file]
test/simulator-group/ecs/mnt/.gitignore [deleted file]
test/simulator-group/ecs/svc.yaml [deleted file]
test/simulator-group/httpproxy/.gitignore
test/simulator-group/httpproxy/docker-compose.yml
test/simulator-group/ics/.gitignore [new file with mode: 0644]
test/simulator-group/ics/app.yaml [new file with mode: 0644]
test/simulator-group/ics/application.yaml [moved from test/simulator-group/ecs/application.yaml with 60% similarity]
test/simulator-group/ics/docker-compose.yml [moved from test/simulator-group/ecs/docker-compose.yml with 65% similarity]
test/simulator-group/ics/pv.yaml [moved from test/simulator-group/ecs/pv.yaml with 58% similarity]
test/simulator-group/ics/pvc.yaml [moved from test/simulator-group/ecs/pvc.yaml with 67% similarity]
test/simulator-group/ics/svc.yaml [new file with mode: 0644]
test/simulator-group/kafka-procon/.gitignore [new file with mode: 0644]
test/simulator-group/kafka-procon/app.yaml [new file with mode: 0644]
test/simulator-group/kafka-procon/docker-compose.yml [new file with mode: 0644]
test/simulator-group/kafka-procon/svc.yaml [new file with mode: 0644]
test/simulator-group/kubeproxy/.gitignore
test/simulator-group/kubeproxy/docker-compose.yml
test/simulator-group/mrstub/.gitignore
test/simulator-group/mrstub/docker-compose.yml
test/simulator-group/ngw/.gitignore
test/simulator-group/ngw/application.yaml
test/simulator-group/ngw/docker-compose.yml
test/simulator-group/policy_agent/.gitignore
test/simulator-group/policy_agent/app.yaml
test/simulator-group/policy_agent/docker-compose.yml
test/simulator-group/prodstub/.gitignore
test/simulator-group/prodstub/docker-compose.yml
test/simulator-group/rapp_catalogue/.gitignore
test/simulator-group/rapp_catalogue/docker-compose.yml
test/simulator-group/ric/.gitignore
test/simulator-group/ric/docker-compose.yml
test/simulator-group/sdnc/.gitignore
test/simulator-group/sdnc/docker-compose-2.yml
test/simulator-group/sdnc/docker-compose.yml
test/simulator-group/sim-monitor.js
test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedOutputSchema.avsc [new file with mode: 0644]
test/usecases/oruclosedlooprecovery/goversion/stub/sdnr/sdnrstub.go
test/usecases/oruclosedlooprecovery/scriptversion/app/Dockerfile
test/usecases/oruclosedlooprecovery/scriptversion/helm/dmaap-mr/templates/deployment.yaml
test/usecases/oruclosedlooprecovery/scriptversion/simulators/Dockerfile-message-generator
test/usecases/oruclosedlooprecovery/scriptversion/simulators/Dockerfile-sdnr-sim

similarity index 88%
rename from policy-agent/Dockerfile
rename to a1-policy-management-service/Dockerfile
index f64eebb..3775b39 100644 (file)
@@ -34,8 +34,11 @@ ADD /config/application_configuration.json /opt/app/policy-agent/data/applicatio
 ADD /config/keystore.jks /opt/app/policy-agent/etc/cert/keystore.jks
 ADD /config/truststore.jks /opt/app/policy-agent/etc/cert/truststore.jks
 
-RUN chmod -R 777 /opt/app/policy-agent/config/
-RUN chmod -R 777 /opt/app/policy-agent/data/
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/policy-agent
+RUN chown -R appuser:appuser /var/log/policy-agent
+USER appuser
 
 ADD target/${JAR} /opt/app/policy-agent/policy-agent.jar
 CMD ["java", "-jar", "/opt/app/policy-agent/policy-agent.jar"]
index b2c0c30..9843699 100644 (file)
@@ -30,14 +30,18 @@ WORKDIR /opt/app/dmaap-adaptor-service
 RUN mkdir -p /var/log/dmaap-adaptor-service
 RUN mkdir -p /opt/app/dmaap-adaptor-service/etc/cert/
 RUN mkdir -p /var/dmaap-adaptor-service
-RUN chmod -R 777 /var/dmaap-adaptor-service
 
 ADD /config/application.yaml /opt/app/dmaap-adaptor-service/config/application.yaml
 ADD /config/application_configuration.json /opt/app/dmaap-adaptor-service/data/application_configuration.json_example
 ADD /config/keystore.jks /opt/app/dmaap-adaptor-service/etc/cert/keystore.jks
 ADD /config/truststore.jks /opt/app/dmaap-adaptor-service/etc/cert/truststore.jks
 
-RUN chmod -R 777 /opt/app/dmaap-adaptor-service/config/
+
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /var/dmaap-adaptor-service/
+RUN chown -R appuser:appuser /opt/app/dmaap-adaptor-service/
+USER appuser
 
 ADD target/${JAR} /opt/app/dmaap-adaptor-service/dmaap-adaptor.jar
 CMD ["java", "-jar", "/opt/app/dmaap-adaptor-service/dmaap-adaptor.jar"]
index 9b35fe5..162bfb2 100644 (file)
@@ -1,9 +1,9 @@
 # O-RAN-SC Non-RealTime RIC DMaaP Information Producer
-This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP. Its main tasks is to register information types and itself as a producer using the ICS Data Producer API.
+This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP or Kafka. Its main tasks are to register information types and itself as a producer using the ICS Data Producer API.
 
 A data consumer may create information jobs through the ICS Data Producer API.
 
-This service will retrieve data from the DMaaP Message Router (MR) and distribute it further to the data consumers (information job owners).
+This service will retrieve data from the DMaaP Message Router (MR) or from the Kafka streaming platform and will distribute it further to the data consumers (information job owners).
 
 The component is a springboot service and is configured as any springboot service through the file `config/application.yaml`. The component log can be retrieved and logging can be controled by means of REST call. See the API documentation (api/api.yaml).
 
@@ -14,20 +14,91 @@ The file `config/application_configuration.json` contains the configuration of j
        "types":
         [
           {
-             "id":  "STD_Fault_Messages",
-             "dmaapTopicUrl":  events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+             "id":  "ExampleInformationType1_1.0.0",
+             "dmaapTopicUrl":  "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+             "useHttpProxy": true
+          },
+          {
+             "id": "ExampleInformationType2_2.0.0",
+             "kafkaInputTopic": "KafkaInputTopic",
              "useHttpProxy": false
           }
         ]
     }
 ```
 
-Each information has the following properties:
+Each information type has the following properties:
  - id the information type identity as exposed in the Information Coordination Service data consumer API
  - dmaapTopicUrl the URL to for fetching information from  DMaaP
+ - kafkaInputTopic a Kafka topic to get input from
  - useHttpProxy if true, the received information will be delivered using a HTTP proxy (provided that one is setup in the application.yaml file). This might for instance be needed if the data consumer is in the RAN or outside the cluster.
 
-The service producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+The service producer will poll MR and/or listen to Kafka topics for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+
+When an Information Job is created through the Information Coordinator Service Consumer API, it is possible to define a number of job-specific properties. For an information type that has a Kafka topic defined, the following JSON schema defines the properties that can be used:
+
+
+```sh
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+      "type": "string"
+    },
+    "maxConcurrency": {
+      "type": "integer"
+    },
+    "bufferTimeout": {
+      "type": "object",
+      "properties": {
+        "maxSize": {
+          "type": "integer"
+        },
+        "maxTimeMiliseconds": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false,
+      "required": [
+        "maxSize",
+        "maxTimeMiliseconds"
+      ]
+    }
+  },
+  "additionalProperties": false
+}
+```
+- filter is a regular expression. Only strings that match the expression will be pushed further to the consumer.
+- maxConcurrency is the maximum number of concurrent REST sessions for the data delivery to the consumer.
+  The default is 1, which is the value that must be used to guarantee that the object sequence is maintained.
+  A higher number gives higher throughput.
+- bufferTimeout can be used to reduce the number of REST calls to the consumer. If defined, a number of objects will be
+  buffered and sent in one REST call to the consumer.
+  The buffered objects will be put in a JSON array and quoted. Example:
+    Object1 and Object2 may be posted in one call -->  ["Object1", "Object2"]
+  The bufferTimeout is a JSON object and its parameters are:
+    - maxSize, the maximum number of buffered objects before posting
+    - maxTimeMiliseconds, the maximum time to buffer before posting
+  If no bufferTimeout is specified, each object will be posted as received, in separate calls (not quoted and put in a JSON array).
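+
+For illustration, a job parameter object following the schema above could look like the example below. The property names come from the schema; the values are hypothetical and only show the format:
+
+```sh
+{
+  "filter": ".*FAULT.*",
+  "maxConcurrency": 1,
+  "bufferTimeout": {
+    "maxSize": 100,
+    "maxTimeMiliseconds": 10000
+  }
+}
+```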
+
+
+For an information type that only has a DMaaP topic, the following JSON schema defines the possible parameters to use when creating an information job:
+
+```sh
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+       "type": "string"
+     }
+  },
+  "additionalProperties": false
+}
+```
+- filter is a regular expression. Only strings that match the expression will be pushed further to the consumer. This
+  has a similar meaning to the filter in jobs that receive data from Kafka.
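+
+A corresponding job parameter object for a DMaaP-only type could, for example, be (the filter value is hypothetical):
+
+```sh
+{
+  "filter": ".*CRITICAL.*"
+}
+```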
 
 ## License
 
index 39056e9..04c4ab0 100644 (file)
     }},
     "openapi": "3.0.1",
     "paths": {
-        "/dmaap_dataproducer/info_job": {
-            "post": {
-                "summary": "Callback for Information Job creation/modification",
-                "requestBody": {
-                    "content": {"application/json": {"schema": {"type": "string"}}},
-                    "required": true
-                },
-                "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
-                "operationId": "jobCreatedCallback",
-                "responses": {
-                    "200": {
-                        "description": "OK",
-                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
-                    },
-                    "404": {
-                        "description": "Information type is not found",
-                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
-                    }
-                },
-                "tags": ["Producer job control API"]
-            },
-            "get": {
-                "summary": "Get all jobs",
-                "description": "Returns all info jobs, can be used for trouble shooting",
-                "operationId": "getJobs",
-                "responses": {"200": {
-                    "description": "Information jobs",
-                    "content": {"application/json": {"schema": {
-                        "type": "array",
-                        "items": {"$ref": "#/components/schemas/producer_info_job_request"}
-                    }}}
-                }},
-                "tags": ["Producer job control API"]
-            }
-        },
-        "/dmaap_dataproducer/health_check": {"get": {
-            "summary": "Producer supervision",
-            "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
-            "operationId": "producerSupervision",
-            "responses": {"200": {
-                "description": "The producer is OK",
-                "content": {"application/json": {"schema": {"type": "string"}}}
-            }},
-            "tags": ["Producer job control API"]
-        }},
         "/actuator/threaddump": {"get": {
             "summary": "Actuator web endpoint 'threaddump'",
             "operationId": "handle_2_1_3",
             }],
             "tags": ["Information Coordinator Service Simulator (exists only in test)"]
         }},
+        "/generic_dataproducer/health_check": {"get": {
+            "summary": "Producer supervision",
+            "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
+            "operationId": "producerSupervision",
+            "responses": {"200": {
+                "description": "The producer is OK",
+                "content": {"application/json": {"schema": {"type": "string"}}}
+            }},
+            "tags": ["Producer job control API"]
+        }},
+        "/generic_dataproducer/info_job": {
+            "post": {
+                "summary": "Callback for Information Job creation/modification",
+                "requestBody": {
+                    "content": {"application/json": {"schema": {"type": "string"}}},
+                    "required": true
+                },
+                "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
+                "operationId": "jobCreatedCallback",
+                "responses": {
+                    "200": {
+                        "description": "OK",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+                    },
+                    "400": {
+                        "description": "Other error in the request",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+                    },
+                    "404": {
+                        "description": "Information type is not found",
+                        "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+                    }
+                },
+                "tags": ["Producer job control API"]
+            },
+            "get": {
+                "summary": "Get all jobs",
+                "description": "Returns all info jobs, can be used for trouble shooting",
+                "operationId": "getJobs",
+                "responses": {"200": {
+                    "description": "Information jobs",
+                    "content": {"application/json": {"schema": {
+                        "type": "array",
+                        "items": {"$ref": "#/components/schemas/producer_info_job_request"}
+                    }}}
+                }},
+                "tags": ["Producer job control API"]
+            }
+        },
         "/actuator/loggers": {"get": {
             "summary": "Actuator web endpoint 'loggers'",
             "operationId": "handle_6",
                 "tags": ["Information Coordinator Service Simulator (exists only in test)"]
             }
         },
+        "/generic_dataproducer/info_job/{infoJobId}": {"delete": {
+            "summary": "Callback for Information Job deletion",
+            "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
+            "operationId": "jobDeletedCallback",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+            }},
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "infoJobId",
+                "required": true
+            }],
+            "tags": ["Producer job control API"]
+        }},
         "/actuator/metrics/{requiredMetricName}": {"get": {
             "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
             "operationId": "handle_5",
                 "tags": ["Actuator"]
             }
         },
-        "/dmaap_dataproducer/info_job/{infoJobId}": {"delete": {
-            "summary": "Callback for Information Job deletion",
-            "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
-            "operationId": "jobDeletedCallback",
-            "responses": {"200": {
-                "description": "OK",
-                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
-            }},
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "infoJobId",
-                "required": true
-            }],
-            "tags": ["Producer job control API"]
-        }},
         "/actuator/health": {"get": {
             "summary": "Actuator web endpoint 'health'",
             "operationId": "handle_11",
             "name": "Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.",
             "url": "http://www.apache.org/licenses/LICENSE-2.0"
         },
-        "description": "Reads data from DMAAP and sends it further to information consumers",
-        "title": "Generic Dmaap Information Producer",
+        "description": "Reads data from DMaaP and Kafka and posts it further to information consumers",
+        "title": "Generic Dmaap and Kafka Information Producer",
         "version": "1.0"
     },
     "tags": [{
index 3c9fb59..1fb78fa 100644 (file)
@@ -1,7 +1,8 @@
 openapi: 3.0.1
 info:
-  title: Generic Dmaap Information Producer
-  description: Reads data from DMAAP and sends it further to information consumers
+  title: Generic Dmaap and Kafka Information Producer
+  description: Reads data from DMaaP and Kafka and posts it further to information
+    consumers
   license:
     name: Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.
     url: http://www.apache.org/licenses/LICENSE-2.0
@@ -15,63 +16,6 @@ tags:
     description: Spring Boot Actuator Web API Documentation
     url: https://docs.spring.io/spring-boot/docs/current/actuator-api/html/
 paths:
-  /dmaap_dataproducer/info_job:
-    get:
-      tags:
-      - Producer job control API
-      summary: Get all jobs
-      description: Returns all info jobs, can be used for trouble shooting
-      operationId: getJobs
-      responses:
-        200:
-          description: Information jobs
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  $ref: '#/components/schemas/producer_info_job_request'
-    post:
-      tags:
-      - Producer job control API
-      summary: Callback for Information Job creation/modification
-      description: The call is invoked to activate or to modify a data subscription.
-        The endpoint is provided by the Information Producer.
-      operationId: jobCreatedCallback
-      requestBody:
-        content:
-          application/json:
-            schema:
-              type: string
-        required: true
-      responses:
-        200:
-          description: OK
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/void'
-        404:
-          description: Information type is not found
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/error_information'
-  /dmaap_dataproducer/health_check:
-    get:
-      tags:
-      - Producer job control API
-      summary: Producer supervision
-      description: The endpoint is provided by the Information Producer and is used
-        for supervision of the producer.
-      operationId: producerSupervision
-      responses:
-        200:
-          description: The producer is OK
-          content:
-            application/json:
-              schema:
-                type: string
   /actuator/threaddump:
     get:
       tags:
@@ -124,6 +68,69 @@ paths:
             application/json:
               schema:
                 type: object
+  /generic_dataproducer/health_check:
+    get:
+      tags:
+      - Producer job control API
+      summary: Producer supervision
+      description: The endpoint is provided by the Information Producer and is used
+        for supervision of the producer.
+      operationId: producerSupervision
+      responses:
+        200:
+          description: The producer is OK
+          content:
+            application/json:
+              schema:
+                type: string
+  /generic_dataproducer/info_job:
+    get:
+      tags:
+      - Producer job control API
+      summary: Get all jobs
+      description: Returns all info jobs, can be used for trouble shooting
+      operationId: getJobs
+      responses:
+        200:
+          description: Information jobs
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/producer_info_job_request'
+    post:
+      tags:
+      - Producer job control API
+      summary: Callback for Information Job creation/modification
+      description: The call is invoked to activate or to modify a data subscription.
+        The endpoint is provided by the Information Producer.
+      operationId: jobCreatedCallback
+      requestBody:
+        content:
+          application/json:
+            schema:
+              type: string
+        required: true
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/void'
+        400:
+          description: Other error in the request
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/error_information'
+        404:
+          description: Information type is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/error_information'
   /actuator/loggers:
     get:
       tags:
@@ -195,6 +202,29 @@ paths:
             application/json:
               schema:
                 type: object
+  /generic_dataproducer/info_job/{infoJobId}:
+    delete:
+      tags:
+      - Producer job control API
+      summary: Callback for Information Job deletion
+      description: The call is invoked to terminate a data subscription. The endpoint
+        is provided by the Information Producer.
+      operationId: jobDeletedCallback
+      parameters:
+      - name: infoJobId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/void'
   /actuator/metrics/{requiredMetricName}:
     get:
       tags:
@@ -287,29 +317,6 @@ paths:
             '*/*':
               schema:
                 type: object
-  /dmaap_dataproducer/info_job/{infoJobId}:
-    delete:
-      tags:
-      - Producer job control API
-      summary: Callback for Information Job deletion
-      description: The call is invoked to terminate a data subscription. The endpoint
-        is provided by the Information Producer.
-      operationId: jobDeletedCallback
-      parameters:
-      - name: infoJobId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      responses:
-        200:
-          description: OK
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/void'
   /actuator/health:
     get:
       tags:
index 6a2d68a..c3476ac 100644 (file)
@@ -46,14 +46,15 @@ app:
     # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
     http.proxy-host:
     http.proxy-port: 0
-  ecs-base-url: https://localhost:8434
+  ics-base-url: https://localhost:8434
   # Location of the component configuration file. The file will only be used if the Consul database is not used;
   # configuration from the Consul will override the file.
   configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
   dmaap-base-url: http://dradmin:dradmin@localhost:2222
   # The url used to adress this component. This is used as a callback url sent to other components.
   dmaap-adapter-base-url: https://localhost:8435
-  # KAFKA boostrap server. This is only needed if there are Information Types that uses a kafkaInputTopic
+  # Kafka bootstrap servers. This is only needed if there are Information Types that use a kafkaInputTopic.
+  # Several redundant bootstrap servers can be specified, separated by a comma ','.
   kafka:
     bootstrap-servers: localhost:9092
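+    # A hypothetical example with redundant brokers (the host names are assumptions,
+    # not part of this configuration):
+    # bootstrap-servers: kafka-1:9092,kafka-2:9092,kafka-3:9092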
 
index ae34c56..6aaffd1 100644 (file)
@@ -1,9 +1,15 @@
 {
    "types": [
       {
-         "id": "ExampleInformationType",
+         "id": "ExampleInformationType1",
          "dmaapTopicUrl": "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12",
          "useHttpProxy": true
+      },
+      {
+         "id": "ExampleInformationType2",
+         "kafkaInputTopic": "TutorialTopic",
+         "useHttpProxy": false
       }
+      
    ]
-}
\ No newline at end of file
+}
index aa10972..2058202 100644 (file)
 
 package org.oran.dmaapadapter;
 
+import java.io.File;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.ApplicationArguments;
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
 
 @SpringBootApplication
+@EnableConfigurationProperties
+@EnableScheduling
 public class Application {
 
+    private static final Logger logger = LoggerFactory.getLogger(Application.class);
+
+    @Value("${app.configuration-filepath}")
+    private String localConfigurationFilePath;
+
+    private long configFileLastModification = 0;
+    private static ConfigurableApplicationContext applicationContext;
+
     public static void main(String[] args) {
-        SpringApplication.run(Application.class);
+        applicationContext = SpringApplication.run(Application.class);
     }
 
+    @Scheduled(fixedRate = 10 * 1000)
+    public void checkConfigFileChanges() {
+        long timestamp = new File(localConfigurationFilePath).lastModified();
+        if (configFileLastModification != 0 && timestamp != configFileLastModification) {
+            logger.info("Restarting due to change in the file {}", localConfigurationFilePath);
+            restartApplication();
+        }
+        configFileLastModification = timestamp;
+    }
+
+    private static void restartApplication() {
+        if (applicationContext == null) {
+            logger.info("Cannot restart in unittest");
+            return;
+        }
+        ApplicationArguments args = applicationContext.getBean(ApplicationArguments.class);
+
+        Thread thread = new Thread(() -> {
+            applicationContext.close();
+            applicationContext = SpringApplication.run(Application.class, args.getSourceArgs());
+        });
+
+        thread.setDaemon(false);
+        thread.start();
+    }
 }
index 8f33377..6128d2e 100644 (file)
@@ -38,6 +38,6 @@ import io.swagger.v3.oas.annotations.info.License;
 public class SwaggerConfig {
     private SwaggerConfig() {}
 
-    static final String API_TITLE = "Generic Dmaap Information Producer";
-    static final String DESCRIPTION = "Reads data from DMAAP and sends it further to information consumers";
+    static final String API_TITLE = "Generic Dmaap and Kafka Information Producer";
+    static final String DESCRIPTION = "Reads data from DMaaP and Kafka and posts it further to information consumers";
 }
index 8b3efed..746fdd7 100644 (file)
@@ -47,6 +47,7 @@ import reactor.netty.transport.ProxyProvider;
 /**
  * Generic reactive REST client.
  */
+@SuppressWarnings("java:S4449") // @Add Nullable to third party api
 public class AsyncRestClient {
 
     private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -83,7 +84,7 @@ public class AsyncRestClient {
     }
 
     public Mono<String> postWithAuthHeader(String uri, String body, String username, String password,
-            MediaType mediaType) {
+            @Nullable MediaType mediaType) {
         Object traceTag = createTraceTag();
         logger.debug("{} POST (auth) uri = '{}{}''", traceTag, baseUrl, uri);
         logger.trace("{} POST body: {}", traceTag, body);
@@ -111,16 +112,6 @@ public class AsyncRestClient {
         return retrieve(traceTag, request);
     }
 
-    public Mono<ResponseEntity<String>> putForEntity(String uri) {
-        Object traceTag = createTraceTag();
-        logger.debug("{} PUT uri = '{}{}''", traceTag, baseUrl, uri);
-        logger.trace("{} PUT body: <empty>", traceTag);
-        RequestHeadersSpec<?> request = getWebClient() //
-                .put() //
-                .uri(uri);
-        return retrieve(traceTag, request);
-    }
-
     public Mono<String> put(String uri, String body) {
         return putForEntity(uri, body) //
                 .map(this::toBody);
index f17a9c0..3ea64e7 100644 (file)
@@ -28,6 +28,7 @@ import java.util.Collection;
 import java.util.Collections;
 
 import lombok.Getter;
+import lombok.Setter;
 
 import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.repository.InfoType;
@@ -73,12 +74,13 @@ public class ApplicationConfig {
     private int httpProxyPort = 0;
 
     @Getter
+    @Setter
     @Value("${server.port}")
     private int localServerHttpPort;
 
     @Getter
-    @Value("${app.ecs-base-url}")
-    private String ecsBaseUrl;
+    @Value("${app.ics-base-url}")
+    private String icsBaseUrl;
 
     @Getter
     @Value("${app.dmaap-adapter-base-url}")
index 07f5aa7..94f9f8d 100644 (file)
@@ -34,6 +34,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
 import java.util.ArrayList;
 import java.util.Collection;
 
+import org.oran.dmaapadapter.exceptions.ServiceException;
 import org.oran.dmaapadapter.r1.ProducerJobInfo;
 import org.oran.dmaapadapter.repository.InfoTypes;
 import org.oran.dmaapadapter.repository.Job;
@@ -58,8 +59,8 @@ public class ProducerCallbacksController {
 
     public static final String API_NAME = "Producer job control API";
     public static final String API_DESCRIPTION = "";
-    public static final String JOB_URL = "/dmaap_dataproducer/info_job";
-    public static final String SUPERVISION_URL = "/dmaap_dataproducer/health_check";
+    public static final String JOB_URL = "/generic_dataproducer/info_job";
+    public static final String SUPERVISION_URL = "/generic_dataproducer/health_check";
     private static Gson gson = new GsonBuilder().create();
     private final Jobs jobs;
     private final InfoTypes types;
@@ -77,6 +78,8 @@ public class ProducerCallbacksController {
                     content = @Content(schema = @Schema(implementation = VoidResponse.class))), //
             @ApiResponse(responseCode = "404", description = "Information type is not found", //
                     content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))), //
+            @ApiResponse(responseCode = "400", description = "Other error in the request", //
+                    content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))) //
     })
     public ResponseEntity<Object> jobCreatedCallback( //
             @RequestBody String body) {
@@ -86,8 +89,12 @@ public class ProducerCallbacksController {
             this.jobs.addJob(request.id, request.targetUri, types.getType(request.typeId), request.owner,
                     request.lastUpdated, toJobParameters(request.jobData));
             return new ResponseEntity<>(HttpStatus.OK);
+        } catch (ServiceException e) {
+            logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+            return ErrorResponse.create(e, e.getHttpStatus());
         } catch (Exception e) {
-            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
+            logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+            return ErrorResponse.create(e, HttpStatus.BAD_REQUEST);
         }
     }
 
index 740911d..b30e28e 100644 (file)
@@ -31,16 +31,6 @@ public class ServiceException extends Exception {
     @Getter
     private final HttpStatus httpStatus;
 
-    public ServiceException(String message) {
-        super(message);
-        httpStatus = null;
-    }
-
-    public ServiceException(String message, Exception originalException) {
-        super(message, originalException);
-        httpStatus = null;
-    }
-
     public ServiceException(String message, HttpStatus httpStatus) {
         super(message);
         this.httpStatus = httpStatus;
index ce4a3b7..c1737db 100644 (file)
@@ -28,7 +28,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 import org.immutables.gson.Gson;
 
 @Gson.TypeAdapters
-@Schema(name = "consumer_job", description = "Information for an Enrichment Information Job")
+@Schema(name = "consumer_job", description = "Information for an Information Job")
 public class ConsumerJobInfo {
 
     @Schema(name = "info_type_id", description = "Information type Identifier of the subscription job",
index 558fc46..baa998b 100644 (file)
@@ -28,6 +28,7 @@ import java.util.Vector;
 import org.oran.dmaapadapter.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
 
 public class InfoTypes {
     private static final Logger logger = LoggerFactory.getLogger(InfoTypes.class);
@@ -47,7 +48,7 @@ public class InfoTypes {
     public synchronized InfoType getType(String id) throws ServiceException {
         InfoType type = allTypes.get(id);
         if (type == null) {
-            throw new ServiceException("Could not find type: " + id);
+            throw new ServiceException("Could not find type: " + id, HttpStatus.NOT_FOUND);
         }
         return type;
     }
index 0e7743d..ec33774 100644 (file)
@@ -35,6 +35,7 @@ import org.oran.dmaapadapter.repository.Job.Parameters;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
 import org.springframework.stereotype.Component;
 
 @Component
@@ -59,7 +60,7 @@ public class Jobs {
     public synchronized Job getJob(String id) throws ServiceException {
         Job job = allJobs.get(id);
         if (job == null) {
-            throw new ServiceException("Could not find job: " + id);
+            throw new ServiceException("Could not find job: " + id, HttpStatus.NOT_FOUND);
         }
         return job;
     }
index 38f3d17..f7cc14e 100644 (file)
@@ -24,6 +24,7 @@ import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Set;
 import java.util.Vector;
 
 /**
@@ -50,6 +51,14 @@ public class MultiMap<T> {
         return null;
     }
 
+    public T get(String key1, String key2) {
+        Map<String, T> innerMap = this.map.get(key1);
+        if (innerMap == null) {
+            return null;
+        }
+        return innerMap.get(key2);
+    }
+
     public Collection<T> get(String key) {
         Map<String, T> innerMap = this.map.get(key);
         if (innerMap == null) {
@@ -58,6 +67,10 @@ public class MultiMap<T> {
         return new Vector<>(innerMap.values());
     }
 
+    public Set<String> keySet() {
+        return this.map.keySet();
+    }
+
     public void clear() {
         this.map.clear();
     }
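
For orientation, a minimal usage sketch of the extended MultiMap keyed by (typeId, jobId). The two-argument get and keySet come from the hunk above; the two-key put and remove are existing methods used later in this change by KafkaTopicConsumers. The wrapper class and the String values are illustrative only (production code stores KafkaJobDataConsumer instances):

    import org.oran.dmaapadapter.repository.MultiMap;

    public class MultiMapSketch {
        public static void main(String[] args) {
            MultiMap<String> consumers = new MultiMap<>(); // values would be KafkaJobDataConsumer in production
            consumers.put("KafkaInformationType", "ID1", "consumer-for-job-ID1");

            // Look up one entry by (typeId, jobId), as KafkaTopicConsumers.removeJob does
            String single = consumers.get("KafkaInformationType", "ID1");
            System.out.println(single);

            // Iterate everything grouped per type, as restartNonRunningTopics does
            for (String typeId : consumers.keySet()) {
                for (String value : consumers.get(typeId)) {
                    System.out.println(typeId + " -> " + value);
                }
            }

            // Remove by both keys when the job is deleted
            consumers.remove("KafkaInformationType", "ID1");
        }
    }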
index 217a072..fe7ec8b 100644 (file)
@@ -32,7 +32,6 @@ import org.slf4j.LoggerFactory;
 import org.springframework.http.MediaType;
 
 import reactor.core.publisher.Flux;
-import reactor.core.publisher.FluxSink;
 import reactor.core.publisher.Mono;
 
 /**
@@ -44,42 +43,10 @@ public class DmaapTopicConsumer {
     private static final Logger logger = LoggerFactory.getLogger(DmaapTopicConsumer.class);
 
     private final AsyncRestClient dmaapRestClient;
-    private final InfiniteFlux infiniteSubmitter = new InfiniteFlux();
     protected final ApplicationConfig applicationConfig;
     protected final InfoType type;
     protected final Jobs jobs;
 
-    /** Submits new elements until stopped */
-    private static class InfiniteFlux {
-        private FluxSink<Integer> sink;
-        private int counter = 0;
-
-        public synchronized Flux<Integer> start() {
-            stop();
-            return Flux.create(this::next).doOnRequest(this::onRequest);
-        }
-
-        public synchronized void stop() {
-            if (this.sink != null) {
-                this.sink.complete();
-                this.sink = null;
-            }
-        }
-
-        void onRequest(long no) {
-            logger.debug("InfiniteFlux.onRequest {}", no);
-            for (long i = 0; i < no; ++i) {
-                sink.next(counter++);
-            }
-        }
-
-        void next(FluxSink<Integer> sink) {
-            logger.debug("InfiniteFlux.next");
-            this.sink = sink;
-            sink.next(counter++);
-        }
-    }
-
     public DmaapTopicConsumer(ApplicationConfig applicationConfig, InfoType type, Jobs jobs) {
         AsyncRestClientFactory restclientFactory = new AsyncRestClientFactory(applicationConfig.getWebClientConfig());
         this.dmaapRestClient = restclientFactory.createRestClientNoHttpProxy("");
@@ -89,14 +56,18 @@ public class DmaapTopicConsumer {
     }
 
     public void start() {
-        infiniteSubmitter.start() //
+        Flux.range(0, Integer.MAX_VALUE) //
                 .flatMap(notUsed -> getFromMessageRouter(getDmaapUrl()), 1) //
                 .flatMap(this::pushDataToConsumers) //
                 .subscribe(//
                         null, //
                         throwable -> logger.error("DmaapMessageConsumer error: {}", throwable.getMessage()), //
-                        () -> logger.warn("DmaapMessageConsumer stopped {}", type.getId())); //
+                        this::onComplete); //
+    }
 
+    private void onComplete() {
+        logger.warn("DmaapMessageConsumer completed {}", type.getId());
+        start();
     }
 
     private String getDmaapUrl() {
@@ -128,6 +99,7 @@ public class DmaapTopicConsumer {
 
         // Distribute the body to all jobs for this type
         return Flux.fromIterable(this.jobs.getJobsForType(this.type)) //
+                .filter(job -> job.isFilterMatch(body)) //
                 .doOnNext(job -> logger.debug("Sending to consumer {}", job.getCallbackUrl())) //
                 .flatMap(job -> job.getConsumerRestClient().post("", body, MediaType.APPLICATION_JSON), CONCURRENCY) //
                 .onErrorResume(this::handleConsumerErrorResponse);
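
The new filter step above delegates to Job.isFilterMatch, whose implementation is not part of this diff. Judging from the integration test later in this change (a job with filter ".*DmaapResponse.*" receives DmaapResponse1/2 but not "Junk"), a regular-expression match along these lines would be consistent; this is a hypothetical sketch with assumed names, not the actual Job code:

    import java.util.regex.Pattern;

    // Hypothetical stand-in for the filter handling inside Job; field and method names are assumptions.
    final class FilterSketch {
        private final Pattern filterPattern; // compiled from the job's "filter" parameter, may be absent

        FilterSketch(String filterRegexp) {
            this.filterPattern =
                    (filterRegexp == null || filterRegexp.isEmpty()) ? null : Pattern.compile(filterRegexp);
        }

        boolean isFilterMatch(String body) {
            // No filter configured means every message matches
            return this.filterPattern == null || this.filterPattern.matcher(body).find();
        }
    }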
index 5550ce0..2a16f47 100644 (file)
@@ -31,7 +31,6 @@ import org.springframework.web.reactive.function.client.WebClientResponseExcepti
 import reactor.core.Disposable;
 import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
-import reactor.core.publisher.Sinks.Many;
 
 /**
  * The class streams data from a multi cast sink and sends the data to the Job
@@ -75,17 +74,22 @@ public class KafkaJobDataConsumer {
         this.job = job;
     }
 
-    public synchronized void start(Many<String> input) {
+    public synchronized void start(Flux<String> input) {
         stop();
         this.errorStats.resetKafkaErrors();
         this.subscription = getMessagesFromKafka(input, job) //
                 .flatMap(this::postToClient, job.getParameters().getMaxConcurrency()) //
                 .onErrorResume(this::handleError) //
                 .subscribe(this::handleConsumerSentOk, //
-                        t -> stop(), //
+                        this::handleExceptionInStream, //
                         () -> logger.warn("KafkaMessageConsumer stopped jobId: {}", job.getId()));
     }
 
+    private void handleExceptionInStream(Throwable t) {
+        logger.warn("KafkaMessageConsumer exception: {}, jobId: {}", t.getMessage(), job.getId());
+        stop();
+    }
+
     private Mono<String> postToClient(String body) {
         logger.debug("Sending to consumer {} {} {}", job.getId(), job.getCallbackUrl(), body);
         MediaType contentType = this.job.isBuffered() ? MediaType.APPLICATION_JSON : null;
@@ -94,8 +98,8 @@ public class KafkaJobDataConsumer {
 
     public synchronized void stop() {
         if (this.subscription != null) {
-            subscription.dispose();
-            subscription = null;
+            this.subscription.dispose();
+            this.subscription = null;
         }
     }
 
@@ -103,9 +107,8 @@ public class KafkaJobDataConsumer {
         return this.subscription != null;
     }
 
-    private Flux<String> getMessagesFromKafka(Many<String> input, Job job) {
-        Flux<String> result = input.asFlux() //
-                .filter(job::isFilterMatch);
+    private Flux<String> getMessagesFromKafka(Flux<String> input, Job job) {
+        Flux<String> result = input.filter(job::isFilterMatch);
 
         if (job.isBuffered()) {
             result = result.map(this::quote) //
index 0ed85c6..4809017 100644 (file)
@@ -30,6 +30,7 @@ import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
 import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.repository.MultiMap;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -46,7 +47,7 @@ public class KafkaTopicConsumers {
     private final Map<String, KafkaTopicListener> topicListeners = new HashMap<>(); // Key is typeId
 
     @Getter
-    private final Map<String, KafkaJobDataConsumer> consumers = new HashMap<>(); // Key is jobId
+    private final MultiMap<KafkaJobDataConsumer> consumers = new MultiMap<>(); // Key is typeId, jobId
 
     private static final int CONSUMER_SUPERVISION_INTERVAL_MS = 1000 * 60 * 3;
 
@@ -70,22 +71,25 @@ public class KafkaTopicConsumers {
             public void onJobRemoved(Job job) {
                 removeJob(job);
             }
-
         });
     }
 
     public synchronized void addJob(Job job) {
-        if (this.consumers.get(job.getId()) == null && job.getType().isKafkaTopicDefined()) {
+        if (job.getType().isKafkaTopicDefined()) {
+            removeJob(job);
             logger.debug("Kafka job added {}", job.getId());
             KafkaTopicListener topicConsumer = topicListeners.get(job.getType().getId());
+            if (consumers.get(job.getType().getId()).isEmpty()) {
+                topicConsumer.start();
+            }
             KafkaJobDataConsumer subscription = new KafkaJobDataConsumer(job);
-            subscription.start(topicConsumer.getOutput());
-            consumers.put(job.getId(), subscription);
+            subscription.start(topicConsumer.getOutput().asFlux());
+            consumers.put(job.getType().getId(), job.getId(), subscription);
         }
     }
 
     public synchronized void removeJob(Job job) {
-        KafkaJobDataConsumer d = consumers.remove(job.getId());
+        KafkaJobDataConsumer d = consumers.remove(job.getType().getId(), job.getId());
         if (d != null) {
             logger.debug("Kafka job removed {}", job.getId());
             d.stop();
@@ -93,10 +97,9 @@ public class KafkaTopicConsumers {
     }
 
     @Scheduled(fixedRate = CONSUMER_SUPERVISION_INTERVAL_MS)
-    public synchronized void restartNonRunningTasks() {
-
-        for (KafkaJobDataConsumer consumer : consumers.values()) {
-            if (!consumer.isRunning()) {
+    public synchronized void restartNonRunningTopics() {
+        for (String typeId : this.consumers.keySet()) {
+            for (KafkaJobDataConsumer consumer : this.consumers.get(typeId)) {
                 restartTopic(consumer);
             }
         }
@@ -110,10 +113,6 @@ public class KafkaTopicConsumers {
     }
 
     private void restartConsumersOfType(KafkaTopicListener topic, InfoType type) {
-        this.consumers.forEach((jobId, consumer) -> {
-            if (consumer.getJob().getType().getId().equals(type.getId())) {
-                consumer.start(topic.getOutput());
-            }
-        });
+        this.consumers.get(type.getId()).forEach(consumer -> consumer.start(topic.getOutput().asFlux()));
     }
 }
index d1045ee..f3b44a3 100644 (file)
@@ -53,7 +53,6 @@ public class KafkaTopicListener {
     public KafkaTopicListener(ApplicationConfig applicationConfig, InfoType type) {
         this.applicationConfig = applicationConfig;
         this.type = type;
-        start();
     }
 
     public Many<String> getOutput() {
index 8b5b6cf..ec3f2b2 100644 (file)
@@ -42,6 +42,7 @@ import org.oran.dmaapadapter.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Component;
@@ -50,7 +51,8 @@ import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 
 /**
- * Registers the types and this producer in ECS. This is done when needed.
+ * Registers the types and this producer in Information Coordinator Service.
+ * This is done when needed.
  */
 @Component
 @EnableScheduling
@@ -65,7 +67,7 @@ public class ProducerRegstrationTask {
 
     private static final String PRODUCER_ID = "DmaapGenericInfoProducer";
     @Getter
-    private boolean isRegisteredInEcs = false;
+    private boolean isRegisteredInIcs = false;
     private static final int REGISTRATION_SUPERVISION_INTERVAL_MS = 1000 * 5;
 
     public ProducerRegstrationTask(@Autowired ApplicationConfig applicationConfig, @Autowired InfoTypes types) {
@@ -76,18 +78,21 @@ public class ProducerRegstrationTask {
     }
 
     @Scheduled(fixedRate = REGISTRATION_SUPERVISION_INTERVAL_MS)
-    public void supervisionTask() {
-        checkRegistration() //
-                .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInEcs) //
-                .flatMap(isRegisterred -> registerTypesAndProducer()) //
-                .subscribe( //
-                        null, //
-                        this::handleRegistrationFailure, //
-                        this::handleRegistrationCompleted);
+    public void runSupervisionTask() {
+        supervisionTask().subscribe( //
+                null, //
+                this::handleRegistrationFailure, //
+                this::handleRegistrationCompleted);
+    }
+
+    public Mono<String> supervisionTask() {
+        return checkRegistration() //
+                .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInIcs) //
+                .flatMap(isRegisterred -> registerTypesAndProducer());
     }
 
     private void handleRegistrationCompleted() {
-        isRegisteredInEcs = true;
+        isRegisteredInIcs = true;
     }
 
     private void handleRegistrationFailure(Throwable t) {
@@ -96,7 +101,7 @@ public class ProducerRegstrationTask {
 
     // Returns TRUE if registration is correct
     private Mono<Boolean> checkRegistration() {
-        final String url = applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+        final String url = applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
         return restClient.get(url) //
                 .flatMap(this::isRegisterredInfoCorrect) //
                 .onErrorResume(t -> Mono.just(Boolean.FALSE));
@@ -105,7 +110,7 @@ public class ProducerRegstrationTask {
     private Mono<Boolean> isRegisterredInfoCorrect(String registerredInfoStr) {
         ProducerRegistrationInfo registerredInfo = gson.fromJson(registerredInfoStr, ProducerRegistrationInfo.class);
         if (isEqual(producerRegistrationInfo(), registerredInfo)) {
-            logger.trace("Already registered in ECS");
+            logger.trace("Already registered in ICS");
             return Mono.just(Boolean.TRUE);
         } else {
             return Mono.just(Boolean.FALSE);
@@ -113,13 +118,13 @@ public class ProducerRegstrationTask {
     }
 
     private String registerTypeUrl(InfoType type) {
-        return applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
+        return applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
     }
 
     private Mono<String> registerTypesAndProducer() {
         final int CONCURRENCY = 20;
         final String producerUrl =
-                applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+                applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
 
         return Flux.fromIterable(this.types.getAll()) //
                 .doOnNext(type -> logger.info("Registering type {}", type.getId())) //
@@ -144,37 +149,26 @@ public class ProducerRegstrationTask {
     }
 
     private Object jsonSchemaObject(InfoType type) throws IOException, ServiceException {
-
-        if (type.isKafkaTopicDefined()) {
-            String schemaStrKafka = readSchemaFile("/typeSchemaKafka.json");
-            return jsonObject(schemaStrKafka);
-        } else {
-            // An object with no properties
-            String schemaStr = "{" //
-                    + "\"type\": \"object\"," //
-                    + "\"properties\": {}," //
-                    + "\"additionalProperties\": false" //
-                    + "}"; //
-
-            return jsonObject(schemaStr);
-        }
+        String schemaFile = type.isKafkaTopicDefined() ? "/typeSchemaKafka.json" : "/typeSchemaDmaap.json";
+        return jsonObject(readSchemaFile(schemaFile));
     }
 
     private String readSchemaFile(String filePath) throws IOException, ServiceException {
         InputStream in = getClass().getResourceAsStream(filePath);
         logger.debug("Reading application schema file from: {} with: {}", filePath, in);
         if (in == null) {
-            throw new ServiceException("Could not readfile: " + filePath);
+            throw new ServiceException("Could not read file: " + filePath, HttpStatus.INTERNAL_SERVER_ERROR);
         }
         return CharStreams.toString(new InputStreamReader(in, StandardCharsets.UTF_8));
     }
 
+    @SuppressWarnings("java:S2139") // Log exception
     private Object jsonObject(String json) {
         try {
             return JsonParser.parseString(json).getAsJsonObject();
         } catch (Exception e) {
-            logger.error("Bug, error in JSON: {}", json);
-            throw new NullPointerException(e.toString());
+            logger.error("Bug, error in JSON: {} {}", json, e.getMessage());
+            throw new NullPointerException(e.getMessage());
         }
     }
 
@@ -185,7 +179,6 @@ public class ProducerRegstrationTask {
     }
 
     private ProducerRegistrationInfo producerRegistrationInfo() {
-
         return ProducerRegistrationInfo.builder() //
                 .jobCallbackUrl(baseUrl() + ProducerCallbacksController.JOB_URL) //
                 .producerSupervisionCallbackUrl(baseUrl() + ProducerCallbacksController.SUPERVISION_URL) //
diff --git a/dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json b/dmaap-adaptor-java/src/main/resources/typeSchemaDmaap.json
new file mode 100644 (file)
index 0000000..a50b236
--- /dev/null
@@ -0,0 +1,10 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "filter": {
+       "type": "string"
+     }
+  },
+  "additionalProperties": false
+}
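
The new schema above allows at most one optional string property, filter. A consumer job for the DMaaP type can therefore be built roughly as the IntegrationWithIcs test further down in this change set does; the class name, target URI and owner below are placeholders, not part of the change:

    import com.google.gson.JsonParser;
    import org.oran.dmaapadapter.r1.ConsumerJobInfo;

    // Sketch based on IntegrationWithIcs.consumerJobInfo(...) in this change set.
    public class DmaapJobSketch {
        public static ConsumerJobInfo dmaapJob() {
            String jobDefinition = "{ \"filter\": \".*DmaapResponse.*\" }"; // matches typeSchemaDmaap.json
            String targetUri = "https://consumer-host:8088/consumer";      // placeholder callback URL
            return new ConsumerJobInfo("DmaapInformationType",
                    JsonParser.parseString(jobDefinition).getAsJsonObject(), "owner", targetUri, "");
        }
    }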
index 290b70a..38e7807 100644 (file)
           "type": "integer"
         }
       },
+      "additionalProperties": false,
       "required": [
         "maxSize",
         "maxTimeMiliseconds"
       ]
     }
   },
-  "required": []
+  "additionalProperties": false
 }
\ No newline at end of file
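
For the Kafka type, the job parameters carry a filter, buffering settings and a concurrency limit. The tests in this change build them via Job.Parameters and serialize with Gson; a sketch along those lines is shown below (the JSON property names produced by Gson follow the Java field names, which are not shown in this diff, so treat them as assumptions):

    import com.google.gson.Gson;
    import org.oran.dmaapadapter.repository.Job;

    // Sketch mirroring IntegrationWithKafka.jobParametersAsJsonObject(...) in this change set.
    public class KafkaJobParametersSketch {
        public static String kafkaJobParametersJson() {
            Job.Parameters param =
                    new Job.Parameters("^Message_1$", new Job.BufferTimeout(1000, 400), 20);
            return new Gson().toJson(param); // filter, buffer size/timeout and max concurrency
        }
    }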
index 287c95e..8c41423 100644 (file)
@@ -34,6 +34,7 @@ import java.nio.file.Paths;
 
 import org.json.JSONObject;
 import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
 import org.oran.dmaapadapter.clients.AsyncRestClient;
@@ -46,9 +47,12 @@ import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.controllers.ProducerCallbacksController;
 import org.oran.dmaapadapter.r1.ConsumerJobInfo;
 import org.oran.dmaapadapter.r1.ProducerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.tasks.KafkaJobDataConsumer;
+import org.oran.dmaapadapter.tasks.KafkaTopicConsumers;
+import org.oran.dmaapadapter.tasks.ProducerRegstrationTask;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
 import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
@@ -64,11 +68,12 @@ import org.springframework.test.context.TestPropertySource;
 import org.springframework.test.context.junit.jupiter.SpringExtension;
 import org.springframework.web.reactive.function.client.WebClientResponseException;
 
+import reactor.core.publisher.Flux;
 import reactor.core.publisher.Mono;
 import reactor.test.StepVerifier;
 
 @ExtendWith(SpringExtension.class)
-@SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT)
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
 @TestPropertySource(properties = { //
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
@@ -89,7 +94,13 @@ class ApplicationTest {
     private ConsumerController consumerController;
 
     @Autowired
-    private EcsSimulatorController ecsSimulatorController;
+    private IcsSimulatorController icsSimulatorController;
+
+    @Autowired
+    KafkaTopicConsumers kafkaTopicConsumers;
+
+    @Autowired
+    ProducerRegstrationTask producerRegistrationTask;
 
     private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
@@ -98,7 +109,7 @@ class ApplicationTest {
 
     static class TestApplicationConfig extends ApplicationConfig {
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return thisProcessUrl();
         }
 
@@ -138,10 +149,15 @@ class ApplicationTest {
         }
     }
 
+    @BeforeEach
+    void setPort() {
+        this.applicationConfig.setLocalServerHttpPort(this.localServerHttpPort);
+    }
+
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.ecsSimulatorController.testResults.reset();
+        this.icsSimulatorController.testResults.reset();
         this.jobs.clear();
     }
 
@@ -174,8 +190,7 @@ class ApplicationTest {
     }
 
     private ConsumerJobInfo consumerJobInfo() {
-        InfoType type = this.types.getAll().iterator().next();
-        return consumerJobInfo(type.getId(), "EI_JOB_ID");
+        return consumerJobInfo("DmaapInformationType", "EI_JOB_ID");
     }
 
     private Object jsonObject() {
@@ -232,15 +247,56 @@ class ApplicationTest {
     }
 
     @Test
-    void testWholeChain() throws Exception {
+    void testReceiveAndPostDataFromKafka() {
+        final String JOB_ID = "ID";
+        final String TYPE_ID = "KafkaInformationType";
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+
+        // Create a job
+        Job.Parameters param = new Job.Parameters("", new Job.BufferTimeout(123, 456), 1);
+        String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+        ConsumerJobInfo kafkaJobInfo =
+                new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", targetUri, "");
+
+        this.icsSimulatorController.addJob(kafkaJobInfo, JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        KafkaJobDataConsumer kafkaConsumer = this.kafkaTopicConsumers.getConsumers().get(TYPE_ID, JOB_ID);
+
+        // Handle received data from Kafka, check that it has been posted to the
+        // consumer
+        kafkaConsumer.start(Flux.just("data"));
+
+        ConsumerController.TestResults consumer = this.consumerController.testResults;
+        await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(1));
+        assertThat(consumer.receivedBodies.get(0)).isEqualTo("[\"data\"]");
+
+        // Test sending an exception through the data stream
+        kafkaConsumer.start(Flux.error(new NullPointerException()));
+
+        // Test regular restart of a stopped consumer
+        kafkaConsumer.stop();
+        this.kafkaTopicConsumers.restartNonRunningTopics();
+        await().untilAsserted(() -> assertThat(kafkaConsumer.isRunning()).isTrue());
+
+        // Delete the job
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+    }
+
+    @Test
+    void testReceiveAndPostDataFromDmaap() throws Exception {
         final String JOB_ID = "ID";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+        assertThat(producerRegistrationTask.isRegisteredInIcs()).isTrue();
+        producerRegistrationTask.supervisionTask().block();
 
         // Create a job
-        this.ecsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
 
         // Return two messages from DMAAP and verify that these are sent to the owner of
@@ -253,30 +309,28 @@ class ApplicationTest {
 
         String jobUrl = baseUrl() + ProducerCallbacksController.JOB_URL;
         String jobs = restClient().get(jobUrl).block();
-        assertThat(jobs).contains("ExampleInformationType");
+        assertThat(jobs).contains(JOB_ID);
 
         // Delete the job
-        this.ecsSimulatorController.deleteJob(JOB_ID, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID, restClient());
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-
     }
 
     @Test
     void testReRegister() throws Exception {
         // Wait for registration of types and producer
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Clear the registration, should trigger a re-register
-        ecsSimulatorController.testResults.reset();
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        icsSimulatorController.testResults.reset();
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Just clear the registered types, should trigger a re-register
-        ecsSimulatorController.testResults.types.clear();
+        icsSimulatorController.testResults.types.clear();
         await().untilAsserted(
-                () -> assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1));
-
+                () -> assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(2));
     }
 
     private void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains) {
@@ -303,5 +357,4 @@ class ApplicationTest {
         }
         return true;
     }
-
 }
@@ -47,7 +47,7 @@ import org.springframework.web.bind.annotation.RestController;
 
 @RestController("IcsSimulatorController")
 @Tag(name = "Information Coordinator Service Simulator (exists only in test)")
-public class EcsSimulatorController {
+public class IcsSimulatorController {
 
     private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
     private final static Gson gson = new GsonBuilder().create();
@@ -104,13 +104,13 @@ public class EcsSimulatorController {
         ProducerJobInfo request =
                 new ProducerJobInfo(job.jobDefinition, jobId, job.infoTypeId, job.jobResultUri, job.owner, "TIMESTAMP");
         String body = gson.toJson(request);
-        logger.info("ECS Simulator PUT job: {}", body);
+        logger.info("ICS Simulator PUT job: {}", body);
         restClient.post(url, body, MediaType.APPLICATION_JSON).block();
     }
 
     public void deleteJob(String jobId, AsyncRestClient restClient) {
         String url = this.testResults.registrationInfo.jobCallbackUrl + "/" + jobId;
-        logger.info("ECS Simulator DELETE job: {}", url);
+        logger.info("ICS Simulator DELETE job: {}", url);
         restClient.delete(url).block();
 
     }
@@ -38,8 +38,8 @@ import org.oran.dmaapadapter.configuration.ImmutableWebClientConfig;
 import org.oran.dmaapadapter.configuration.WebClientConfig;
 import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
 import org.oran.dmaapadapter.r1.ConsumerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
 import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
 import org.oran.dmaapadapter.repository.Jobs;
 import org.oran.dmaapadapter.tasks.ProducerRegstrationTask;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -59,11 +59,12 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
         "app.configuration-filepath=./src/test/resources/test_application_configuration.json", //
-        "app.ecs-base-url=https://localhost:8434" //
+        "app.ics-base-url=https://localhost:8434" //
 })
-class IntegrationWithEcs {
+class IntegrationWithIcs {
 
-    private static final String EI_JOB_ID = "EI_JOB_ID";
+    private static final String DMAAP_JOB_ID = "DMAAP_JOB_ID";
+    private static final String DMAAP_TYPE_ID = "DmaapInformationType";
 
     @Autowired
     private ApplicationConfig applicationConfig;
@@ -85,7 +86,7 @@ class IntegrationWithEcs {
     static class TestApplicationConfig extends ApplicationConfig {
 
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return "https://localhost:8434";
         }
 
@@ -128,8 +129,7 @@ class IntegrationWithEcs {
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.jobs.clear();
-        this.types.clear();
+        assertThat(this.jobs.size()).isZero();
     }
 
     private AsyncRestClient restClient(boolean useTrustValidation) {
@@ -160,35 +160,30 @@ class IntegrationWithEcs {
         return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
     }
 
-    private String ecsBaseUrl() {
-        return applicationConfig.getEcsBaseUrl();
+    private String icsBaseUrl() {
+        return applicationConfig.getIcsBaseUrl();
     }
 
     private String jobUrl(String jobId) {
-        return ecsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId;
+        return icsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId + "?typeCheck=true";
     }
 
-    private void createInformationJobInEcs(String jobId) {
-        String body = gson.toJson(consumerJobInfo());
+    private void createInformationJobInIcs(String typeId, String jobId, String filter) {
+        String body = gson.toJson(consumerJobInfo(typeId, filter));
         try {
             // Delete the job if it already exists
-            deleteInformationJobInEcs(jobId);
+            deleteInformationJobInIcs(jobId);
         } catch (Exception e) {
         }
         restClient().putForEntity(jobUrl(jobId), body).block();
     }
 
-    private void deleteInformationJobInEcs(String jobId) {
+    private void deleteInformationJobInIcs(String jobId) {
         restClient().delete(jobUrl(jobId)).block();
     }
 
-    private ConsumerJobInfo consumerJobInfo() {
-        InfoType type = this.types.getAll().iterator().next();
-        return consumerJobInfo(type.getId(), EI_JOB_ID);
-    }
-
-    private Object jsonObject() {
-        return jsonObject("{}");
+    private ConsumerJobInfo consumerJobInfo(String typeId, String filter) {
+        return consumerJobInfo(typeId, DMAAP_JOB_ID, filter);
     }
 
     private Object jsonObject(String json) {
@@ -199,39 +194,63 @@ class IntegrationWithEcs {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId) {
+    private String quote(String str) {
+        return "\"" + str + "\"";
+    }
+
+    private String consumerUri() {
+        return selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+    }
+
+    private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId, String filter) {
         try {
-            String targetUri = selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId, jsonObject(), "owner", targetUri, "");
+
+            String jsonStr = "{ \"filter\" :" + quote(filter) + "}";
+            return new ConsumerJobInfo(typeId, jsonObject(jsonStr), "owner", consumerUri(), "");
         } catch (Exception e) {
             return null;
         }
     }
 
+    @Test
+    void testCreateKafkaJob() {
+        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
+        final String TYPE_ID = "KafkaInformationType";
+
+        Job.Parameters param = new Job.Parameters("filter", new Job.BufferTimeout(123, 456), 1);
+
+        ConsumerJobInfo jobInfo =
+                new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", consumerUri(), "");
+        String body = gson.toJson(jobInfo);
+
+        restClient().putForEntity(jobUrl("KAFKA_JOB_ID"), body).block();
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+        deleteInformationJobInIcs("KAFKA_JOB_ID");
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+    }
+
     @Test
     void testWholeChain() throws Exception {
-        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInEcs()).isTrue());
+        await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
 
-        createInformationJobInEcs(EI_JOB_ID);
+        createInformationJobInIcs(DMAAP_TYPE_ID, DMAAP_JOB_ID, ".*DmaapResponse.*");
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
 
         DmaapSimulatorController.dmaapResponses.add("DmaapResponse1");
         DmaapSimulatorController.dmaapResponses.add("DmaapResponse2");
+        DmaapSimulatorController.dmaapResponses.add("Junk");
 
         ConsumerController.TestResults results = this.consumerController.testResults;
         await().untilAsserted(() -> assertThat(results.receivedBodies.size()).isEqualTo(2));
         assertThat(results.receivedBodies.get(0)).isEqualTo("DmaapResponse1");
 
-        deleteInformationJobInEcs(EI_JOB_ID);
+        deleteInformationJobInIcs(DMAAP_JOB_ID);
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
 
-        synchronized (this) {
-            // logger.warn("**************** Keeping server alive! " +
-            // this.applicationConfig.getLocalServerHttpPort());
-            // this.wait();
-        }
     }
 
 }
index 470e114..c38af8a 100644 (file)
@@ -75,10 +75,12 @@ import reactor.kafka.sender.SenderRecord;
 @TestPropertySource(properties = { //
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", //
-        "app.configuration-filepath=./src/test/resources/test_application_configuration_kafka.json"//
+        "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
 })
 class IntegrationWithKafka {
 
+    final String TYPE_ID = "KafkaInformationType";
+
     @Autowired
     private ApplicationConfig applicationConfig;
 
@@ -92,12 +94,12 @@ class IntegrationWithKafka {
     private ConsumerController consumerController;
 
     @Autowired
-    private EcsSimulatorController ecsSimulatorController;
+    private IcsSimulatorController icsSimulatorController;
 
     @Autowired
     private KafkaTopicConsumers kafkaTopicConsumers;
 
-    private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
+    private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
 
     private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
 
@@ -106,7 +108,7 @@ class IntegrationWithKafka {
 
     static class TestApplicationConfig extends ApplicationConfig {
         @Override
-        public String getEcsBaseUrl() {
+        public String getIcsBaseUrl() {
             return thisProcessUrl();
         }
 
@@ -149,7 +151,7 @@ class IntegrationWithKafka {
     @AfterEach
     void reset() {
         this.consumerController.testResults.reset();
-        this.ecsSimulatorController.testResults.reset();
+        this.icsSimulatorController.testResults.reset();
         this.jobs.clear();
     }
 
@@ -181,14 +183,15 @@ class IntegrationWithKafka {
         return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
     }
 
-    private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+    private static Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize,
+            int maxConcurrency) {
         Job.Parameters param =
                 new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
         String str = gson.toJson(param);
         return jsonObject(str);
     }
 
-    private Object jsonObject(String json) {
+    private static Object jsonObject(String json) {
         try {
             return JsonParser.parseString(json).getAsJsonObject();
         } catch (Exception e) {
@@ -196,12 +199,10 @@ class IntegrationWithKafka {
         }
     }
 
-    private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
+    ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
         try {
-            InfoType type = this.types.getAll().iterator().next();
-            String typeId = type.getId();
             String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
-            return new ConsumerJobInfo(typeId,
+            return new ConsumerJobInfo(TYPE_ID,
                     jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
                     "");
         } catch (Exception e) {
@@ -221,9 +222,11 @@ class IntegrationWithKafka {
         return SenderOptions.create(props);
     }
 
-    private SenderRecord<Integer, String, Integer> senderRecord(String data, int i) {
-        final InfoType infoType = this.types.getAll().iterator().next();
-        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
+    private SenderRecord<Integer, String, Integer> senderRecord(String data) {
+        final InfoType infoType = this.types.get(TYPE_ID);
+        int key = 1;
+        int correlationMetadata = 2;
+        return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
     }
 
     private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
@@ -244,38 +247,32 @@ class IntegrationWithKafka {
     }
 
     @Test
-    void kafkaIntegrationTest() throws InterruptedException {
+    void kafkaIntegrationTest() throws Exception {
         final String JOB_ID1 = "ID1";
         final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs. One buffering and one with a filter
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
                 restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend);
 
         verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
 
-        // Just for testing quoting
-        this.consumerController.testResults.reset();
-        dataToSend = Flux.just(senderRecord("Message\"_", 1));
-        sendDataToStream(dataToSend);
-        verifiedReceivedByConsumer("[\"Message\\\"_1\"]");
-
         // Delete the jobs
-        this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
-        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers()).isEmpty());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 
     @Test
@@ -284,30 +281,38 @@ class IntegrationWithKafka {
         final String JOB_ID2 = "ID2";
 
         // Register producer, Register types
-        await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
-        assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+        await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+        assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
 
         // Create two jobs.
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
-        this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
+                restClient());
+        this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
 
         await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
 
-        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+        var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
         sendDataToStream(dataToSend); // this should overflow
 
-        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().values().iterator().next();
+        KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
         await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
         this.consumerController.testResults.reset();
 
-        kafkaTopicConsumers.restartNonRunningTasks();
-        this.ecsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+        kafkaTopicConsumers.restartNonRunningTopics();
+        Thread.sleep(1000); // Restarting the input seems to take some async time
 
-        dataToSend = Flux.range(1, 1).map(i -> senderRecord("Howdy_", i));
+        dataToSend = Flux.just(senderRecord("Howdy\""));
         sendDataToStream(dataToSend);
 
-        verifiedReceivedByConsumer("Howdy_1");
+        verifiedReceivedByConsumer("[\"Howdy\\\"\"]");
+
+        // Delete the jobs
+        this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+        this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
+
+        await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+        await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
     }
 
 }
index 794eb8e..32e6c32 100644 (file)
@@ -1,9 +1,14 @@
 {
    "types": [
       {
-         "id": "ExampleInformationType",
+         "id": "DmaapInformationType",
          "dmaapTopicUrl": "/dmaap-topic-1",
          "useHttpProxy": false
+      },
+      {
+         "id": "KafkaInformationType",
+         "kafkaInputTopic": "TutorialTopic",
+         "useHttpProxy": false
       }
    ]
 }
\ No newline at end of file
diff --git a/dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json b/dmaap-adaptor-java/src/test/resources/test_application_configuration_kafka.json
deleted file mode 100644 (file)
index e2ea525..0000000
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-   "types": [
-      {
-         "id": "ExampleInformationType",
-         "kafkaInputTopic": "TutorialTopic",
-         "useHttpProxy": false
-      }
-   ]
-}
\ No newline at end of file
index 2fd7194..69a4626 100644 (file)
@@ -8,7 +8,7 @@ The producer takes a number of environment variables, described below, as config
 
 >- INFO_PRODUCER_HOST  **Required**. The host for the producer.                                   Example: `https://mrproducer`
 >- INFO_PRODUCER_PORT  Optional. The port for the producer.                                       Defaults to `8085`.
->- INFO_COORD_ADDR     Optional. The address of the Information Coordinator.                      Defaults to `https://enrichmentservice:8434`.
+>- INFO_COORD_ADDR     Optional. The address of the Information Coordinator.                      Defaults to `https://informationservice:8434`.
 >- DMAAP_MR_ADDR       Optional. The address of the DMaaP Message Router.                         Defaults to `https://message-router.onap:3905`.
 >- PRODUCER_CERT_PATH  Optional. The path to the certificate to use for https.                    Defaults to `security/producer.crt`
 >- PRODUCER_KEY_PATH   Optional. The path to the key to the certificate to use for https.         Defaults to `security/producer.key`
@@ -38,19 +38,23 @@ At start up the producer will register the configured job types in ICS and also
 
 Once the initial registration is done, the producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer until it is available again.
 
+The producer provides a REST API to control the log level. The available levels are the same as the ones used in the configuration above.
+
+    PUT https://mrproducer:8085/admin/log?level=<new level>
+
 ## Development
 
 To make it easy to test during development of the producer, two stubs are provided in the `stub` folder.
 
-One, under the `dmaap` folder, called `dmaap` that stubs MR and respond with an array with one message with `eventSeverity` alternating between `NORMAL` and `CRITICAL`. The default port is `3905`, but this can be overridden by passing a `-port [PORT]` flag when starting the stub. To build and start the stub, do the following:
+One, under the `dmaap` folder, called `dmaap`, that stubs MR and responds with an array containing one message whose `eventSeverity` alternates between `NORMAL` and `CRITICAL`. The default port is `3905`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
 >1. cd stub/dmaap
 >2. go build
->3. ./dmaap
+>3. ./dmaap [-port \<PORT>]
 
-One, under the `consumer` folder, called `consumer` that at startup will register a job of type `STD_Fault_Messages` in ICS, and then listen for REST calls and print the body of them. By default, it listens to the port `40935`, but his can be overridden by passing a `-port [PORT]` flag when starting the stub. To build and start the stub, do the following:
+One, under the `consumer` folder, called `consumer`, that at startup registers a job of type `STD_Fault_Messages` in ICS and then listens for REST calls and prints their bodies. By default, it listens on port `40935`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
 >1. cd stub/consumer
 >2. go build
->3. ./consumer
+>3. ./consumer [-port \<PORT>]
 
 Mocks needed for unit tests have been generated using `github.com/stretchr/testify/mock` and are checked in under the `mocks` folder. **Note!** Keep in mind that if any of the mocked interfaces change, a new mock for that interface must be generated and checked in.
 
diff --git a/dmaap-mediator-producer/build_and_test.sh b/dmaap-mediator-producer/build_and_test.sh
new file mode 100755 (executable)
index 0000000..397124d
--- /dev/null
@@ -0,0 +1,22 @@
+#!/bin/bash
+##############################################################################
+#
+#   Copyright (C) 2021: Nordix Foundation
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+##############################################################################
+
+go build
+
+go test ./...
diff --git a/dmaap-mediator-producer/container-tag.yaml b/dmaap-mediator-producer/container-tag.yaml
deleted file mode 100644 (file)
index 230e590..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
----
-tag: 1.0.0
index ffea6a2..eaaecf7 100644 (file)
@@ -3,15 +3,15 @@ module oransc.org/nonrtric/dmaapmediatorproducer
 go 1.17
 
 require (
+       github.com/gorilla/mux v1.8.0
+       github.com/hashicorp/go-retryablehttp v0.7.0
        github.com/sirupsen/logrus v1.8.1
        github.com/stretchr/testify v1.7.0
 )
 
 require (
        github.com/davecgh/go-spew v1.1.1 // indirect
-       github.com/gorilla/mux v1.8.0 // indirect
        github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
-       github.com/hashicorp/go-retryablehttp v0.7.0 // indirect
        github.com/pmezard/go-difflib v1.0.0 // indirect
        github.com/stretchr/objx v0.1.0 // indirect
        golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 // indirect
index 8447fa0..4b3557b 100644 (file)
@@ -5,6 +5,7 @@ github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
 github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
 github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
 github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
+github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
 github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
 github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4=
 github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
index eef1b5f..e03c40a 100644 (file)
@@ -21,6 +21,7 @@
 package config
 
 import (
+       "encoding/json"
        "fmt"
        "os"
        "strconv"
@@ -42,7 +43,7 @@ func New() *Config {
        return &Config{
                InfoProducerHost:       getEnv("INFO_PRODUCER_HOST", ""),
                InfoProducerPort:       getEnvAsInt("INFO_PRODUCER_PORT", 8085),
-               InfoCoordinatorAddress: getEnv("INFO_COORD_ADDR", "https://enrichmentservice:8434"),
+               InfoCoordinatorAddress: getEnv("INFO_COORD_ADDR", "https://informationservice:8434"),
                DMaaPMRAddress:         getEnv("DMAAP_MR_ADDR", "https://message-router.onap:3905"),
                ProducerCertPath:       getEnv("PRODUCER_CERT_PATH", "security/producer.crt"),
                ProducerKeyPath:        getEnv("PRODUCER_KEY_PATH", "security/producer.key"),
@@ -81,3 +82,19 @@ func getLogLevel() log.Level {
                return log.InfoLevel
        }
 }
+
+func GetJobTypesFromConfiguration(configFile string) ([]TypeDefinition, error) {
+       typeDefsByte, err := os.ReadFile(configFile)
+       if err != nil {
+               return nil, err
+       }
+       typeDefs := struct {
+               Types []TypeDefinition `json:"types"`
+       }{}
+       err = json.Unmarshal(typeDefsByte, &typeDefs)
+       if err != nil {
+               return nil, err
+       }
+
+       return typeDefs.Types, nil
+}
index 90d3c03..faf5900 100644 (file)
@@ -23,7 +23,7 @@ package config
 import (
        "bytes"
        "os"
-       "reflect"
+       "path/filepath"
        "testing"
 
        log "github.com/sirupsen/logrus"
@@ -70,14 +70,13 @@ func TestNew_faultyIntValueSetConfigContainDefaultValueAndWarnInLog(t *testing.T
                LogLevel:               log.InfoLevel,
                InfoProducerHost:       "",
                InfoProducerPort:       8085,
-               InfoCoordinatorAddress: "https://enrichmentservice:8434",
+               InfoCoordinatorAddress: "https://informationservice:8434",
                DMaaPMRAddress:         "https://message-router.onap:3905",
                ProducerCertPath:       "security/producer.crt",
                ProducerKeyPath:        "security/producer.key",
        }
-       if got := New(); !reflect.DeepEqual(got, &wantConfig) {
-               t.Errorf("New() = %v, want %v", got, &wantConfig)
-       }
+       got := New()
+       assertions.Equal(&wantConfig, got)
        logString := buf.String()
        assertions.Contains(logString, "Invalid int value: wrong for variable: INFO_PRODUCER_PORT. Default value: 8085 will be used")
 }
@@ -97,7 +96,7 @@ func TestNew_envFaultyLogLevelConfigContainDefaultValues(t *testing.T) {
                LogLevel:               log.InfoLevel,
                InfoProducerHost:       "",
                InfoProducerPort:       8085,
-               InfoCoordinatorAddress: "https://enrichmentservice:8434",
+               InfoCoordinatorAddress: "https://informationservice:8434",
                DMaaPMRAddress:         "https://message-router.onap:3905",
                ProducerCertPath:       "security/producer.crt",
                ProducerKeyPath:        "security/producer.key",
@@ -109,3 +108,30 @@ func TestNew_envFaultyLogLevelConfigContainDefaultValues(t *testing.T) {
        logString := buf.String()
        assertions.Contains(logString, "Invalid log level: wrong. Log level will be Info!")
 }
+
+const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1"}]}`
+
+func TestGetTypesFromConfiguration_fileOkShouldReturnSliceOfTypeDefinitions(t *testing.T) {
+       assertions := require.New(t)
+       typesDir, err := os.MkdirTemp("", "configs")
+       if err != nil {
+               t.Errorf("Unable to create temporary directory for types due to: %v", err)
+       }
+       fname := filepath.Join(typesDir, "type_config.json")
+       t.Cleanup(func() {
+               os.RemoveAll(typesDir)
+       })
+       if err = os.WriteFile(fname, []byte(typeDefinition), 0666); err != nil {
+               t.Errorf("Unable to create temporary config file for types due to: %v", err)
+       }
+
+       types, err := GetJobTypesFromConfiguration(fname)
+
+       wantedType := TypeDefinition{
+               Id:            "type1",
+               DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+       }
+       wantedTypes := []TypeDefinition{wantedType}
+       assertions.EqualValues(wantedTypes, types)
+       assertions.Nil(err)
+}
index 6dad5fd..b6616a1 100644 (file)
@@ -21,9 +21,7 @@
 package jobs
 
 import (
-       "encoding/json"
        "fmt"
-       "os"
        "sync"
 
        log "github.com/sirupsen/logrus"
@@ -47,7 +45,7 @@ type JobInfo struct {
 }
 
 type JobTypesManager interface {
-       LoadTypesFromConfiguration() ([]config.TypeDefinition, error)
+       LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition
        GetSupportedTypes() []string
 }
 
@@ -57,16 +55,14 @@ type JobsManager interface {
 }
 
 type JobsManagerImpl struct {
-       configFile       string
        allTypes         map[string]TypeData
        pollClient       restclient.HTTPClient
        mrAddress        string
        distributeClient restclient.HTTPClient
 }
 
-func NewJobsManagerImpl(typeConfigFilePath string, pollClient restclient.HTTPClient, mrAddr string, distributeClient restclient.HTTPClient) *JobsManagerImpl {
+func NewJobsManagerImpl(pollClient restclient.HTTPClient, mrAddr string, distributeClient restclient.HTTPClient) *JobsManagerImpl {
        return &JobsManagerImpl{
-               configFile:       typeConfigFilePath,
                allTypes:         make(map[string]TypeData),
                pollClient:       pollClient,
                mrAddress:        mrAddr,
@@ -107,26 +103,15 @@ func (jm *JobsManagerImpl) validateJobInfo(ji JobInfo) error {
        return nil
 }
 
-func (jm *JobsManagerImpl) LoadTypesFromConfiguration() ([]config.TypeDefinition, error) {
-       typeDefsByte, err := os.ReadFile(jm.configFile)
-       if err != nil {
-               return nil, err
-       }
-       typeDefs := struct {
-               Types []config.TypeDefinition `json:"types"`
-       }{}
-       err = json.Unmarshal(typeDefsByte, &typeDefs)
-       if err != nil {
-               return nil, err
-       }
-       for _, typeDef := range typeDefs.Types {
+func (jm *JobsManagerImpl) LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition {
+       for _, typeDef := range types {
                jm.allTypes[typeDef.Id] = TypeData{
                        TypeId:        typeDef.Id,
                        DMaaPTopicURL: typeDef.DmaapTopicURL,
                        jobsHandler:   newJobsHandler(typeDef.Id, typeDef.DmaapTopicURL, jm.pollClient, jm.distributeClient),
                }
        }
-       return typeDefs.Types, nil
+       return types
 }
 
 func (jm *JobsManagerImpl) GetSupportedTypes() []string {
index 552b5fa..30b4ffd 100644 (file)
@@ -24,8 +24,6 @@ import (
        "bytes"
        "io/ioutil"
        "net/http"
-       "os"
-       "path/filepath"
        "sync"
        "testing"
        "time"
@@ -38,26 +36,18 @@ const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unau
 
 func TestJobsManagerGetTypes_filesOkShouldReturnSliceOfTypesAndProvideSupportedTypes(t *testing.T) {
        assertions := require.New(t)
-       typesDir, err := os.MkdirTemp("", "configs")
-       if err != nil {
-               t.Errorf("Unable to create temporary directory for types due to: %v", err)
-       }
-       fname := filepath.Join(typesDir, "type_config.json")
-       managerUnderTest := NewJobsManagerImpl(fname, nil, "", nil)
-       t.Cleanup(func() {
-               os.RemoveAll(typesDir)
-       })
-       if err = os.WriteFile(fname, []byte(typeDefinition), 0666); err != nil {
-               t.Errorf("Unable to create temporary config file for types due to: %v", err)
-       }
-       types, err := managerUnderTest.LoadTypesFromConfiguration()
+
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+
        wantedType := config.TypeDefinition{
                Id:            "type1",
                DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
        }
        wantedTypes := []config.TypeDefinition{wantedType}
+
+       types := managerUnderTest.LoadTypesFromConfiguration(wantedTypes)
+
        assertions.EqualValues(wantedTypes, types)
-       assertions.Nil(err)
 
        supportedTypes := managerUnderTest.GetSupportedTypes()
        assertions.EqualValues([]string{"type1"}, supportedTypes)
@@ -65,7 +55,7 @@ func TestJobsManagerGetTypes_filesOkShouldReturnSliceOfTypesAndProvideSupportedT
 
 func TestJobsManagerAddJobWhenTypeIsSupported_shouldAddJobToChannel(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
        wantedJob := JobInfo{
                Owner:            "owner",
                LastUpdated:      "now",
@@ -93,7 +83,7 @@ func TestJobsManagerAddJobWhenTypeIsSupported_shouldAddJobToChannel(t *testing.T
 
 func TestJobsManagerAddJobWhenTypeIsNotSupported_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
        jobInfo := JobInfo{
                InfoTypeIdentity: "type1",
        }
@@ -105,7 +95,7 @@ func TestJobsManagerAddJobWhenTypeIsNotSupported_shouldReturnError(t *testing.T)
 
 func TestJobsManagerAddJobWhenJobIdMissing_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
        managerUnderTest.allTypes["type1"] = TypeData{
                TypeId: "type1",
        }
@@ -120,7 +110,7 @@ func TestJobsManagerAddJobWhenJobIdMissing_shouldReturnError(t *testing.T) {
 
 func TestJobsManagerAddJobWhenTargetUriMissing_shouldReturnError(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
        managerUnderTest.allTypes["type1"] = TypeData{
                TypeId: "type1",
        }
@@ -136,7 +126,7 @@ func TestJobsManagerAddJobWhenTargetUriMissing_shouldReturnError(t *testing.T) {
 
 func TestJobsManagerDeleteJob_shouldSendDeleteToChannel(t *testing.T) {
        assertions := require.New(t)
-       managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+       managerUnderTest := NewJobsManagerImpl(nil, "", nil)
        jobsHandler := jobsHandler{
                deleteJobCh: make(chan string)}
        managerUnderTest.allTypes["type1"] = TypeData{
@@ -192,7 +182,7 @@ func TestAddJobToJobsManager_shouldStartPollAndDistributeMessages(t *testing.T)
        })
        jobsHandler := newJobsHandler("type1", "/topicUrl", pollClientMock, distributeClientMock)
 
-       jobsManager := NewJobsManagerImpl("", pollClientMock, "http://mrAddr", distributeClientMock)
+       jobsManager := NewJobsManagerImpl(pollClientMock, "http://mrAddr", distributeClientMock)
        jobsManager.allTypes["type1"] = TypeData{
                DMaaPTopicURL: "/topicUrl",
                TypeId:        "type1",
index 8ccd4b2..9a827e7 100644 (file)
@@ -31,6 +31,7 @@ import (
        "time"
 
        "github.com/hashicorp/go-retryablehttp"
+       log "github.com/sirupsen/logrus"
 )
 
 // HTTPClient interface
@@ -115,6 +116,7 @@ func CreateClientCertificate(certPath string, keyPath string) (tls.Certificate,
 
 func CreateRetryClient(cert tls.Certificate) *http.Client {
        rawRetryClient := retryablehttp.NewClient()
+       rawRetryClient.Logger = leveledLogger{}
        rawRetryClient.RetryWaitMax = time.Minute
        rawRetryClient.RetryMax = math.MaxInt
        rawRetryClient.HTTPClient.Transport = getSecureTransportWithoutVerify(cert)
@@ -145,3 +147,28 @@ func IsUrlSecure(configUrl string) bool {
        u, _ := url.Parse(configUrl)
        return u.Scheme == "https"
 }
+
+// Used to get leveled logging in the RetryClient
+type leveledLogger struct {
+}
+
+func (ll leveledLogger) Error(msg string, keysAndValues ...interface{}) {
+       log.WithFields(getFields(keysAndValues)).Error(msg)
+}
+func (ll leveledLogger) Info(msg string, keysAndValues ...interface{}) {
+       log.WithFields(getFields(keysAndValues)).Info(msg)
+}
+func (ll leveledLogger) Debug(msg string, keysAndValues ...interface{}) {
+       log.WithFields(getFields(keysAndValues)).Debug(msg)
+}
+func (ll leveledLogger) Warn(msg string, keysAndValues ...interface{}) {
+       log.WithFields(getFields(keysAndValues)).Warn(msg)
+}
+
+func getFields(keysAndValues []interface{}) log.Fields {
+       fields := log.Fields{}
+       for i := 0; i < len(keysAndValues); i = i + 2 {
+               fields[fmt.Sprint(keysAndValues[i])] = keysAndValues[i+1]
+       }
+       return fields
+}
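
The leveledLogger adapter above relies on go-retryablehttp always passing an even-length list of key/value pairs, which holds for the current library. If one wanted to harden getFields against a dangling key, a defensive variant could look like the sketch below; getFieldsSafe is a hypothetical name and is not part of this change.

// Tolerates an odd number of elements instead of indexing past the end.
func getFieldsSafe(keysAndValues []interface{}) log.Fields {
	fields := log.Fields{}
	for i := 0; i+1 < len(keysAndValues); i += 2 {
		fields[fmt.Sprint(keysAndValues[i])] = keysAndValues[i+1]
	}
	if len(keysAndValues)%2 != 0 {
		// Keep the trailing key visible rather than dropping it silently.
		fields[fmt.Sprint(keysAndValues[len(keysAndValues)-1])] = "(no value)"
	}
	return fields
}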
index 79646c2..8c5577d 100644 (file)
@@ -27,6 +27,7 @@ import (
        "net/http"
 
        "github.com/gorilla/mux"
+       log "github.com/sirupsen/logrus"
        "oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
 )
 
@@ -34,6 +35,8 @@ const StatusPath = "/status"
 const AddJobPath = "/jobs"
 const jobIdToken = "infoJobId"
 const deleteJobPath = AddJobPath + "/{" + jobIdToken + "}"
+const logLevelToken = "level"
+const logAdminPath = "/admin/log"
 
 type ProducerCallbackHandler struct {
        jobsManager jobs.JobsManager
@@ -51,6 +54,7 @@ func NewRouter(jm jobs.JobsManager) *mux.Router {
        r.HandleFunc(StatusPath, statusHandler).Methods(http.MethodGet).Name("status")
        r.HandleFunc(AddJobPath, callbackHandler.addInfoJobHandler).Methods(http.MethodPost).Name("add")
        r.HandleFunc(deleteJobPath, callbackHandler.deleteInfoJobHandler).Methods(http.MethodDelete).Name("delete")
+       r.HandleFunc(logAdminPath, callbackHandler.setLogLevel).Methods(http.MethodPut).Name("setLogLevel")
        r.NotFoundHandler = &notFoundHandler{}
        r.MethodNotAllowedHandler = &methodNotAllowedHandler{}
        return r
@@ -87,6 +91,17 @@ func (h *ProducerCallbackHandler) deleteInfoJobHandler(w http.ResponseWriter, r
        h.jobsManager.DeleteJobFromRESTCall(id)
 }
 
+func (h *ProducerCallbackHandler) setLogLevel(w http.ResponseWriter, r *http.Request) {
+       query := r.URL.Query()
+       logLevelStr := query.Get(logLevelToken)
+       if loglevel, err := log.ParseLevel(logLevelStr); err == nil {
+               log.SetLevel(loglevel)
+       } else {
+               http.Error(w, fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), http.StatusBadRequest)
+               return
+       }
+}
+
 type notFoundHandler struct{}
 
 func (h *notFoundHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
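
For reference, the new /admin/log route added above can be driven with a plain HTTP PUT. A minimal client sketch follows; scheme, host and port are placeholders (8085 is the default INFO_PRODUCER_PORT).

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Any level accepted by logrus (e.g. debug, info, warning) returns 200;
	// an unknown level returns 400 Bad Request.
	req, err := http.NewRequest(http.MethodPut, "http://localhost:8085/admin/log?level=debug", nil)
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}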
index 1d458c9..1db3644 100644 (file)
@@ -78,6 +78,14 @@ func TestNewRouter(t *testing.T) {
        handler.ServeHTTP(responseRecorder, newRequest(http.MethodPut, "/status", nil, t))
        assertions.Equal(http.StatusMethodNotAllowed, responseRecorder.Code)
        assertions.Contains(responseRecorder.Body.String(), "Method is not supported.")
+
+       setLogLevelRoute := r.Get("setLogLevel")
+       assertions.NotNil(setLogLevelRoute)
+       supportedMethods, err = setLogLevelRoute.GetMethods()
+       assertions.Equal([]string{http.MethodPut}, supportedMethods)
+       assertions.Nil(err)
+       path, _ = setLogLevelRoute.GetPathTemplate()
+       assertions.Equal("/admin/log", path)
 }
 
 func TestStatusHandler(t *testing.T) {
@@ -119,7 +127,6 @@ func TestAddInfoJobHandler(t *testing.T) {
                                },
                        },
                        wantedStatus: http.StatusOK,
-                       wantedBody:   "",
                },
                {
                        name: "AddInfoJobHandler with incorrect job info, should return BadRequest",
@@ -171,6 +178,50 @@ func TestDeleteJob(t *testing.T) {
        jobHandlerMock.AssertCalled(t, "DeleteJobFromRESTCall", "job1")
 }
 
+func TestSetLogLevel(t *testing.T) {
+       assertions := require.New(t)
+
+       type args struct {
+               logLevel string
+       }
+       tests := []struct {
+               name         string
+               args         args
+               wantedStatus int
+               wantedBody   string
+       }{
+               {
+                       name: "Set to valid log level, should return OK",
+                       args: args{
+                               logLevel: "Debug",
+                       },
+                       wantedStatus: http.StatusOK,
+               },
+               {
+                       name: "Set to invalid log level, should return BadRequest",
+                       args: args{
+                               logLevel: "bad",
+                       },
+                       wantedStatus: http.StatusBadRequest,
+                       wantedBody:   "Invalid log level: bad",
+               },
+       }
+       for _, tt := range tests {
+               t.Run(tt.name, func(t *testing.T) {
+                       callbackHandlerUnderTest := NewProducerCallbackHandler(nil)
+
+                       handler := http.HandlerFunc(callbackHandlerUnderTest.setLogLevel)
+                       responseRecorder := httptest.NewRecorder()
+                       r, _ := http.NewRequest(http.MethodPut, "/admin/log?level="+tt.args.logLevel, nil)
+
+                       handler.ServeHTTP(responseRecorder, r)
+
+                       assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
+                       assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
+               })
+       }
+}
+
 func newRequest(method string, url string, jobInfo *jobs.JobInfo, t *testing.T) *http.Request {
        var body io.Reader
        if jobInfo != nil {
index 194ed75..2d72466 100644 (file)
@@ -56,7 +56,7 @@ func main() {
        }
        retryClient := restclient.CreateRetryClient(cert)
 
-       jobsManager := jobs.NewJobsManagerImpl("configs/type_config.json", retryClient, configuration.DMaaPMRAddress, restclient.CreateClientWithoutRetry(cert, 5*time.Second))
+       jobsManager := jobs.NewJobsManagerImpl(retryClient, configuration.DMaaPMRAddress, restclient.CreateClientWithoutRetry(cert, 10*time.Second))
        if err := registerTypesAndProducer(jobsManager, configuration.InfoCoordinatorAddress, callbackAddress, retryClient); err != nil {
                log.Fatalf("Stopping producer due to: %v", err)
        }
@@ -87,13 +87,15 @@ func validateConfiguration(configuration *config.Config) error {
 }
 func registerTypesAndProducer(jobTypesHandler jobs.JobTypesManager, infoCoordinatorAddress string, callbackAddress string, client restclient.HTTPClient) error {
        registrator := config.NewRegistratorImpl(infoCoordinatorAddress, client)
-       if types, err := jobTypesHandler.LoadTypesFromConfiguration(); err == nil {
-               if regErr := registrator.RegisterTypes(types); regErr != nil {
-                       return fmt.Errorf("unable to register all types due to: %v", regErr)
-               }
-       } else {
-               return fmt.Errorf("unable to get types to register due to: %v", err)
+       configTypes, err := config.GetJobTypesFromConfiguration("configs/type_config.json")
+       if err != nil {
+               return fmt.Errorf("unable to register all types due to: %v", err)
        }
+       regErr := registrator.RegisterTypes(jobTypesHandler.LoadTypesFromConfiguration(configTypes))
+       if regErr != nil {
+               return fmt.Errorf("unable to register all types due to: %v", regErr)
+       }
+
        producer := config.ProducerRegistrationInfo{
                InfoProducerSupervisionCallbackUrl: callbackAddress + server.StatusPath,
                SupportedInfoTypes:                 jobTypesHandler.GetSupportedTypes(),
diff --git a/dmaap-mediator-producer/pom.xml b/dmaap-mediator-producer/pom.xml
new file mode 100644 (file)
index 0000000..00c3d9f
--- /dev/null
@@ -0,0 +1,112 @@
+<!--
+  ============LICENSE_START=======================================================
+   Copyright (C) 2021 Nordix Foundation.
+  ================================================================================
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+  SPDX-License-Identifier: Apache-2.0
+  ============LICENSE_END=========================================================
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <groupId>oransc.org</groupId>
+    <artifactId>dmaapmediatorproducer</artifactId>
+    <version>1.0.0</version>
+    <properties>
+        <docker-maven-plugin.version>0.30.0</docker-maven-plugin.version>
+    </properties>
+
+     <build>
+        <plugins>
+            <plugin>
+              <artifactId>exec-maven-plugin</artifactId>
+              <groupId>org.codehaus.mojo</groupId>
+              <executions>
+                  <execution>
+                      <id>Build Go binary</id>
+                      <phase>generate-sources</phase>
+                      <goals>
+                          <goal>exec</goal>
+                      </goals>
+                      <configuration>
+                          <executable>${basedir}/build_and_test.sh</executable>
+                      </configuration>
+                  </execution>
+              </executions>
+          </plugin>
+            <plugin>
+                <groupId>io.fabric8</groupId>
+                <artifactId>docker-maven-plugin</artifactId>
+                <version>${docker-maven-plugin.version}</version>
+                <inherited>false</inherited>
+                <executions>
+                    <execution>
+                        <id>generate-nonrtric-dmaap-mediator-producer-image</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>build</goal>
+                        </goals>
+                        <configuration>
+                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
+                            <images>
+                                <image>
+                                    <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
+                                    <build>
+                                        <cleanup>try</cleanup>
+                                        <contextDir>${basedir}</contextDir>
+                                        <dockerFile>Dockerfile</dockerFile>
+                                        <args>
+                                            <JAR>${project.build.finalName}.jar</JAR>
+                                        </args>
+                                        <tags>
+                                            <tag>${project.version}</tag>
+                                        </tags>
+                                    </build>
+                                </image>
+                            </images>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>push-nonrtric-dmaap-mediator-producer-image</id>
+                        <goals>
+                            <goal>build</goal>
+                            <goal>push</goal>
+                        </goals>
+                        <configuration>
+                            <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
+                            <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
+                            <images>
+                                <image>
+                                    <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
+                                    <build>
+                                        <contextDir>${basedir}</contextDir>
+                                        <dockerFile>Dockerfile</dockerFile>
+                                        <args>
+                                            <JAR>${project.build.finalName}.jar</JAR>
+                                        </args>
+                                        <tags>
+                                            <tag>${project.version}</tag>
+                                            <tag>latest</tag>
+                                        </tags>
+                                    </build>
+                                </image>
+                            </images>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+      </plugins>
+    </build>
+</project>
index 5cbcaea..4260cae 100644 (file)
@@ -43,7 +43,7 @@ func main() {
 
        registerJob(*port)
 
-       fmt.Print("Starting consumer on port: ", *port)
+       fmt.Println("Starting consumer on port: ", *port)
        fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), nil))
 }
 
@@ -59,11 +59,11 @@ func registerJob(port int) {
                InfoTypeId:    "STD_Fault_Messages",
                JobDefinition: "{}",
        }
-       fmt.Print("Registering consumer: ", jobInfo)
+       fmt.Println("Registering consumer: ", jobInfo)
        body, _ := json.Marshal(jobInfo)
        putErr := restclient.Put(fmt.Sprintf("http://localhost:8083/data-consumer/v1/info-jobs/job%v", port), body, &httpClient)
        if putErr != nil {
-               fmt.Printf("Unable to register consumer: %v", putErr)
+               fmt.Println("Unable to register consumer: ", putErr)
        }
 }
 
index 36ffa39..451bc9a 100644 (file)
@@ -71,12 +71,13 @@ func handleData(w http.ResponseWriter, req *http.Request) {
        var responseBody []byte
        if critical {
                responseBody = getFaultMessage("CRITICAL")
+               fmt.Println("Sending CRITICAL")
                critical = false
        } else {
                responseBody = getFaultMessage("NORMAL")
+               fmt.Println("Sending NORMAL")
                critical = true
        }
-       // w.Write(responseBody)
        fmt.Fprint(w, string(responseBody))
 }
 
index 6fc3528..abbce20 100644 (file)
@@ -35,9 +35,9 @@ CONTROL_PANEL_IMAGE_TAG="2.2.0"
 NONRTRIC_GATEWAY_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-gateway"
 NONRTRIC_GATEWAY_IMAGE_TAG="1.0.0"
 
-#ECS
-ECS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG="1.1.0"
+#ICS
+ICS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-information-coordinator-service"
+ICS_IMAGE_TAG="1.1.0"
 
 #CONSUMER
 CONSUMER_IMAGE_BASE="eexit/mirror-http-server"
@@ -61,4 +61,4 @@ DMAAP_MEDIATOR_GO_TAG="1.0,0"
 
 #DMAAP_MEDIATOR_JAVA
 DMAAP_MEDIATOR_JAVA_BASE="nexus3.o-ran-sc.org:10003/o-ran-sc/nonrtric-dmaap-adaptor"
-DMAAP_MEDIATOR_JAVA_TAG="1.0.0-SNAPSHOT"
\ No newline at end of file
+DMAAP_MEDIATOR_JAVA_TAG="1.0.0-SNAPSHOT"
index d7a4f17..8fcca7e 100644 (file)
@@ -65,19 +65,19 @@ All the generated data is shown on the web page
 By default, if the containers are started up and running by docker-compose file in the same directory, just run commands:
 ./preparePmsData.sh
 
-prepareEcsData.sh
-This is to generate some data into the ECS microservice
+prepareIcsData.sh
+This is to generate some data into the ICS microservice
 
 prepareDmaapMsg.sh
 This is to generate some data into the Dmaap MR, so that PMS reads message from MR
 
 ## O-RAN-SC Control Panel
 
-The Non-RT RIC Control Panel is a graphical user interface that enables the user to view and manage the A1 policies in the RAN and also view producers and jobs for the Enrichement coordinator service.
+The Non-RT RIC Control Panel is a graphical user interface that enables the user to view and manage the A1 policies in the RAN and also view producers and jobs for the Information coordinator service.
 
 ### O-RAN-SC Control Panel Gateway:
 
-To view the policy or enrichment information in control panel gui along with Policy Management Service & Enrichment Coordinator Service you should also have nonrtric gateway because all the request from the gui is passed through this API gateway.
+To view the policies, or the information jobs and types, in the Control Panel GUI along with the Policy Management Service & Information Coordinator Service, you should also have the nonrtric gateway, because all requests from the GUI are passed through this API gateway.
 
 #### Prerequisite:
 
@@ -85,4 +85,4 @@ Make sure to follow the section regarding sample data so there is data available
 
 To start all the necessary components, run the following command:
 
-docker-compose -f docker-compose.yaml -f control-panel/docker-compose.yaml -f nonrtric-gateway/docker-compose.yaml -f policy-service/docker-compose.yaml -f ecs/docker-compose.yaml -f a1-sim/docker-compose.yaml up
\ No newline at end of file
+docker-compose -f docker-compose.yaml -f control-panel/docker-compose.yaml -f nonrtric-gateway/docker-compose.yaml -f policy-service/docker-compose.yaml -f ics/docker-compose.yaml -f a1-sim/docker-compose.yaml up
similarity index 55%
rename from docker-compose/data/prepareEcsData.sh
rename to docker-compose/data/prepareIcsData.sh
index 21cc35b..5871776 100755 (executable)
 
 # The scripts in data/ will generate some dummy data in the running system.
 # It will create:
-# one EiProducer in ECS
-# one EiType in ECS
-# one EiJob in ECS
+# one InfoProducer in ICS
+# one InfoType in ICS
+# one InfoJob in ICS
 
 # Run command:
-# ./prepareEcsData.sh [ECS port] [http/https]
+# ./prepareIcsData.sh [ICS port] [http/https]
 
-ecs_port=${1:-8083}
+ics_port=${1:-8083}
 httpx=${4:-"http"}
 SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
 
-echo "using ecs port: "$ecs_port
+echo "using ics port: "$ics_port
 echo "using protocol: "$httpx
 echo -e "\n"
 
-echo "ECS status:"
-curl -skw " %{http_code}" $httpx://localhost:$ecs_port/status
+echo "ICS status:"
+curl -skw " %{http_code}" $httpx://localhost:$ics_port/status
 echo -e "\n"
 
-# Create EiType
-echo "Create EiType:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types/type1 -H accept:application/json -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiType.json
+# Create InfoType
+echo "Create InfoType:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types/type1 -H accept:application/json -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoType.json
 echo -e "\n"
 
-# Get EiTypes
-echo "Get EiTypes:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types -H Content-Type:application/json | jq
+# Get InfoTypes
+echo "Get InfoTypes:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Get Individual EiType
-echo "Get Individual EiType:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types/type1 -H Content-Type:application/json | jq
+# Get Individual InfoType
+echo "Get Individual InfoType:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types/type1 -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Create EiProducer
-echo "Create EiProducer:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiProducer.json
+# Create InfoProducer
+echo "Create InfoProducer:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoProducer.json
 echo -e "\n"
 
-# Get EiProducers
-echo "Get EiProducers:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers -H Content-Type:application/json | jq
+# Get InfoProducers
+echo "Get InfoProducers:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Get Individual EiProducer
-echo "Get Individual EiProducer:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1 -H Content-Type:application/json | jq
+# Get Individual InfoProducer
+echo "Get Individual InfoProducer:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1 -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Get Individual EiProducer Status
-echo "Get Individual EiProducer:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1/status -H Content-Type:application/json | jq
+# Get Individual InfoProducer Status
+echo "Get Individual InfoProducer:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1/status -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Create EiJob
-echo "Create EiJob Of A Certain Type type1:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiJob.json
+# Create InfoJob
+echo "Create InfoJob Of A Certain Type type1:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoJob.json
 echo -e "\n"
 
-# Get EiJobs
-echo "Get EiJobs:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs -H Content-Type:application/json | jq
+# Get InfoJobs
+echo "Get InfoJobs:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs -H Content-Type:application/json | jq
 echo -e "\n"
 
-# Get Individual EiJob:
-echo "Get Individual EiJob:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json | jq
+# Get Individual InfoJob:
+echo "Get Individual InfoJob:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json | jq
 echo -e "\n"
\ No newline at end of file
index a2f3db3..2b8eb5f 100755 (executable)
@@ -24,7 +24,7 @@
 # ./sendMsgToMediator.sh [dmaap-mr port] [http/https]
 
 SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
-bash ${SHELL_FOLDER}/prepareEcsData.sh
+bash ${SHELL_FOLDER}/prepareIcsData.sh
 
 dmaa_mr_port=${1:-3904}
 httpx=${2:-"http"}
index 5295fa6..9096720 100644 (file)
@@ -1,5 +1,5 @@
 @startuml
 dmaap_mr <- dmaap_mediator: dmaap_mediator reads msg from dmaap_mr
-dmaap_mediator -> ecs: dmaap_mediator gets jobs from ecs
+dmaap_mediator -> ics: dmaap_mediator gets jobs from ics
 dmaap_mediator -> consumer: callbackUrl, send msg to consumer
 @enduml
\ No newline at end of file
index 4efdf57..7db4895 100644 (file)
@@ -27,10 +27,10 @@ services:
     environment:
       - INFO_PRODUCER_HOST=http://consumer
       - INFO_PRODUCER_PORT=8088
-      - INFO_COORD_ADDR=http://ecs:8083
+      - INFO_COORD_ADDR=http://ics:8083
       - DMAAP_MR_ADDR=http://dmaap-mr:3904
       - PRODUCER_CERT_PATH=security/producer.crt
       - PRODUCER_KEY_PATH=security/producer.key
       - LOG_LEVEL=Debug
     networks:
-      - default
\ No newline at end of file
+      - default
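
The environment variables above map directly onto the fields resolved by config.New() in the producer; unset variables fall back to the defaults shown earlier in this change (e.g. https://informationservice:8434 for INFO_COORD_ADDR). A small sketch, with the internal package path assumed from the module name:

package main

import (
	"fmt"
	"os"

	"oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
)

func main() {
	// Mirror the compose file settings.
	os.Setenv("INFO_PRODUCER_HOST", "http://consumer")
	os.Setenv("INFO_PRODUCER_PORT", "8088")
	os.Setenv("INFO_COORD_ADDR", "http://ics:8083")
	os.Setenv("DMAAP_MR_ADDR", "http://dmaap-mr:3904")
	os.Setenv("LOG_LEVEL", "Debug")

	cfg := config.New()
	// PRODUCER_CERT_PATH and PRODUCER_KEY_PATH were left unset, so the
	// defaults security/producer.crt and security/producer.key apply.
	fmt.Printf("%+v\n", cfg)
}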
index 57fd8ce..b34d02a 100755 (executable)
@@ -46,7 +46,7 @@ app:
     http.proxy-host:
     http.proxy-port: 0
   vardata-directory: /var/dmaap-adaptor-service
-  ecs-base-url: http://ecs:8083
+  ics-base-url: http://ics:8083
   # Location of the component configuration file. The file will only be used if the Consul database is not used;
   # configuration from the Consul will override the file.
   configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
similarity index 90%
rename from docker-compose/ecs/docker-compose.yaml
rename to docker-compose/ics/docker-compose.yaml
index 6de293f..5c9c65a 100644 (file)
@@ -21,13 +21,13 @@ networks:
     name: nonrtric-docker-net
 
 services:
-  ecs:
-    image: "${ECS_IMAGE_BASE}:${ECS_IMAGE_TAG}"
-    container_name: ecs
+  ics:
+    image: "${ICS_IMAGE_BASE}:${ICS_IMAGE_TAG}"
+    container_name: ics
     networks:
       default:
         aliases:
-          - enrichment-service-container
+          - information-service-container
     ports:
       - 8083:8083
       - 8434:8434
index ef234a5..0de1b91 100644 (file)
@@ -20,7 +20,8 @@ Here we describe the APIs to access the Non-RT RIC functions.
 The core Non-RT RIC consists of several parts, with available APIs described in the sections below:
 
 * The A1 Policy Management Service
-* The Enrichment Coordinator Service
+* The Information Coordinator Service
+* DMaaP Adaptor
 * The Non-RT-RIC App Catalogue
 * K8S Helm Chart LCM Manager (Initial) **<ToDo>**
 
@@ -29,10 +30,10 @@ A1 Policy Management Service
 
 For information about the A1 Policy Management Service that is implemented in ONAP, see `ONAP docs <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_ and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
-Enrichment Coordinator Service
-==============================
+Information Coordinator Service
+===============================
 
-See `A1 Enrichment Information Coordination Service API <./ecs-api.html>`_ for full details of the API.
+See `A1 Information Coordination Service API <./ics-api.html>`_ for full details of the API.
 
 The API is also described in Swagger-JSON and YAML:
 
@@ -40,7 +41,23 @@ The API is also described in Swagger-JSON and YAML:
    :header: "API name", "|swagger-icon|", "|yaml-icon|"
    :widths: 10,5,5
 
-   "A1 Enrichment Information Coordination Service API", ":download:`link <../enrichment-coordinator-service/api/ecs-api.json>`", ":download:`link <../enrichment-coordinator-service/api/ecs-api.yaml>`"
+   "A1 Information Coordination Service API", ":download:`link <../information-coordinator-service/api/ics-api.json>`", ":download:`link <../information-coordinator-service/api/ics-api.yaml>`"
+
+DMaaP Adaptor
+=============
+
+The DMaaP Adaptor provides support for push delivery of any data received from DMaaP or Kafka.
+
+See `DMaaP Adaptor API <./dmaap-adaptor-api.html>`_ for full details of the API.
+
+The API is also described in Swagger-JSON and YAML:
+
+
+.. csv-table::
+   :header: "API name", "|swagger-icon|", "|yaml-icon|"
+   :widths: 10,5,5
+
+   "DMaaP Adaptor API", ":download:`link <../dmaap-adaptor-java/api/api.json>`", ":download:`link <../dmaap-adaptor-java/api/api.yaml>`"
 
 Non-RT-RIC App Catalogue (Initial)
 ==================================
index 85721c6..c5e504d 100644 (file)
@@ -9,7 +9,8 @@ linkcheck_ignore = [
     'http://127.0.0.1.*',
     'https://gerrit.o-ran-sc.org.*',
     './rac-api.html', #Generated file that doesn't exist at link check.
-    './ecs-api.html' #Generated file that doesn't exist at link check.
+    './ics-api.html', #Generated file that doesn't exist at link check.
+    './dmaap-adaptor-api.html' #Generated file that doesn't exist at link check.
 ]
 
 extensions = ['sphinxcontrib.redoc', 'sphinx.ext.intersphinx',]
@@ -22,9 +23,15 @@ redoc = [
                 'embed': True,
             },
             {
-                'name': 'ECS API',
-                'page': 'ecs-api',
-                'spec': '../enrichment-coordinator-service/api/ecs-api.json',
+                'name': 'ICS API',
+                'page': 'ics-api',
+                'spec': '../information-coordinator-service/api/ics-api.json',
+                'embed': True,
+            },
+            {
+                'name': 'DMaaP Adaptor API',
+                'page': 'dmaap-adaptor-api',
+                'spec': '../dmaap-adaptor-java/api/api.json',
                 'embed': True,
             }
         ]
index 43ac2d1..e0cb080 100644 (file)
@@ -15,21 +15,21 @@ A1 Policy Management Service & SDNC/A1 Controller & A1 Adapter
 The A1 Policy Management Service is implemented in ONAP. For documentation see `ONAP CCSDK documentation <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_
 and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
 
-Enrichment Coordinator Service
-------------------------------
-The Enrichment Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
+Information Coordinator Service
+-------------------------------
+The Information Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
 dependencies, it runs as a standalone application.
 
 Its main functionality is to act as a data subscription broker and to decouple data producer from data consumers.
 
-See the ./config/README file in the *enrichment-coordinator-service* directory Gerrit repo on how to create and setup
+See the ./config/README file in the *information-coordinator-service* directory of the Gerrit repo for how to create and set up
 the certificates and private keys needed for HTTPS.
 
 Start standalone
 ++++++++++++++++
 
-The project uses Maven. To start the Enrichment Coordinator Service as a freestanding application, run the following
-command in the *enrichment-coordinator-service* directory:
+The project uses Maven. To start the Information Coordinator Service as a freestanding application, run the following
+command in the *information-coordinator-service* directory:
 
     +-----------------------------+
     | mvn spring-boot:run         |
@@ -38,7 +38,7 @@ command in the *enrichment-coordinator-service* directory:
 Start in Docker
 +++++++++++++++
 
-To build and deploy the Enrichment Coordinator Service, go to the "enrichment-coordinator-service" folder and run the
+To build and deploy the Information Coordinator Service, go to the "information-coordinator-service" folder and run the
 following command:
 
     +-----------------------------+
@@ -48,7 +48,7 @@ following command:
 Then start the container by running the following command:
 
     +--------------------------------------------------------------------+
-    | docker run nonrtric-enrichment-coordinator-service                 |
+    | docker run nonrtric-information-coordinator-service                |
     +--------------------------------------------------------------------+
 
 Initial Non-RT-RIC App Catalogue
index c19a2d4..0404192 100644 (file)
@@ -28,7 +28,7 @@ command to start the components:
 
          docker-compose -f docker-compose.yaml
            -f policy-service/docker-compose.yaml
-           -f ecs/docker-compose.yaml
+           -f ics/docker-compose.yaml
 
 Install with Helm
 +++++++++++++++++
index 8645a18..3577e9c 100644 (file)
@@ -24,11 +24,12 @@ These are the components that make up the Non-RT-RIC:
 * Non-RT-RIC Control Panel / Dashboard
 * A1 Policy Management Service (developed in ONAP)
 * A1/SDNC Controller & A1 Adapter (Controller plugin)
-* Enrichment Information Coordinator
+* Information Coordinator Service
 * Non-RT-RIC (Spring Cloud) Service Gateway
 * Non-RT-RIC (Kong) Service Exposure Prototyping
 * Initial Non-RT-RIC App Catalogue
 * Near-RT-RIC A1 Simulator
+* DMaaP Adaptor
 
 The code base for "D" Release is in the `NONRTRIC <https://gerrit.o-ran-sc.org/r/admin/repos/nonrtric>`_, `NONRTRIC-ControlPanel <https://gerrit.o-ran-sc.org/r/admin/repos/portal/nonrtric-controlpanel>`_, and `Near-RT-RIC A1-Simulator <https://gerrit.o-ran-sc.org/r/admin/repos/sim/a1-interface>`_ , Gerrit source repositories (D Branch).
 
@@ -40,7 +41,7 @@ Graphical user interface
 * View and Manage A1 policies in the RAN (near-RT-RICs)
 * Interacts with the Policy agent’s NBI (REST API)
 * Graphical A1 policy creation/editing is model-driven, based on policy type’s JSON schema
-* View and manage producers and jobs for the Enrichment coordinator service
+* View and manage producers and jobs for the Information coordinator service
 * Configure A1 Policy Management Service (e.g. add/remove near-rt-rics)
 * Interacts with the A1-PolicyManagementService & A1-EI-Coordinator (REST NBIs) via Service Exposure gateway
      
@@ -76,6 +77,10 @@ A1 Controller Service above A1 Controller/Adaptor that provides:
 
 See also: `A1 Policy Management Service in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_  
 
+
+Implementation:
+* Implemented as a Java Spring Boot application
+
 A1/SDNC Controller & A1 Adapter (Controller plugin)
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Mediation point for A1 interface termination in SMO/NONRTRIC
@@ -90,8 +95,8 @@ Mediation point for A1 interface termination in SMO/NONRTRIC
 
 See also: `A1 Adapter/Controller Functions in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_  
   
-Enrichment Information Job Coordination Service
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Information Coordination Service
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs.
 
@@ -106,14 +111,28 @@ Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs.
 * Query status of A1-EI jobs
 * Monitors all near-RT-RICs and recovers from inconsistencies
 * After EI-type/Producer/Consumer/Job is successfully registered delivery/flow can happen directly between A1-EI Producers (in SMO/NONRTRIC domain) and A1-EI consumers (near-RT-RICs in RAN domain)
-* *Being extended to coordinate non-A1 Enrichment Information exchange between NONRTRIC Apps*
+* *Being extended to coordinate non-A1 Information exchange between NONRTRIC Apps*
+
+
+Implementation:
+* Implemented as a Java Spring Boot application
+
+DMaaP Adaptor
+~~~~~~~~~~~~~
+
+The DMaaP Adaptor is a generic information producer that registers itself as a producer of information types in the Information Coordination Service.
+The information types are defined in a configuration file.
+Information jobs retrieve data from DMaaP or Kafka topics and push it to the data consumers that set up the jobs via the ICS API.
+
+Implementation:
+* Implemented as a Java Spring Boot application
 
 Non-RT-RIC (Spring Cloud) Service Gateway
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 Support Apps to use A1 Services 
 
 * `Spring Cloud Gateway <https://cloud.spring.io/spring-cloud-gateway>`_ provides the library to build a basic API gateway
-* Exposes A1 Policy Management Service & Enrichment Coordinator Service.  
+* Exposes A1 Policy Management Service & Information Coordinator Service.  
 * Additional predicates can be added in code or preferably in the Gateway yaml configuration.
 
 Implementation:
@@ -162,7 +181,7 @@ A1 Interface / Near-RT-RIC Simulator
 Stateful A1 test stub.
 
 * Used to create multiple stateful A1 providers (simulated near-rt-rics)
-* Supports A1-Policy and A1-EnrichmentInformation
+* Supports A1-Policy and A1-Enrichment Information
 * Swagger-based northbound interface, so easy to change the A1 profile exposed (e.g. A1 version, A1 Policy Types, A1-E1 consumers, etc)
 * All A1-AP versions supported
 
diff --git a/enrichment-coordinator-service/Dockerfile b/enrichment-coordinator-service/Dockerfile
deleted file mode 100644 (file)
index 744a237..0000000
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# ============LICENSE_START=======================================================
-#  Copyright (C) 2020 Nordix Foundation.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=========================================================
-#
-FROM openjdk:11-jre-slim
-
-ARG JAR
-
-WORKDIR /opt/app/enrichment-coordinator-service
-RUN mkdir -p /var/log/enrichment-coordinator-service
-RUN mkdir -p /opt/app/enrichment-coordinator-service/etc/cert/
-RUN mkdir -p /var/enrichment-coordinator-service
-RUN chmod -R 777 /var/enrichment-coordinator-service
-
-EXPOSE 8083 8434
-
-ADD /config/application.yaml /opt/app/enrichment-coordinator-service/config/application.yaml
-ADD target/${JAR} /opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar
-ADD /config/keystore.jks /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
-ADD /config/truststore.jks /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
-
-
-RUN chmod -R 777 /opt/app/enrichment-coordinator-service/config/
-
-CMD ["java", "-jar", "/opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar"]
-
-
-
-
diff --git a/information-coordinator-service/Dockerfile b/information-coordinator-service/Dockerfile
new file mode 100644 (file)
index 0000000..226d2ec
--- /dev/null
@@ -0,0 +1,47 @@
+#
+# ============LICENSE_START=======================================================
+#  Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+#
+FROM openjdk:11-jre-slim
+
+ARG JAR
+
+WORKDIR /opt/app/information-coordinator-service
+RUN mkdir -p /var/log/information-coordinator-service
+RUN mkdir -p /opt/app/information-coordinator-service/etc/cert/
+RUN mkdir -p /var/information-coordinator-service
+
+EXPOSE 8083 8434
+
+ADD /config/application.yaml /opt/app/information-coordinator-service/config/application.yaml
+ADD target/${JAR} /opt/app/information-coordinator-service/information-coordinator-service.jar
+ADD /config/keystore.jks /opt/app/information-coordinator-service/etc/cert/keystore.jks
+ADD /config/truststore.jks /opt/app/information-coordinator-service/etc/cert/truststore.jks
+
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/information-coordinator-service
+RUN chown -R appuser:appuser /var/information-coordinator-service
+RUN chown -R appuser:appuser /var/log/information-coordinator-service
+USER appuser
+
+CMD ["java", "-jar", "/opt/app/information-coordinator-service/information-coordinator-service.jar"]
+
+
+
+
             }
         },
         "consumer_job": {
-            "description": "Information for an Enrichment Information Job",
+            "description": "Information for an Information Job",
             "type": "object",
             "required": [
                 "info_type_id",
             "description": "Void/empty ",
             "type": "object"
         },
+        "Link": {
+            "type": "object",
+            "properties": {
+                "templated": {"type": "boolean"},
+                "href": {"type": "string"}
+            }
+        },
         "consumer_type_subscription_info": {
             "description": "Information for an information type subscription",
             "type": "object",
             }],
             "tags": ["A1-EI (registration)"]
         }},
+        "/actuator/threaddump": {"get": {
+            "summary": "Actuator web endpoint 'threaddump'",
+            "operationId": "handle_2_1_3",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
+        }},
         "/example_dataproducer/info_job": {"post": {
             "summary": "Callback for Information Job creation/modification",
             "requestBody": {
                 "tags": ["Data consumer"]
             }
         },
+        "/actuator/loggers": {"get": {
+            "summary": "Actuator web endpoint 'loggers'",
+            "operationId": "handle_6",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
+        }},
+        "/actuator/health/**": {"get": {
+            "summary": "Actuator web endpoint 'health-path'",
+            "operationId": "handle_12",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
+        }},
+        "/data-consumer/v1/info-types": {"get": {
+            "summary": "Information type identifiers",
+            "operationId": "getinfoTypeIdentifiers",
+            "responses": {"200": {
+                "description": "Information type identifiers",
+                "content": {"application/json": {"schema": {
+                    "type": "array",
+                    "items": {"type": "string"}
+                }}}
+            }},
+            "tags": ["Data consumer"]
+        }},
+        "/actuator/metrics/{requiredMetricName}": {"get": {
+            "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
+            "operationId": "handle_5",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "requiredMetricName",
+                "required": true
+            }],
+            "tags": ["Actuator"]
+        }},
+        "/actuator": {"get": {
+            "summary": "Actuator root web endpoint",
+            "operationId": "links_1",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {
+                    "additionalProperties": {
+                        "additionalProperties": {"$ref": "#/components/schemas/Link"},
+                        "type": "object"
+                    },
+                    "type": "object"
+                }}}
+            }},
+            "tags": ["Actuator"]
+        }},
+        "/data-consumer/v1/info-jobs": {"get": {
+            "summary": "Information Job identifiers",
+            "description": "query for information job identifiers",
+            "operationId": "getJobIds",
+            "responses": {
+                "200": {
+                    "description": "Information job identifiers",
+                    "content": {"application/json": {"schema": {
+                        "type": "array",
+                        "items": {"type": "string"}
+                    }}}
+                },
+                "404": {
+                    "description": "Information type is not found",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+                }
+            },
+            "parameters": [
+                {
+                    "schema": {"type": "string"},
+                    "in": "query",
+                    "name": "infoTypeId",
+                    "description": "selects subscription jobs of matching information type",
+                    "required": false
+                },
+                {
+                    "schema": {"type": "string"},
+                    "in": "query",
+                    "name": "owner",
+                    "description": "selects result for one owner",
+                    "required": false
+                }
+            ],
+            "tags": ["Data consumer"]
+        }},
+        "/actuator/loggers/{name}": {
+            "post": {
+                "summary": "Actuator web endpoint 'loggers-name'",
+                "operationId": "handle_0",
+                "responses": {"200": {
+                    "description": "OK",
+                    "content": {"*/*": {"schema": {"type": "object"}}}
+                }},
+                "parameters": [{
+                    "schema": {"type": "string"},
+                    "in": "path",
+                    "name": "name",
+                    "required": true
+                }],
+                "tags": ["Actuator"]
+            },
+            "get": {
+                "summary": "Actuator web endpoint 'loggers-name'",
+                "operationId": "handle_7",
+                "responses": {"200": {
+                    "description": "OK",
+                    "content": {"*/*": {"schema": {"type": "object"}}}
+                }},
+                "parameters": [{
+                    "schema": {"type": "string"},
+                    "in": "path",
+                    "name": "name",
+                    "required": true
+                }],
+                "tags": ["Actuator"]
+            }
+        },
+        "/example_dataconsumer/info_jobs/{infoJobId}/status": {"post": {
+            "summary": "Callback for changed Information Job status",
+            "requestBody": {
+                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}},
+                "required": true
+            },
+            "description": "The primitive is implemented by the data consumer and is invoked when an Information Job status has been changed.",
+            "operationId": "jobStatusCallback",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
+            }},
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "infoJobId",
+                "required": true
+            }],
+            "tags": ["A1-EI (callbacks)"]
+        }},
+        "/A1-EI/v1/eijobs/{eiJobId}/status": {"get": {
+            "summary": "EI job status",
+            "operationId": "getEiJobStatus_1",
+            "responses": {
+                "200": {
+                    "description": "EI job status",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}}
+                },
+                "404": {
+                    "description": "Enrichment Information job is not found",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+                }
+            },
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "eiJobId",
+                "required": true
+            }],
+            "tags": ["A1-EI (registration)"]
+        }},
+        "/data-producer/v1/info-producers/{infoProducerId}/status": {"get": {
+            "summary": "Information producer status",
+            "operationId": "getInfoProducerStatus",
+            "responses": {
+                "200": {
+                    "description": "Information producer status",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/producer_status"}}}
+                },
+                "404": {
+                    "description": "Information producer is not found",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+                }
+            },
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "infoProducerId",
+                "required": true
+            }],
+            "tags": ["Data producer (registration)"]
+        }},
+        "/data-consumer/v1/info-jobs/{infoJobId}/status": {"get": {
+            "summary": "Job status",
+            "operationId": "getEiJobStatus",
+            "responses": {
+                "200": {
+                    "description": "Information subscription job status",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/consumer_job_status"}}}
+                },
+                "404": {
+                    "description": "Information subscription job is not found",
+                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+                }
+            },
+            "parameters": [{
+                "schema": {"type": "string"},
+                "in": "path",
+                "name": "infoJobId",
+                "required": true
+            }],
+            "tags": ["Data consumer"]
+        }},
+        "/actuator/metrics": {"get": {
+            "summary": "Actuator web endpoint 'metrics'",
+            "operationId": "handle_4",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
+        }},
+        "/actuator/info": {"get": {
+            "summary": "Actuator web endpoint 'info'",
+            "operationId": "handle_9",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
+        }},
         "/example_dataproducer/health_check": {"get": {
             "summary": "Producer supervision",
             "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
             }},
             "tags": ["A1-EI (registration)"]
         }},
-        "/data-consumer/v1/info-types": {"get": {
-            "summary": "Information type identifiers",
-            "operationId": "getinfoTypeIdentifiers",
-            "responses": {"200": {
-                "description": "Information type identifiers",
-                "content": {"application/json": {"schema": {
-                    "type": "array",
-                    "items": {"type": "string"}
-                }}}
-            }},
-            "tags": ["Data consumer"]
-        }},
         "/data-producer/v1/info-producers/{infoProducerId}": {
             "get": {
                 "summary": "Individual Information Producer",
                 "tags": ["A1-EI (registration)"]
             }
         },
-        "/data-consumer/v1/info-jobs": {"get": {
-            "summary": "Information Job identifiers",
-            "description": "query for information job identifiers",
-            "operationId": "getJobIds",
-            "responses": {
-                "200": {
-                    "description": "Information information job identifiers",
-                    "content": {"application/json": {"schema": {
-                        "type": "array",
-                        "items": {"type": "string"}
-                    }}}
-                },
-                "404": {
-                    "description": "Information type is not found",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
-                }
-            },
-            "parameters": [
-                {
-                    "schema": {"type": "string"},
-                    "in": "query",
-                    "name": "infoTypeId",
-                    "description": "selects subscription jobs of matching information type",
-                    "required": false
-                },
-                {
-                    "schema": {"type": "string"},
-                    "in": "query",
-                    "name": "owner",
-                    "description": "selects result for one owner",
-                    "required": false
-                }
-            ],
-            "tags": ["Data consumer"]
+        "/actuator/logfile": {"get": {
+            "summary": "Actuator web endpoint 'logfile'",
+            "operationId": "handle_8",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
         }},
         "/data-consumer/v1/info-jobs/{infoJobId}": {
             "get": {
             }],
             "tags": ["Data consumer"]
         }},
-        "/example_dataconsumer/info_jobs/{infoJobId}/status": {"post": {
-            "summary": "Callback for changed Information Job status",
-            "requestBody": {
-                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}},
-                "required": true
-            },
-            "description": "The primitive is implemented by the data consumer and is invoked when a Information Job status has been changed.",
-            "operationId": "jobStatusCallback",
+        "/actuator/health": {"get": {
+            "summary": "Actuator web endpoint 'health'",
+            "operationId": "handle_11",
             "responses": {"200": {
                 "description": "OK",
-                "content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
+                "content": {"*/*": {"schema": {"type": "object"}}}
             }},
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "infoJobId",
-                "required": true
-            }],
-            "tags": ["A1-EI (callbacks)"]
+            "tags": ["Actuator"]
         }},
         "/A1-EI/v1/eijobs": {"get": {
             "summary": "EI job identifiers",
             ],
             "tags": ["A1-EI (registration)"]
         }},
-        "/A1-EI/v1/eijobs/{eiJobId}/status": {"get": {
-            "summary": "EI job status",
-            "operationId": "getEiJobStatus_1",
-            "responses": {
-                "200": {
-                    "description": "EI job status",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}}
-                },
-                "404": {
-                    "description": "Enrichment Information job is not found",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
-                }
-            },
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "eiJobId",
-                "required": true
-            }],
-            "tags": ["A1-EI (registration)"]
-        }},
-        "/data-producer/v1/info-producers/{infoProducerId}/status": {"get": {
-            "summary": "Information producer status",
-            "operationId": "getInfoProducerStatus",
-            "responses": {
-                "200": {
-                    "description": "Information producer status",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/producer_status"}}}
-                },
-                "404": {
-                    "description": "Information producer is not found",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
-                }
-            },
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "infoProducerId",
-                "required": true
-            }],
-            "tags": ["Data producer (registration)"]
-        }},
         "/data-producer/v1/info-producers/{infoProducerId}/info-jobs": {"get": {
             "summary": "Information Job definitions",
             "description": "Information Job definitions for one Information Producer",
             }],
             "tags": ["Data producer (registration)"]
         }},
-        "/data-consumer/v1/info-jobs/{infoJobId}/status": {"get": {
-            "summary": "Job status",
-            "operationId": "getEiJobStatus",
-            "responses": {
-                "200": {
-                    "description": "Information subscription job status",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/consumer_job_status"}}}
-                },
-                "404": {
-                    "description": "Information subscription job is not found",
-                    "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
-                }
-            },
-            "parameters": [{
-                "schema": {"type": "string"},
-                "in": "path",
-                "name": "infoJobId",
-                "required": true
-            }],
-            "tags": ["Data consumer"]
-        }},
         "/example_dataconsumer/info_type_status": {"post": {
             "summary": "Callback for changed Information type registration status",
             "requestBody": {
                 "content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
             }},
             "tags": ["Data consumer (callbacks)"]
+        }},
+        "/actuator/heapdump": {"get": {
+            "summary": "Actuator web endpoint 'heapdump'",
+            "operationId": "handle_10",
+            "responses": {"200": {
+                "description": "OK",
+                "content": {"*/*": {"schema": {"type": "object"}}}
+            }},
+            "tags": ["Actuator"]
         }}
     },
     "info": {
         {
             "name": "Data consumer",
             "description": "API for data consumers"
+        },
+        {
+            "name": "Actuator",
+            "description": "Monitor and interact",
+            "externalDocs": {
+                "description": "Spring Boot Actuator Web API Documentation",
+                "url": "https://docs.spring.io/spring-boot/docs/current/actuator-api/html/"
+            }
         }
     ]
 }
\ No newline at end of file
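For orientation, a minimal consumer-side sketch of the GET /data-consumer/v1/info-jobs query documented in the JSON spec above. The base URL and plain-HTTP port are assumptions for the example (the spec only defines the path and the optional infoTypeId/owner query parameters); only the JDK 11+ HTTP client is used.

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class InfoJobIdsExample {
        public static void main(String[] args) throws Exception {
            // Assumed base URL; adjust host/port to the actual ICS deployment (HTTPS in production).
            String baseUrl = "http://localhost:8083";
            // Optional filters from the spec: infoTypeId and owner.
            URI uri = URI.create(baseUrl + "/data-consumer/v1/info-jobs?owner=demo-owner");
            HttpRequest request = HttpRequest.newBuilder(uri).GET().build();
            HttpResponse<String> response =
                HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
            // A 200 response carries a JSON array of job identifier strings, e.g. ["job1","job2"].
            System.out.println(response.statusCode() + " " + response.body());
        }
    }
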
@@ -42,6 +42,11 @@ tags:
   description: API for monitoring of the service
 - name: Data consumer
   description: API for data consumers
+- name: Actuator
+  description: Monitor and interact
+  externalDocs:
+    description: Spring Boot Actuator Web API Documentation
+    url: https://docs.spring.io/spring-boot/docs/current/actuator-api/html/
 paths:
   /example_dataproducer/info_job/{infoJobId}:
     delete:
@@ -108,6 +113,19 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
+  /actuator/threaddump:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'threaddump'
+      operationId: handle_2_1_3
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
   /example_dataproducer/info_job:
     post:
       tags:
@@ -325,6 +343,301 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
+  /actuator/loggers:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'loggers'
+      operationId: handle_6
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+  /actuator/health/**:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'health-path'
+      operationId: handle_12
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+  /data-consumer/v1/info-types:
+    get:
+      tags:
+      - Data consumer
+      summary: Information type identifiers
+      operationId: getinfoTypeIdentifiers
+      responses:
+        200:
+          description: Information type identifiers
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: string
+  /actuator/metrics/{requiredMetricName}:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'metrics-requiredMetricName'
+      operationId: handle_5
+      parameters:
+      - name: requiredMetricName
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+  /actuator:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator root web endpoint
+      operationId: links_1
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+                additionalProperties:
+                  type: object
+                  additionalProperties:
+                    $ref: '#/components/schemas/Link'
+  /data-consumer/v1/info-jobs:
+    get:
+      tags:
+      - Data consumer
+      summary: Information Job identifiers
+      description: query for information job identifiers
+      operationId: getJobIds
+      parameters:
+      - name: infoTypeId
+        in: query
+        description: selects subscription jobs of matching information type
+        required: false
+        style: form
+        explode: true
+        schema:
+          type: string
+      - name: owner
+        in: query
+        description: selects results for one owner
+        required: false
+        style: form
+        explode: true
+        schema:
+          type: string
+      responses:
+        200:
+          description: Information job identifiers
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: string
+        404:
+          description: Information type is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ProblemDetails'
+  /actuator/loggers/{name}:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'loggers-name'
+      operationId: handle_7
+      parameters:
+      - name: name
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+    post:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'loggers-name'
+      operationId: handle_0
+      parameters:
+      - name: name
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+  /example_dataconsumer/info_jobs/{infoJobId}/status:
+    post:
+      tags:
+      - A1-EI (callbacks)
+      summary: Callback for changed Information Job status
+      description: The primitive is implemented by the data consumer and is invoked
+        when an Information Job status has been changed.
+      operationId: jobStatusCallback
+      parameters:
+      - name: infoJobId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/EiJobStatusObject'
+        required: true
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Void'
+  /A1-EI/v1/eijobs/{eiJobId}/status:
+    get:
+      tags:
+      - A1-EI (registration)
+      summary: EI job status
+      operationId: getEiJobStatus_1
+      parameters:
+      - name: eiJobId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: EI job status
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/EiJobStatusObject'
+        404:
+          description: Enrichment Information job is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ProblemDetails'
+  /data-producer/v1/info-producers/{infoProducerId}/status:
+    get:
+      tags:
+      - Data producer (registration)
+      summary: Information producer status
+      operationId: getInfoProducerStatus
+      parameters:
+      - name: infoProducerId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: Information producer status
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/producer_status'
+        404:
+          description: Information producer is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ProblemDetails'
+  /data-consumer/v1/info-jobs/{infoJobId}/status:
+    get:
+      tags:
+      - Data consumer
+      summary: Job status
+      operationId: getEiJobStatus
+      parameters:
+      - name: infoJobId
+        in: path
+        required: true
+        style: simple
+        explode: false
+        schema:
+          type: string
+      responses:
+        200:
+          description: Information subscription job status
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/consumer_job_status'
+        404:
+          description: Information subscription job is not found
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ProblemDetails'
+  /actuator/metrics:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'metrics'
+      operationId: handle_4
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
+  /actuator/info:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'info'
+      operationId: handle_9
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
   /example_dataproducer/health_check:
     get:
       tags:
@@ -355,21 +668,6 @@ paths:
                 type: array
                 items:
                   type: string
-  /data-consumer/v1/info-types:
-    get:
-      tags:
-      - Data consumer
-      summary: Information type identifiers
-      operationId: getinfoTypeIdentifiers
-      responses:
-        200:
-          description: Information type identifiers
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  type: string
   /data-producer/v1/info-producers/{infoProducerId}:
     get:
       tags:
@@ -620,45 +918,19 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
-  /data-consumer/v1/info-jobs:
+  /actuator/logfile:
     get:
       tags:
-      - Data consumer
-      summary: Information Job identifiers
-      description: query for information job identifiers
-      operationId: getJobIds
-      parameters:
-      - name: infoTypeId
-        in: query
-        description: selects subscription jobs of matching information type
-        required: false
-        style: form
-        explode: true
-        schema:
-          type: string
-      - name: owner
-        in: query
-        description: selects result for one owner
-        required: false
-        style: form
-        explode: true
-        schema:
-          type: string
+      - Actuator
+      summary: Actuator web endpoint 'logfile'
+      operationId: handle_8
       responses:
         200:
-          description: Information information job identifiers
-          content:
-            application/json:
-              schema:
-                type: array
-                items:
-                  type: string
-        404:
-          description: Information type is not found
+          description: OK
           content:
-            application/json:
+            '*/*':
               schema:
-                $ref: '#/components/schemas/ProblemDetails'
+                type: object
   /data-consumer/v1/info-jobs/{infoJobId}:
     get:
       tags:
@@ -830,35 +1102,19 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
-  /example_dataconsumer/info_jobs/{infoJobId}/status:
-    post:
+  /actuator/health:
+    get:
       tags:
-      - A1-EI (callbacks)
-      summary: Callback for changed Information Job status
-      description: The primitive is implemented by the data consumer and is invoked
-        when a Information Job status has been changed.
-      operationId: jobStatusCallback
-      parameters:
-      - name: infoJobId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      requestBody:
-        content:
-          application/json:
-            schema:
-              $ref: '#/components/schemas/EiJobStatusObject'
-        required: true
+      - Actuator
+      summary: Actuator web endpoint 'health'
+      operationId: handle_11
       responses:
         200:
           description: OK
           content:
-            application/json:
+            '*/*':
               schema:
-                $ref: '#/components/schemas/Void'
+                type: object
   /A1-EI/v1/eijobs:
     get:
       tags:
@@ -898,60 +1154,6 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
-  /A1-EI/v1/eijobs/{eiJobId}/status:
-    get:
-      tags:
-      - A1-EI (registration)
-      summary: EI job status
-      operationId: getEiJobStatus_1
-      parameters:
-      - name: eiJobId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      responses:
-        200:
-          description: EI job status
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/EiJobStatusObject'
-        404:
-          description: Enrichment Information job is not found
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ProblemDetails'
-  /data-producer/v1/info-producers/{infoProducerId}/status:
-    get:
-      tags:
-      - Data producer (registration)
-      summary: Information producer status
-      operationId: getInfoProducerStatus
-      parameters:
-      - name: infoProducerId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      responses:
-        200:
-          description: Information producer status
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/producer_status'
-        404:
-          description: Information producer is not found
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ProblemDetails'
   /data-producer/v1/info-producers/{infoProducerId}/info-jobs:
     get:
       tags:
@@ -982,33 +1184,6 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/ProblemDetails'
-  /data-consumer/v1/info-jobs/{infoJobId}/status:
-    get:
-      tags:
-      - Data consumer
-      summary: Job status
-      operationId: getEiJobStatus
-      parameters:
-      - name: infoJobId
-        in: path
-        required: true
-        style: simple
-        explode: false
-        schema:
-          type: string
-      responses:
-        200:
-          description: Information subscription job status
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/consumer_job_status'
-        404:
-          description: Information subscription job is not found
-          content:
-            application/json:
-              schema:
-                $ref: '#/components/schemas/ProblemDetails'
   /example_dataconsumer/info_type_status:
     post:
       tags:
@@ -1031,6 +1206,19 @@ paths:
             application/json:
               schema:
                 $ref: '#/components/schemas/Void'
+  /actuator/heapdump:
+    get:
+      tags:
+      - Actuator
+      summary: Actuator web endpoint 'heapdump'
+      operationId: handle_10
+      responses:
+        200:
+          description: OK
+          content:
+            '*/*':
+              schema:
+                type: object
 components:
   schemas:
     consumer_information_type:
@@ -1258,7 +1446,7 @@ components:
         status_notification_uri:
           type: string
           description: The target of Information subscription job status notifications
-      description: Information for an Enrichment Information Job
+      description: Information for an Information Job
     producer_status:
       required:
       - operational_state
@@ -1274,6 +1462,13 @@ components:
     Void:
       type: object
       description: 'Void/empty '
+    Link:
+      type: object
+      properties:
+        templated:
+          type: boolean
+        href:
+          type: string
     consumer_type_subscription_info:
       required:
       - owner
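The jobStatusCallback operation above is a primitive that the data consumer itself must expose. A hypothetical, minimal receiver is sketched below, assuming spring-boot-starter-web on the classpath; the class name is illustrative and the EiJobStatusObject body is accepted as raw JSON for brevity, so only the path shape comes from the spec.

    import org.springframework.http.ResponseEntity;
    import org.springframework.web.bind.annotation.PathVariable;
    import org.springframework.web.bind.annotation.PostMapping;
    import org.springframework.web.bind.annotation.RequestBody;
    import org.springframework.web.bind.annotation.RestController;

    // Hypothetical consumer-side receiver for the Information Job status callback.
    @RestController
    public class JobStatusCallbackReceiver {

        // Mirrors the documented POST /example_dataconsumer/info_jobs/{infoJobId}/status.
        @PostMapping("/example_dataconsumer/info_jobs/{infoJobId}/status")
        public ResponseEntity<Void> jobStatusChanged(@PathVariable("infoJobId") String infoJobId,
            @RequestBody String eiJobStatus) { // EiJobStatusObject payload, kept as raw JSON here
            // React to status changes (e.g. ENABLED/DISABLED) for the given job here.
            return ResponseEntity.ok().build();
        }
    }
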
@@ -5,6 +5,8 @@ spring:
     allow-bean-definition-overriding: true
   aop:
     auto: false
+springdoc:
+  show-actuator: true 
 management:
   endpoints:
     web:
@@ -19,9 +21,9 @@ logging:
     org.springframework: ERROR
     org.springframework.data: ERROR
     org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
-    org.oransc.enrichment: INFO
+    org.oransc.ics: INFO
   file:
-    name: /var/log/enrichment-coordinator-service/application.log
+    name: /var/log/information-coordinator-service/application.log
 server:
    # Configuration of the HTTP/REST server. The parameters are defined and handeled by the springboot framework.
    # See springboot documentation.
@@ -30,7 +32,7 @@ server:
    ssl:
       key-store-type: JKS
       key-store-password: policy_agent
-      key-store: /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
+      key-store: /opt/app/information-coordinator-service/etc/cert/keystore.jks
       key-password: policy_agent
       key-alias: policy_agent
 app:
@@ -40,10 +42,10 @@ app:
     # Note that the same keystore as for the server is used.
     trust-store-used: false
     trust-store-password: policy_agent
-    trust-store: /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
+    trust-store: /opt/app/information-coordinator-service/etc/cert/truststore.jks
     # Configuration of usage of HTTP Proxy for the southbound accesses.
     # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
     http.proxy-host:
     http.proxy-port: 0
-  vardata-directory: /var/enrichment-coordinator-service
+  vardata-directory: /var/information-coordinator-service
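With springdoc show-actuator enabled above, the Actuator endpoints are included in the generated API documents, which is why the JSON and YAML specs above gain the Actuator paths and tag. A small reachability sketch for the documented /actuator/health endpoint, assuming spring-webflux/Reactor on the classpath and an assumed plain-HTTP base URL:

    import org.springframework.web.reactive.function.client.WebClient;

    // Probe the Actuator health endpoint now exposed in the generated spec.
    public class ActuatorHealthProbe {
        public static void main(String[] args) {
            String body = WebClient.create("http://localhost:8083") // assumed host/port
                .get()
                .uri("/actuator/health")
                .retrieve()
                .bodyToMono(String.class)
                .block();
            System.out.println(body); // typically {"status":"UP"}
        }
    }
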
 
similarity index 96%
rename from enrichment-coordinator-service/pom.xml
rename to information-coordinator-service/pom.xml
index 4edf4d4..2de2bf2 100644 (file)
@@ -30,7 +30,7 @@
         <relativePath />
     </parent>
     <groupId>org.o-ran-sc.nonrtric</groupId>
-    <artifactId>enrichment-coordinator-service</artifactId>
+    <artifactId>information-coordinator-service</artifactId>
     <version>1.2.0-SNAPSHOT</version>
     <licenses>
         <license>
                             <goal>generate</goal>
                         </goals>
                         <configuration>
-                            <inputSpec>${project.basedir}/api/ecs-api.json</inputSpec>
+                            <inputSpec>${project.basedir}/api/ics-api.json</inputSpec>
                             <language>openapi-yaml</language>
                             <output>${project.basedir}/api</output>
                             <configOptions>
-                                <outputFile>ecs-api.yaml</outputFile>
+                                <outputFile>ics-api.yaml</outputFile>
                             </configOptions>
                         </configuration>
                     </execution>
                 <inherited>false</inherited>
                 <executions>
                     <execution>
-                        <id>generate-enrichment-coordinator-service-image</id>
+                        <id>generate-information-coordinator-service-image</id>
                         <phase>package</phase>
                         <goals>
                             <goal>build</goal>
                             <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
                             <images>
                                 <image>
-                                    <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+                                    <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
                                     <build>
                                         <cleanup>try</cleanup>
                                         <contextDir>${basedir}</contextDir>
                         </configuration>
                     </execution>
                     <execution>
-                        <id>push-enrichment-coordinator-service-image</id>
+                        <id>push-information-coordinator-service-image</id>
                         <goals>
                             <goal>build</goal>
                             <goal>push</goal>
                             <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
                             <images>
                                 <image>
-                                    <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+                                    <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
                                     <build>
                                         <contextDir>${basedir}</contextDir>
                                         <dockerFile>Dockerfile</dockerFile>
         <system>JIRA</system>
         <url>https://jira.o-ran-sc.org/</url>
     </issueManagement>
-</project>
+</project>
\ No newline at end of file
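The remaining hunks below are the mechanical package rename from org.oransc.enrichment to org.oransc.ics. For code compiled against these classes the visible change is the import prefix; a trivial illustration, assuming the information-coordinator-service classes are on the classpath (the wrapper class name is made up):

    // Before: import org.oransc.enrichment.repository.InfoJob;
    import org.oransc.ics.repository.InfoJob;

    public class RenameIllustration {
        // Same type, new package prefix, as applied throughout the hunks below.
        private InfoJob job;
    }
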
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import org.springframework.boot.SpringApplication;
 import org.springframework.boot.autoconfigure.SpringBootApplication;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 
 import java.lang.invoke.MethodHandles;
 
 import org.apache.catalina.connector.Connector;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Value;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import io.swagger.v3.oas.annotations.OpenAPIDefinition;
 import io.swagger.v3.oas.annotations.info.Info;
 import io.swagger.v3.oas.annotations.info.License;
 import io.swagger.v3.oas.annotations.tags.Tag;
 
-import org.oransc.enrichment.controllers.StatusController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.StatusController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
 
 /**
  * Swagger configuration class that uses swagger documentation type and scans
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
 
 import io.netty.channel.ChannelOption;
 import io.netty.handler.ssl.SslContext;
@@ -28,7 +28,7 @@ import io.netty.handler.timeout.WriteTimeoutHandler;
 import java.lang.invoke.MethodHandles;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
 
 import io.netty.handler.ssl.SslContext;
 import io.netty.handler.ssl.SslContextBuilder;
@@ -41,8 +41,8 @@ import java.util.stream.Collectors;
 
 import javax.net.ssl.KeyManagerFactory;
 
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.util.ResourceUtils;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.configuration;
+package org.oransc.ics.configuration;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Value;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -26,7 +26,7 @@ import com.google.gson.annotations.SerializedName;
 
 import io.swagger.v3.oas.annotations.media.Schema;
 
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.exceptions.ServiceException;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
@@ -31,9 +31,9 @@ import io.swagger.v3.oas.annotations.responses.ApiResponses;
 import io.swagger.v3.oas.annotations.tags.Tag;
 
 import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoTypes;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -26,13 +26,13 @@ import com.google.gson.GsonBuilder;
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
@@ -40,16 +40,16 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.json.JSONObject;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 import reactor.core.publisher.Mono;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.gson.Gson;
@@ -41,17 +41,17 @@ import java.util.Collection;
 import java.util.List;
 
 import org.json.JSONObject;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
@@ -28,7 +28,7 @@ import io.swagger.v3.oas.annotations.media.Schema;
 import org.immutables.gson.Gson;
 
 @Gson.TypeAdapters
-@Schema(name = "consumer_job", description = "Information for an Enrichment Information Job")
+@Schema(name = "consumer_job", description = "Information for an Information Job")
 public class ConsumerJobInfo {
 
     @Schema(
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -27,13 +27,13 @@ import java.lang.invoke.MethodHandles;
 import java.time.Duration;
 import java.util.Collection;
 
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,16 +38,16 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.gson.annotations.SerializedName;
@@ -26,7 +26,7 @@ import com.google.gson.annotations.SerializedName;
 import io.swagger.v3.oas.annotations.media.Schema;
 
 import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJob;
+import org.oransc.ics.repository.InfoJob;
 
 @Gson.TypeAdapters
 @Schema(
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,9 +38,9 @@ import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Vector;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
@@ -30,9 +30,9 @@ import java.util.Vector;
 import lombok.Builder;
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -41,8 +41,8 @@ import java.util.function.Function;
 import lombok.Builder;
 import lombok.Getter;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
 
 import com.google.gson.Gson;
 import com.google.gson.GsonBuilder;
@@ -38,8 +38,8 @@ import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.Vector;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.tasks;
+package org.oransc.ics.tasks;
 
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.awaitility.Awaitility.await;
@@ -41,40 +41,40 @@ import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.configuration.ImmutableHttpProxyConfig;
-import org.oransc.enrichment.configuration.ImmutableWebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
-import org.oransc.enrichment.controller.ConsumerSimulatorController;
-import org.oransc.enrichment.controller.ProducerSimulatorController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobInfo;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.a1e.A1eEiTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerRegistrationInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerStatusInfo;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
-import org.oransc.enrichment.tasks.ProducerSupervision;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.configuration.ImmutableHttpProxyConfig;
+import org.oransc.ics.configuration.ImmutableWebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.controller.ConsumerSimulatorController;
+import org.oransc.ics.controller.ProducerSimulatorController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobInfo;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.a1e.A1eEiTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerInfoTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerInfoTypeInfo;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.r1producer.ProducerRegistrationInfo;
+import org.oransc.ics.controllers.r1producer.ProducerStatusInfo;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
+import org.oransc.ics.tasks.ProducerSupervision;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -182,7 +182,7 @@ class ApplicationTest {
         assertThat(jsonObj.remove("servers")).isNotNull();
 
         String indented = jsonObj.toString(4);
-        try (PrintStream out = new PrintStream(new FileOutputStream("api/ecs-api.json"))) {
+        try (PrintStream out = new PrintStream(new FileOutputStream("api/ics-api.json"))) {
             out.print(indented);
         }
     }
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment;
+package org.oransc.ics;
 
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -37,7 +37,7 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
         "server.ssl.key-store=./config/keystore.jks", //
         "app.webclient.trust-store=./config/truststore.jks", "app.vardata-directory=./target"})
 @SuppressWarnings("squid:S3577") // Not containing any tests since it is a mock.
-class MockEnrichmentService {
+class MockInformationService {
     private static final Logger logger = LoggerFactory.getLogger(ApplicationTest.class);
 
     @LocalServerPort
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
 
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
@@ -34,11 +34,11 @@ import java.util.List;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
@@ -18,7 +18,7 @@
  * ========================LICENSE_END===================================
  */
 
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
 
 import io.swagger.v3.oas.annotations.Operation;
 import io.swagger.v3.oas.annotations.media.Content;
@@ -34,10 +34,10 @@ import java.util.List;
 
 import lombok.Getter;
 
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.http.HttpStatus;
index 558d6d2..6e31874 160000 (submodule)
--- a/onap/oran
+++ b/onap/oran
@@ -1 +1 @@
-Subproject commit 558d6d2de33bb8cf4b16df980a0cdf3b1747a8e2
+Subproject commit 6e31874958b44f45c5dd78aef5c783916b16c6ee
diff --git a/pom.xml b/pom.xml
index e0665a1..71c6606 100644 (file)
--- a/pom.xml
+++ b/pom.xml
     <sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
     </properties>
     <modules>
-        <module>policy-agent</module>
-        <module>enrichment-coordinator-service</module>
+        <module>a1-policy-management-service</module>
+        <module>information-coordinator-service</module>
         <module>r-app-catalogue</module>
         <module>helm-manager</module>
         <module>dmaap-adaptor-java</module>
+        <module>dmaap-mediator-producer</module>
     </modules>
     <build>
         <plugins>
index cd2efc9..474a3ce 100644 (file)
@@ -27,13 +27,16 @@ RUN mkdir -p /opt/app/r-app-catalogue/etc/cert/
 
 EXPOSE 8680 8633
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/r-app-catalogue/
+RUN chown -R appuser:appuser /var/log/r-app-catalogue/
+USER appuser
+
 ADD /config/application.yaml /opt/app/r-app-catalogue/config/application.yaml
 ADD /config/r-app-catalogue-keystore.jks /opt/app/r-app-catalogue/etc/cert/keystore.jks
 ADD target/${JAR} /opt/app/r-app-catalogue/r-app-catalogue.jar
 
-
-RUN chmod -R 777 /opt/app/r-app-catalogue/config/
-
 CMD ["java", "-jar", "/opt/app/r-app-catalogue/r-app-catalogue.jar"]
 
 
index e4ffe75..1e34405 100755 (executable)
@@ -38,17 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -85,7 +75,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         # Create service to be able to receive events when rics becomes available
         # Must use rest towards the agent since dmaap is not configured yet
-        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
         if [ $__httpx == "HTTPS" ]; then
             use_cr_https
@@ -122,7 +112,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         start_mr    "$MR_READ_TOPIC"  "/events" "users/policy-agent" \
                     "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -163,9 +153,9 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
             api_equal json:policy-instances 0
 
-            cr_equal received_callbacks 3 120
+            cr_equal received_callbacks 3 120
 
-            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
 
         else
             api_equal json:rics 2 300
@@ -189,7 +179,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         echo "##### Service registry and supervision #####"
         echo "############################################"
 
-        api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "serv1" "ric-registration"
 
@@ -210,7 +200,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         echo "############################################"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
@@ -249,7 +239,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3
+            cr_equal received_callbacks 3
         fi
 
         if [[ $interface = *"DMAAP"* ]]; then
index 625346b..a561cc6 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -73,7 +64,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 if [ $RUNMODE == "DOCKER" ]; then
     start_consul_cbs
@@ -121,14 +112,14 @@ fi
 # Create policies
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
 
 use_agent_rest_http
 
-api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
 api_put_policy 201 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
 
@@ -173,7 +164,7 @@ fi
 #Update policies
 use_agent_rest_http
 
-api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
 api_put_policy 200 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
 
index ac6f8d5..da623ce 100755 (executable)
@@ -38,16 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -89,7 +80,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         # Create service to be able to receive events when rics becomes available
         # Must use rest towards the agent since dmaap is not configured yet
-        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
 
         if [ $__httpx == "HTTPS" ]; then
@@ -124,7 +115,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -176,8 +167,8 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3 120
-            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+            cr_equal received_callbacks 3 120
+            cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
         fi
         mr_equal requests_submitted 0
 
@@ -194,14 +185,14 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         api_get_services 404 "service1"
 
-        api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH_0/1"
 
-        api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
 
-        api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH/2"
+        api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH_0/2"
 
-        api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH/2"
+        api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH_0/2"
 
         api_put_service 400 "service2" 100 "/test"
 
@@ -209,20 +200,20 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         api_put_service 201 "service2" 300 "ftp://localhost:80/test"
 
-        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "service1" "service2" "ric-registration"
 
 
-        api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH/3"
+        api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH_0/3"
 
 
         api_get_service_ids 200 "service1" "service2" "service3" "ric-registration"
 
 
-        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
 
-        api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH/3"  "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+        api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH_0/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH_0/3"  "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
         api_get_services 200
 
@@ -251,7 +242,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         api_get_service_ids 200 "service2" "service3" "ric-registration"
 
 
-        api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH_0/1"
 
         api_get_service_ids 200 "service1" "service2" "service3"  "ric-registration"
 
@@ -386,10 +377,10 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
 
 
-        api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
@@ -544,7 +535,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
         fi
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            cr_equal received_callbacks 3
+            cr_equal received_callbacks 3
         fi
 
         if [[ $interface = *"DMAAP"* ]]; then
index e3b96a5..f855f6f 100755 (executable)
@@ -38,15 +38,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -71,7 +63,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 if [ $RUNMODE == "DOCKER" ]; then
     start_consul_cbs
@@ -107,60 +99,60 @@ if [ "$PMS_VERSION" == "V2" ]; then
     sim_print ricsim_g3_1 interface
 fi
 
-api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
 
-api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
 
-api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 api_get_service_ids 200 "service1" "service2"
 
 sleep_wait 30 "Waiting for keep alive timeout"
 
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 sleep_wait 100 "Waiting for keep alive timeout"
 
 api_get_services 404 "service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
 
 api_delete_services 204 "service2"
 
 api_get_services 404 "service1"
 api_get_services 404 "service2"
 
-api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
-api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
 sleep_wait 30 "Waiting for keep alive timeout"
 
-api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
 
 sleep_wait 100 "Waiting for keep alive timeout"
 
 api_get_services 404 "service3"
 
-api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 sleep_wait 60 "Waiting for keep alive timeout"
 
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 api_put_services_keepalive 200 "service4"
 
 sleep_wait 90 "Waiting for keep alive timeout"
 
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
 
 api_delete_services 204 "service4"
 
@@ -183,7 +175,7 @@ api_put_services_keepalive 404 "service3"
 api_put_services_keepalive 404 "service4"
 
 # Policy delete after timeout
-api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH_0/service10"
 
 sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
 
@@ -209,7 +201,7 @@ else
 fi
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -263,7 +255,7 @@ if [ "$PMS_VERSION" == "V2" ]; then
     sim_equal ricsim_g3_1 num_instances 1
 fi
 
-api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH_0/service10"
 
 #Wait for service expiry
 api_equal json:policies 0 120
index 0e4f4a7..cf172a0 100755 (executable)
 #
 
 
-TC_ONELINE_DESCR="ECS full interfaces walkthrough"
+TC_ONELINE_DESCR="ICS full interfaces walkthrough"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS PRODSTUB CR RICSIM CP HTTPPROXY NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="ICS PRODSTUB CR RICSIM CP HTTPPROXY NGW KUBEPROXY"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="PRODSTUB CR ECS RICSIM CP HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="PRODSTUB CR ICS RICSIM CP HTTPPROXY KUBEPROXY NGW"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,16 +38,7 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -59,7 +50,7 @@ clean_environment
 
 start_kube_proxy
 
-use_ecs_rest_https
+use_ics_rest_https
 
 use_prod_stub_https
 
@@ -69,15 +60,15 @@ use_cr_https
 
 start_http_proxy
 
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE  #Change NOPROXY to PROXY to run with http proxy
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE  #Change NOPROXY to PROXY to run with http proxy
 
 if [ $RUNMODE == "KUBE" ]; then
-    ecs_api_admin_reset
+    ics_api_admin_reset
 fi
 
 start_prod_stub
 
-set_ecs_debug
+set_ics_debug
 
 start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -89,7 +80,7 @@ if [ "$PMS_VERSION" == "V2" ]; then
     start_ric_simulators ricsim_g3 4  STD_2.0.0
 fi
 
-start_cr
+start_cr 1
 
 CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
 CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
@@ -110,89 +101,89 @@ TARGET150="http://localhost:80/target"  # Dummy target, no target for info data
 TARGET160="http://localhost:80/target"  # Dummy target, no target for info data in this env...
 
 #Status callbacks for eijobs
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
-STATUS3="$CR_SERVICE_APP_PATH/job3-status"
-STATUS8="$CR_SERVICE_APP_PATH/job8-status"
-STATUS10="$CR_SERVICE_APP_PATH/job10-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
+STATUS3="$CR_SERVICE_APP_PATH_0/job3-status"
+STATUS8="$CR_SERVICE_APP_PATH_0/job8-status"
+STATUS10="$CR_SERVICE_APP_PATH_0/job10-status"
 
 #Status callbacks for infojobs
-INFOSTATUS101="$CR_SERVICE_APP_PATH/info-job101-status"
-INFOSTATUS102="$CR_SERVICE_APP_PATH/info-job102-status"
-INFOSTATUS103="$CR_SERVICE_APP_PATH/info-job103-status"
-INFOSTATUS108="$CR_SERVICE_APP_PATH/info-job108-status"
-INFOSTATUS110="$CR_SERVICE_APP_PATH/info-job110-status"
-INFOSTATUS150="$CR_SERVICE_APP_PATH/info-job150-status"
-INFOSTATUS160="$CR_SERVICE_APP_PATH/info-job160-status"
-
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+INFOSTATUS101="$CR_SERVICE_APP_PATH_0/info-job101-status"
+INFOSTATUS102="$CR_SERVICE_APP_PATH_0/info-job102-status"
+INFOSTATUS103="$CR_SERVICE_APP_PATH_0/info-job103-status"
+INFOSTATUS108="$CR_SERVICE_APP_PATH_0/info-job108-status"
+INFOSTATUS110="$CR_SERVICE_APP_PATH_0/info-job110-status"
+INFOSTATUS150="$CR_SERVICE_APP_PATH_0/info-job150-status"
+INFOSTATUS160="$CR_SERVICE_APP_PATH_0/info-job160-status"
+
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
-    TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+    TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
 
-    ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
+    ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
 
-    ecs_api_idc_get_subscription_ids 200 test EMPTY
+    ics_api_idc_get_subscription_ids 200 test EMPTY
 
-    ecs_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_get_subscription 404 test
+    ics_api_idc_get_subscription 404 test
 
-    ecs_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+    ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
 
-    ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
+    ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
 
-    ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+    ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
 
-    ecs_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
-    ecs_api_idc_get_subscription 200 subscription-id-2 owner2 $TYPESTATUS2
+    ics_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_get_subscription 200 subscription-id-2 owner2 $TYPESTATUS2
 
-    ecs_api_idc_delete_subscription 204 subscription-id-2
+    ics_api_idc_delete_subscription 204 subscription-id-2
 
-    ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
+    ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
 
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
 
-    cr_equal received_callbacks 1 30
-    cr_equal received_callbacks?id=type-status1 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+    cr_equal received_callbacks 1 30
+    cr_equal received_callbacks?id=type-status1 1
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
 
-    ecs_api_edp_delete_type_2 204 type1
+    ics_api_edp_delete_type_2 204 type1
 
-    cr_equal received_callbacks 2 30
-    cr_equal received_callbacks?id=type-status1 2
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+    cr_equal received_callbacks 2 30
+    cr_equal received_callbacks?id=type-status1 2
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json DEREGISTERED
 
-    ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
-    ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
+    ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+    ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
 
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
 
-    cr_equal received_callbacks 4 30
-    cr_equal received_callbacks?id=type-status1 3
-    cr_equal received_callbacks?id=type-status2 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+    cr_equal received_callbacks 4 30
+    cr_equal received_callbacks?id=type-status1 3
+    cr_equal received_callbacks?id=type-status2 1
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
 
-    ecs_api_idc_delete_subscription 204 subscription-id-2
+    ics_api_idc_delete_subscription 204 subscription-id-2
 
-    ecs_api_edp_delete_type_2 204 type1
+    ics_api_edp_delete_type_2 204 type1
 
-    cr_equal received_callbacks 5 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=type-status2 1
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+    cr_equal received_callbacks 5 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=type-status2 1
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json DEREGISTERED
 
-    cr_api_reset
+    cr_api_reset 0
 fi
 
 ### Setup prodstub sim to accept calls for producers, types and jobs
@@ -248,462 +239,462 @@ prodstub_arm_job_create 200 prod-d job8
 prodstub_arm_job_create 200 prod-e job10
 prodstub_arm_job_create 200 prod-f job10
 
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
 
-cr_equal received_callbacks 0
+cr_equal received_callbacks 0
 
 ### Initial tests - no config made
 ### GET: type ids, types, producer ids, producers, job ids, jobs
 ### DELETE: jobs
-ecs_api_a1_get_type_ids 200 EMPTY
-ecs_api_a1_get_type 404 test-type
+ics_api_a1_get_type_ids 200 EMPTY
+ics_api_a1_get_type 404 test-type
 
-ecs_api_edp_get_type_ids 200 EMPTY
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_type 404 test-type
+ics_api_edp_get_type_ids 200 EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_type 404 test-type
 else
-    ecs_api_edp_get_type_2 404 test-type
+    ics_api_edp_get_type_2 404 test-type
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 EMPTY
-    ecs_api_edp_get_producer 404 test-prod
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 EMPTY
+    ics_api_edp_get_producer 404 test-prod
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE EMPTY
-    ecs_api_edp_get_producer_2 404 test-prod
+    ics_api_edp_get_producer_ids_2 200 NOTYPE EMPTY
+    ics_api_edp_get_producer_2 404 test-prod
 fi
-ecs_api_edp_get_producer_status 404 test-prod
+ics_api_edp_get_producer_status 404 test-prod
 
-ecs_api_edp_delete_producer 404 test-prod
+ics_api_edp_delete_producer 404 test-prod
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_ids 404 test-type NOWNER
-    ecs_api_a1_get_job_ids 404 test-type test-owner
+    ics_api_a1_get_job_ids 404 test-type NOWNER
+    ics_api_a1_get_job_ids 404 test-type test-owner
 
-    ecs_api_a1_get_job 404 test-type test-job
+    ics_api_a1_get_job 404 test-type test-job
 
-    ecs_api_a1_get_job_status 404 test-type test-job
+    ics_api_a1_get_job_status 404 test-type test-job
 else
-    ecs_api_a1_get_job_ids 200 test-type NOWNER EMPTY
-    ecs_api_a1_get_job_ids 200 test-type test-owner EMPTY
+    ics_api_a1_get_job_ids 200 test-type NOWNER EMPTY
+    ics_api_a1_get_job_ids 200 test-type test-owner EMPTY
 
-    ecs_api_a1_get_job 404 test-job
+    ics_api_a1_get_job 404 test-job
 
-    ecs_api_a1_get_job_status 404 test-job
+    ics_api_a1_get_job_status 404 test-job
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_delete_job 404 test-type test-job
+    ics_api_a1_delete_job 404 test-type test-job
 else
-    ecs_api_a1_delete_job 404 test-job
+    ics_api_a1_delete_job 404 test-job
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_jobs 404 test-prod
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_jobs 404 test-prod
 else
-    ecs_api_edp_get_producer_jobs_2 404 test-prod
+    ics_api_edp_get_producer_jobs_2 404 test-prod
 fi
 
-if [ $ECS_VERSION == "V1-2" ]; then
-    ecs_api_edp_get_type_2 404 test-type
-    ecs_api_edp_delete_type_2 404 test-type
+if [ $ICS_VERSION == "V1-2" ]; then
+    ics_api_edp_get_type_2 404 test-type
+    ics_api_edp_delete_type_2 404 test-type
 fi
 
 ### Setup of producer/job and testing apis ###
 
 ## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_put_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+    ics_api_edp_put_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 else
     #V1-2
-    ecs_api_edp_get_type_ids 200 EMPTY
-    ecs_api_edp_get_type_2 404 type1
-    ecs_api_edp_put_producer_2 404 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_type_ids 200 EMPTY
+    ics_api_edp_get_type_2 404 type1
+    ics_api_edp_put_producer_2 404 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
     # Create type, delete and create again
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_get_type_2 200 type1
-    ecs_api_edp_get_type_ids 200 type1
-    ecs_api_edp_delete_type_2 204 type1
-    ecs_api_edp_get_type_2 404 type1
-    ecs_api_edp_get_type_ids 200 EMPTY
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
-        ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+    ics_api_edp_get_type_2 200 type1
+    ics_api_edp_get_type_ids 200 type1
+    ics_api_edp_delete_type_2 204 type1
+    ics_api_edp_get_type_2 404 type1
+    ics_api_edp_get_type_ids 200 EMPTY
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+        ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
     else
-        ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+        ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
     fi
-    ecs_api_edp_get_type_ids 200 type1
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+    ics_api_edp_get_type_ids 200 type1
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
     else
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
     fi
 
-    ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
-    ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
-    if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-        cr_equal received_callbacks 3 30
-        cr_equal received_callbacks?id=type-status1 3
-        cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED type1 testdata/ecs/ei-type-1.json DEREGISTERED type1 testdata/ecs/ei-type-1.json REGISTERED
+    if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+        cr_equal received_callbacks 3 30
+        cr_equal received_callbacks?id=type-status1 3
+        cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED type1 testdata/ics/ei-type-1.json DEREGISTERED type1 testdata/ics/ei-type-1.json REGISTERED
     else
-        cr_equal received_callbacks 0
+        cr_equal received_callbacks 0
     fi
 fi
 
 
-ecs_api_a1_get_type_ids 200 type1
+ics_api_a1_get_type_ids 200 type1
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_type 200 type1 testdata/ecs/ei-type-1.json
+    ics_api_a1_get_type 200 type1 testdata/ics/ei-type-1.json
 else
-    ecs_api_a1_get_type 200 type1 testdata/ecs/empty-type.json
+    ics_api_a1_get_type 200 type1 testdata/ics/empty-type.json
 fi
 
-ecs_api_edp_get_type_ids 200 type1
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_type 200 type1 testdata/ecs/ei-type-1.json prod-a
+ics_api_edp_get_type_ids 200 type1
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_type 200 type1 testdata/ics/ei-type-1.json prod-a
 else
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
     else
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
     fi
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a
-    ecs_api_edp_get_producer_ids_2 200 type1 prod-a
-    ecs_api_edp_get_producer_ids_2 200 type2 EMPTY
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a
+    ics_api_edp_get_producer_ids_2 200 type1 prod-a
+    ics_api_edp_get_producer_ids_2 200 type2 EMPTY
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 else
-    ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
 
-ecs_api_a1_get_job_ids 200 type1 NOWNER EMPTY
-ecs_api_a1_get_job_ids 200 type1 test-owner EMPTY
+ics_api_a1_get_job_ids 200 type1 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type1 test-owner EMPTY
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job 404 type1 test-job
+    ics_api_a1_get_job 404 type1 test-job
 
-    ecs_api_a1_get_job_status 404 type1 test-job
+    ics_api_a1_get_job_status 404 type1 test-job
 else
-    ecs_api_a1_get_job 404 test-job
+    ics_api_a1_get_job 404 test-job
 
-    ecs_api_a1_get_job_status 404 test-job
+    ics_api_a1_get_job_status 404 test-job
 fi
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_jobs 200 prod-a EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_jobs 200 prod-a EMPTY
 else
-    ecs_api_edp_get_producer_jobs_2 200 prod-a EMPTY
+    ics_api_edp_get_producer_jobs_2 200 prod-a EMPTY
 fi
 
 ## Create a job for prod-a
 ## job1 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     fi
 fi
 
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 NOWNER job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
 
 if [ ! -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1
+    ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job 200 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 type1 job1 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 ENABLED
 else
-    ecs_api_a1_get_job 200 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 job1 ENABLED
+    ics_api_a1_get_job_status 200 job1 ENABLED
 fi
 
 prodstub_equal create/prod-a/job1 1
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 fi
 
 ## Create a second job for prod-a
 ## job2 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     fi
 fi
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1 job2
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
+ics_api_a1_get_job_ids 200 type1 NOWNER job1 job2
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
 if [ ! -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+    ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job 200 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 type1 job2 ENABLED
+    ics_api_a1_get_job_status 200 type1 job2 ENABLED
 else
-    ecs_api_a1_get_job 200 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 job2 ENABLED
+    ics_api_a1_get_job_status 200 job2 ENABLED
 fi
 
 prodstub_equal create/prod-a/job2 1
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 fi
 
 ## Setup prod-b
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
-else
-    ecs_api_edp_put_type_2 201 type2 testdata/ecs/ei-type-2.json
-    ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
-    if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-        cr_equal received_callbacks 4 30
-        cr_equal received_callbacks?id=type-status1 4
-        cr_api_check_all_ecs_subscription_events 200 type-status1 type2 testdata/ecs/ei-type-2.json REGISTERED
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
+else
+    ics_api_edp_put_type_2 201 type2 testdata/ics/ei-type-2.json
+    ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+    if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+        cr_equal received_callbacks 4 30
+        cr_equal received_callbacks?id=type-status1 4
+        cr_api_check_all_ics_subscription_events 200 0 type-status1 type2 testdata/ics/ei-type-2.json REGISTERED
     else
-        cr_equal received_callbacks 0
+        cr_equal received_callbacks 0
     fi
 fi
 
 
-ecs_api_a1_get_type_ids 200 type1 type2
+ics_api_a1_get_type_ids 200 type1 type2
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_type 200 type1 testdata/ecs/ei-type-1.json
-    ecs_api_a1_get_type 200 type2 testdata/ecs/ei-type-2.json
+    ics_api_a1_get_type 200 type1 testdata/ics/ei-type-1.json
+    ics_api_a1_get_type 200 type2 testdata/ics/ei-type-2.json
 else
-    ecs_api_a1_get_type 200 type1 testdata/ecs/empty-type.json
-    ecs_api_a1_get_type 200 type2 testdata/ecs/empty-type.json
+    ics_api_a1_get_type 200 type1 testdata/ics/empty-type.json
+    ics_api_a1_get_type 200 type2 testdata/ics/empty-type.json
 fi
 
-ecs_api_edp_get_type_ids 200 type1 type2
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_type 200 type1 testdata/ecs/ei-type-1.json prod-a
-    ecs_api_edp_get_type 200 type2 testdata/ecs/ei-type-2.json prod-b
+ics_api_edp_get_type_ids 200 type1 type2
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_type 200 type1 testdata/ics/ei-type-1.json prod-a
+    ics_api_edp_get_type 200 type2 testdata/ics/ei-type-2.json prod-b
 else
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
     else
-        ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+        ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
     fi
-    ecs_api_edp_get_type_2 200 type2 testdata/ecs/ei-type-2.json
+    ics_api_edp_get_type_2 200 type2 testdata/ics/ei-type-2.json
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+    ics_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
 else
-    ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
-    ecs_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+    ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
 fi
 
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
 
 ## Create job for prod-b
 ##  job3 - prod-b
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
 fi
 
 prodstub_equal create/prod-b/job3 1
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
     fi
 fi
 
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1 job2
-ecs_api_a1_get_job_ids 200 type2 NOWNER job3
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
-ecs_api_a1_get_job_ids 200 type2 ricsim_g3_3 job3
+ics_api_a1_get_job_ids 200 type1 NOWNER job1 job2
+ics_api_a1_get_job_ids 200 type2 NOWNER job3
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
+ics_api_a1_get_job_ids 200 type2 ricsim_g3_3 job3
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job 200 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
 else
-    ecs_api_a1_get_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+    ics_api_a1_get_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
 
-    ecs_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
-    ecs_api_edp_get_producer_jobs 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
+    ics_api_edp_get_producer_jobs 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 else
-    ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
-    ecs_api_edp_get_producer_jobs_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+    ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
+    ics_api_edp_get_producer_jobs_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 fi
 
 ## Setup prod-c (no types)
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
 else
-    ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
+    ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
-    ecs_api_edp_get_producer 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+    ics_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
+    ics_api_edp_get_producer 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
 else
-    ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
-    ecs_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
-    ecs_api_edp_get_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
+    ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+    ics_api_edp_get_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
 fi
 
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
 
 
 ## Delete job3 and prod-b and re-create if different order
 
 # Delete job then producer
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_delete_job 204 type2 job3
+    ics_api_a1_delete_job 204 type2 job3
 else
-    ecs_api_a1_delete_job 204 job3
+    ics_api_a1_delete_job 204 job3
 fi
 
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
 fi
 
-ecs_api_edp_delete_producer 204 prod-b
+ics_api_edp_delete_producer 204 prod-b
 
-ecs_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-b
 
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
 fi
 
 prodstub_equal delete/prod-b/job3 1
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 404 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+    ics_api_a1_put_job 404 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
 else
-    if [ $ECS_VERSION == "V1-1" ]; then
-        ecs_api_a1_put_job 404 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+    if [ $ICS_VERSION == "V1-1" ]; then
+        ics_api_a1_put_job 404 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
     else
-        ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
-        ecs_api_a1_get_job_status 200 job3 DISABLED
+        ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
+        ics_api_a1_get_job_status 200 job3 DISABLED
     fi
 fi
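 # In V1-1 a job PUT for a type without any registered producer is presumably rejected (404),
 # while later versions accept the job (201) and report its status as DISABLED until a
 # producer for the type appears again.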
 
 # Put producer then job
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
 else
-    ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+    ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
 fi
 
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
 else
-    if [ $ECS_VERSION == "V1-1" ]; then
-        ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template2.json
+    if [ $ICS_VERSION == "V1-1" ]; then
+        ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template2.json
     else
-        ecs_api_a1_put_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template2.json
+        ics_api_a1_put_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template2.json
     fi
-    ecs_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
     else
-        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
     fi
 fi
 
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
     prodstub_equal create/prod-b/job3 2
 else
     prodstub_equal create/prod-b/job3 3
@@ -711,462 +702,462 @@ fi
 prodstub_equal delete/prod-b/job3 1
 
 # Delete only the producer
-ecs_api_edp_delete_producer 204 prod-b
+ics_api_edp_delete_producer 204 prod-b
 
-ecs_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-b
 
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-c
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type2 job3 DISABLED
+    ics_api_a1_get_job_status 200 type2 job3 DISABLED
 else
-    ecs_api_a1_get_job_status 200 job3 DISABLED
+    ics_api_a1_get_job_status 200 job3 DISABLED
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 5 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=job3-status 1
-    cr_api_check_all_ecs_events 200 job3-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 5 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=job3-status 1
+    cr_api_check_all_ics_events 200 0 job3-status DISABLED
 else
-    cr_equal received_callbacks 1 30
-    cr_equal received_callbacks?id=job3-status 1
-    cr_api_check_all_ecs_events 200 job3-status DISABLED
+    cr_equal received_callbacks 1 30
+    cr_equal received_callbacks?id=job3-status 1
+    cr_api_check_all_ics_events 200 0 job3-status DISABLED
 fi
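 # The cr_* checks presumably query the callback receiver's per-id counters; with
 # TYPE-SUBSCRIPTIONS enabled, additional type-status callbacks raise the expected totals,
 # and the extra leading 0 argument to cr_api_check_all_ics_events presumably addresses
 # callback-receiver instance 0.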
 
 # Re-create the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
 else
-    ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+    ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
 fi
 
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 6 30
-    cr_equal received_callbacks?id=type-status1 4
-    cr_equal received_callbacks?id=job3-status 2
-    cr_api_check_all_ecs_events 200 job3-status ENABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks?id=type-status1 4
+    cr_equal received_callbacks?id=job3-status 2
+    cr_api_check_all_ics_events 200 0 job3-status ENABLED
 else
-    cr_equal received_callbacks 2 30
-    cr_equal received_callbacks?id=job3-status 2
-    cr_api_check_all_ecs_events 200 job3-status ENABLED
+    cr_equal received_callbacks 2 30
+    cr_equal received_callbacks?id=job3-status 2
+    cr_api_check_all_ics_events 200 0 job3-status ENABLED
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
     else
-        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+        prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
     fi
 fi
 
 ## Setup prod-d
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json
 else
-    ecs_api_edp_put_type_2 201 type4 testdata/ecs/ei-type-4.json
-    ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
+    ics_api_edp_put_type_2 201 type4 testdata/ics/ei-type-4.json
+    ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 7 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 7 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type4 testdata/ics/ei-type-4.json REGISTERED
 fi
 
-ecs_api_a1_get_job_ids 200 type4 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type4 NOWNER EMPTY
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type4 job8 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type4 job8 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job8 type4 $TARGET8 ricsim_g3_4 $STATUS8 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job8 type4 $TARGET8 ricsim_g3_4 $STATUS8 testdata/ics/job-template.json
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
     fi
 fi
 
 prodstub_equal create/prod-d/job8 1
 prodstub_equal delete/prod-d/job8 0
 
-ecs_api_a1_get_job_ids 200 type4 NOWNER job8
+ics_api_a1_get_job_ids 200 type4 NOWNER job8
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
 fi
 
 # Re-PUT the producer with zero types
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
 else
-    ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
+    ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_ids 404 type4 NOWNER
+    ics_api_a1_get_job_ids 404 type4 NOWNER
 else
-    ecs_api_a1_get_job_ids 200 type4 NOWNER job8
-    ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
+    ics_api_a1_get_job_ids 200 type4 NOWNER job8
+    ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type4 job8 DISABLED
+    ics_api_a1_get_job_status 200 type4 job8 DISABLED
 else
-    ecs_api_a1_get_job_status 200 job8 DISABLED
+    ics_api_a1_get_job_status 200 job8 DISABLED
 fi
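 # Re-registering prod-d with no supported types presumably leaves job8 defined but DISABLED,
 # since no producer supports type4 any longer.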
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 8 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_equal received_callbacks?id=job8-status 1
-    cr_api_check_all_ecs_events 200 job8-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 8 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_equal received_callbacks?id=job8-status 1
+    cr_api_check_all_ics_events 200 0 job8-status DISABLED
 else
-    cr_equal received_callbacks 3 30
-    cr_equal received_callbacks?id=job8-status 1
-    cr_api_check_all_ecs_events 200 job8-status DISABLED
+    cr_equal received_callbacks 3 30
+    cr_equal received_callbacks?id=job8-status 1
+    cr_api_check_all_ics_events 200 0 job8-status DISABLED
 fi
 
 prodstub_equal create/prod-d/job8 1
 prodstub_equal delete/prod-d/job8 0
 
 ## Re-setup prod-d
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json
 else
-    ecs_api_edp_put_type_2 200 type4 testdata/ecs/ei-type-4.json
-    ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
+    ics_api_edp_put_type_2 200 type4 testdata/ics/ei-type-4.json
+    ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_ids 404 type4 NOWNER
+    ics_api_a1_get_job_ids 404 type4 NOWNER
 else
-    ecs_api_a1_get_job_ids 200 type4 NOWNER job8
-    ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
+    ics_api_a1_get_job_ids 200 type4 NOWNER job8
+    ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
 fi
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=type-status1 6
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=type-status1 6
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type4 testdata/ics/ei-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=job8-status 2
-    cr_api_check_all_ecs_events 200 job8-status ENABLED
+    cr_equal received_callbacks?id=job8-status 2
+    cr_api_check_all_ics_events 200 0 job8-status ENABLED
 else
-    cr_equal received_callbacks 4 30
-    cr_equal received_callbacks?id=job8-status 2
-    cr_api_check_all_ecs_events 200 job8-status ENABLED
+    cr_equal received_callbacks 4 30
+    cr_equal received_callbacks?id=job8-status 2
+    cr_api_check_all_ics_events 200 0 job8-status ENABLED
 fi
 
 prodstub_equal create/prod-d/job8 2
 prodstub_equal delete/prod-d/job8 0
 
 ## Setup prod-e
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6 testdata/ecs/ei-type-6.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6 testdata/ics/ei-type-6.json
 else
-    ecs_api_edp_put_type_2 201 type6 testdata/ecs/ei-type-6.json
-    ecs_api_edp_put_producer_2 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6
+    ics_api_edp_put_type_2 201 type6 testdata/ics/ei-type-6.json
+    ics_api_edp_put_producer_2 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 11 30
-    cr_equal received_callbacks?id=type-status1 7
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 11 30
+    cr_equal received_callbacks?id=type-status1 7
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type6 testdata/ics/ei-type-6.json REGISTERED
 fi
 
-ecs_api_a1_get_job_ids 200 type6 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type6 NOWNER EMPTY
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type6 job10 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type6 job10 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ics/job-template.json
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
     fi
 fi
 
 prodstub_equal create/prod-e/job10 1
 prodstub_equal delete/prod-e/job10 0
 
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 ## Setup prod-f
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6 testdata/ecs/ei-type-6.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6 testdata/ics/ei-type-6.json
 else
-    ecs_api_edp_put_type_2 200 type6 testdata/ecs/ei-type-6.json
-    ecs_api_edp_put_producer_2 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6
+    ics_api_edp_put_type_2 200 type6 testdata/ics/ei-type-6.json
+    ics_api_edp_put_producer_2 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 12 30
-    cr_equal received_callbacks?id=type-status1 8
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 12 30
+    cr_equal received_callbacks?id=type-status1 8
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type6 testdata/ics/ei-type-6.json REGISTERED
 fi
 
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
     fi
 fi
 
 prodstub_equal create/prod-f/job10 1
 prodstub_equal delete/prod-f/job10 0
 
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 ## Status updates for prod-a and jobs
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 # Arm producer prod-a for supervision failure
 prodstub_arm_producer 200 prod-a 400
 
 # Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
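 # The trailing 360 is presumably a max wait (in seconds) for prod-a to reach DISABLED once
 # the armed 400 responses make its supervision callbacks fail.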
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d  prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d  prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d  prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d  prod-e prod-f
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a DISABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a DISABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 ENABLED
-    ecs_api_a1_get_job_status 200 type1 job2 ENABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 ENABLED
+    ics_api_a1_get_job_status 200 type1 job2 ENABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 ENABLED
-    ecs_api_a1_get_job_status 200 job2 ENABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 ENABLED
+    ics_api_a1_get_job_status 200 job2 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 # Arm producer prod-a for supervision
 prodstub_arm_producer 200 prod-a 200
 
 # Wait for producer prod-a to go enabled
-ecs_api_edp_get_producer_status 200 prod-a ENABLED 360
+ics_api_edp_get_producer_status 200 prod-a ENABLED 360
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 ENABLED
-    ecs_api_a1_get_job_status 200 type1 job2 ENABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 ENABLED
+    ics_api_a1_get_job_status 200 type1 job2 ENABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 ENABLED
-    ecs_api_a1_get_job_status 200 job2 ENABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 ENABLED
+    ics_api_a1_get_job_status 200 job2 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 # Arm producer prod-a for supervision failure
 prodstub_arm_producer 200 prod-a 400
 
 # Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a DISABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a DISABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 ENABLED
-    ecs_api_a1_get_job_status 200 type1 job2 ENABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 ENABLED
+    ics_api_a1_get_job_status 200 type1 job2 ENABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 ENABLED
-    ecs_api_a1_get_job_status 200 job2 ENABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 ENABLED
+    ics_api_a1_get_job_status 200 job2 ENABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 # Wait for producer prod-a to be removed
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 5 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 5 1000
 else
-    ecs_equal json:ei-producer/v1/eiproducers 5 1000
+    ics_equal json:ei-producer/v1/eiproducers 5 1000
 fi
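 # ics_equal presumably polls the given ICS resource until the element count matches, with the
 # trailing 1000 as a max wait in seconds - conceptually something like (ICS_URL is a placeholder):
 #   while [ "$(curl -s $ICS_URL/data-producer/v1/info-producers | jq length)" -ne 5 ]; do sleep 1; done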
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
 fi
 
 
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 DISABLED
-    ecs_api_a1_get_job_status 200 type1 job2 DISABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 DISABLED
+    ics_api_a1_get_job_status 200 type1 job2 DISABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 DISABLED
-    ecs_api_a1_get_job_status 200 job2 DISABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 DISABLED
+    ics_api_a1_get_job_status 200 job2 DISABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 14 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 14 30
 else
-    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks 6 30
 fi
 
-cr_equal received_callbacks?id=job1-status 1
-cr_equal received_callbacks?id=job2-status 1
+cr_equal received_callbacks?id=job1-status 1
+cr_equal received_callbacks?id=job2-status 1
 
-cr_api_check_all_ecs_events 200 job1-status DISABLED
-cr_api_check_all_ecs_events 200 job2-status DISABLED
+cr_api_check_all_ics_events 200 0 job1-status DISABLED
+cr_api_check_all_ics_events 200 0 job2-status DISABLED
 
 
 # Arm producer prod-e for supervision failure
 prodstub_arm_producer 200 prod-e 400
 
-ecs_api_edp_get_producer_status 200 prod-e DISABLED 1000
+ics_api_edp_get_producer_status 200 prod-e DISABLED 1000
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
 fi
 
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e DISABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e DISABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 DISABLED
-    ecs_api_a1_get_job_status 200 type1 job2 DISABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 DISABLED
+    ics_api_a1_get_job_status 200 type1 job2 DISABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 DISABLED
-    ecs_api_a1_get_job_status 200 job2 DISABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 DISABLED
+    ics_api_a1_get_job_status 200 job2 DISABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
 #Disable create for job10 in prod-e
@@ -1174,83 +1165,83 @@ prodstub_arm_job_create 200 prod-e job10 400
 
 #Update job10 - only prod-f will be updated
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 200 type6 job10 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+    ics_api_a1_put_job 200 type6 job10 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
 else
-    ecs_api_a1_put_job 200 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ecs/job-template2.json
+    ics_api_a1_put_job 200 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ics/job-template2.json
 fi
 #Reset producer and job responses
 prodstub_arm_producer 200 prod-e 200
 prodstub_arm_job_create 200 prod-e job10 200
 
-ecs_api_edp_get_producer_status 200 prod-e ENABLED 360
+ics_api_edp_get_producer_status 200 prod-e ENABLED 360
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
 fi
 
 #Wait for job to be updated
 sleep_wait 120
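 # sleep_wait presumably pauses the test for 120 seconds, giving ICS time to re-deliver the
 # updated job10 to the producers before the prodstub content checks below.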
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
     else
-        prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+        prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
     fi
 fi
 
 prodstub_arm_producer 200 prod-f 400
 
-ecs_api_edp_get_producer_status 200 prod-f DISABLED 360
+ics_api_edp_get_producer_status 200 prod-f DISABLED 360
 
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 4 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 4 1000
 else
-    ecs_equal json:ei-producer/v1/eiproducers 4 1000
+    ics_equal json:ei-producer/v1/eiproducers 4 1000
 fi
 
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e
 else
-    ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
+    ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
 fi
 
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 404 prod-f
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 404 prod-f
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 DISABLED
-    ecs_api_a1_get_job_status 200 type1 job2 DISABLED
-    ecs_api_a1_get_job_status 200 type2 job3 ENABLED
-    ecs_api_a1_get_job_status 200 type4 job8 ENABLED
-    ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+    ics_api_a1_get_job_status 200 type1 job1 DISABLED
+    ics_api_a1_get_job_status 200 type1 job2 DISABLED
+    ics_api_a1_get_job_status 200 type2 job3 ENABLED
+    ics_api_a1_get_job_status 200 type4 job8 ENABLED
+    ics_api_a1_get_job_status 200 type6 job10 ENABLED
 else
-    ecs_api_a1_get_job_status 200 job1 DISABLED
-    ecs_api_a1_get_job_status 200 job2 DISABLED
-    ecs_api_a1_get_job_status 200 job3 ENABLED
-    ecs_api_a1_get_job_status 200 job8 ENABLED
-    ecs_api_a1_get_job_status 200 job10 ENABLED
+    ics_api_a1_get_job_status 200 job1 DISABLED
+    ics_api_a1_get_job_status 200 job2 DISABLED
+    ics_api_a1_get_job_status 200 job3 ENABLED
+    ics_api_a1_get_job_status 200 job8 ENABLED
+    ics_api_a1_get_job_status 200 job10 ENABLED
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 14 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 14 30
 else
-    cr_equal received_callbacks 6 30
+    cr_equal received_callbacks 6 30
 fi
 
 
-if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
 
     # End test if info types are not implemented in the tested version
-    check_ecs_logs
+    check_ics_logs
 
     store_logs END
 
@@ -1329,338 +1320,342 @@ prodstub_arm_job_create 200 prod-if job110
 ### Initial tests - no config made
 ### GET: type ids, types, producer ids, producers, job ids, jobs
 ### DELETE: jobs
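 # The checks below presumably verify clean-state behaviour of the info consumer (idc) and
 # producer (edp) APIs: unknown types, producers and jobs return 404 or empty lists before
 # any of the new info types are configured.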
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6
-ecs_api_idc_get_type 404 test-type
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6
+ics_api_idc_get_type 404 test-type
 
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_get_type_2 404 test-type
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_get_type_2 404 test-type
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_2 404 test-prod
-ecs_api_edp_get_producer_status 404 test-prod
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_2 404 test-prod
+ics_api_edp_get_producer_status 404 test-prod
 
-ecs_api_edp_delete_producer 404 test-prod
+ics_api_edp_delete_producer 404 test-prod
 
-ecs_api_idc_get_job_ids 200 test-type NOWNER EMPTY
-ecs_api_idc_get_job_ids 200 test-type test-owner EMPTY
+ics_api_idc_get_job_ids 200 test-type NOWNER EMPTY
+ics_api_idc_get_job_ids 200 test-type test-owner EMPTY
 
-ecs_api_idc_get_job 404 test-job
+ics_api_idc_get_job 404 test-job
 
-ecs_api_idc_get_job_status2 404 test-job
+ics_api_idc_get_job_status2 404 test-job
 
-ecs_api_idc_delete_job 404 test-job
+ics_api_idc_delete_job 404 test-job
 
-ecs_api_edp_get_producer_jobs_2 404 test-prod
+ics_api_edp_get_producer_jobs_2 404 test-prod
 
-ecs_api_edp_get_type_2 404 test-type
-ecs_api_edp_delete_type_2 404 test-type
+ics_api_edp_get_type_2 404 test-type
+ics_api_edp_delete_type_2 404 test-type
 
 ### Setup of producer/job and testing apis ###
 
 ## Setup prod-ia
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_get_type_2 404 type101
-ecs_api_edp_put_producer_2 404 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_get_type_2 404 type101
+ics_api_edp_put_producer_2 404 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
 
 # Create type, delete and create again
-ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_2 200 type101
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_delete_type_2 204 type101
-ecs_api_edp_get_type_2 404 type101
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
+ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_2 200 type101
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_delete_type_2 204 type101
+ics_api_edp_get_type_2 404 type101
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
 
-ecs_api_edp_put_producer_2 201 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_put_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_put_producer_2 201 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_put_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
 
-ecs_api_edp_delete_type_2 406 type101
+if [[ "$ICS_FEATURE_LEVEL" == *"RESP_CODE_CHANGE_1" ]]; then
+    ics_api_edp_delete_type_2 409 type101
+else
+    ics_api_edp_delete_type_2 406 type101
+fi
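 # Deleting a type that is still referenced by a producer is rejected; with the
 # RESP_CODE_CHANGE_1 feature the expected status is presumably 409 (Conflict) instead of
 # the earlier 406.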
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 17 30
-    cr_equal received_callbacks?id=type-status1 11
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type101 testdata/ecs/info-type-1.json REGISTERED type101 testdata/ecs/info-type-1.json DEREGISTERED type101 testdata/ecs/info-type-1.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 17 30
+    cr_equal received_callbacks?id=type-status1 11
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type101 testdata/ics/info-type-1.json REGISTERED type101 testdata/ics/info-type-1.json DEREGISTERED type101 testdata/ics/info-type-1.json REGISTERED
 else
-    cr_equal received_callbacks 6
+    cr_equal received_callbacks 6
 fi
 
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_ids_2 200 type101 prod-ia
-ecs_api_edp_get_producer_ids_2 200 type102 EMPTY
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 type101 prod-ia
+ics_api_edp_get_producer_ids_2 200 type102 EMPTY
 
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
 
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
 
-ecs_api_idc_get_job_ids 200 type101 NOWNER EMPTY
-ecs_api_idc_get_job_ids 200 type101 test-owner EMPTY
+ics_api_idc_get_job_ids 200 type101 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type101 test-owner EMPTY
 
-ecs_api_idc_get_job 404 test-job
+ics_api_idc_get_job 404 test-job
 
-ecs_api_idc_get_job_status2 404 test-job
-ecs_api_edp_get_producer_jobs_2 200 prod-ia EMPTY
+ics_api_idc_get_job_status2 404 test-job
+ics_api_edp_get_producer_jobs_2 200 prod-ia EMPTY
 
 ## Create a job for prod-ia
 ## job101 - prod-ia
-ecs_api_idc_put_job 201 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ics/job-template.json VALIDATE
 
 # Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json
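 # prodstub_check_jobdata_3 presumably verifies that the producer stub received the
 # job-creation callback with the matching job id, type, target URI, owner and job parameters.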
 
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 NOWNER job101
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job1 job2 job3 job8 job10
 
-ecs_api_idc_get_job 200 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ics/job-template.json
 
-ecs_api_idc_get_job_status2 200 job101 ENABLED  1 prod-ia
+ics_api_idc_get_job_status2 200 job101 ENABLED  1 prod-ia
 
 prodstub_equal create/prod-ia/job101 1
 
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json
 
 ## Create a second job for prod-ia
 ## job102 - prod-ia
-ecs_api_idc_put_job 201 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ecs/job-template.json  VALIDATE
+ics_api_idc_put_job 201 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ics/job-template.json  VALIDATE
 
 # Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ia job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101 job102
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-2 job102
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+prodstub_check_jobdata_3 200 prod-ia job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
+ics_api_idc_get_job_ids 200 type101 NOWNER job101 job102
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 info-owner-2 job102
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
 
-ecs_api_idc_get_job 200 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ics/job-template.json
 
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
 
 prodstub_equal create/prod-ia/job102 1
 
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
 
 
 ## Setup prod-ib
-ecs_api_edp_put_type_2 201 type102 testdata/ecs/info-type-2.json
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_type_2 201 type102 testdata/ics/info-type-2.json
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 18 30
-    cr_equal received_callbacks?id=type-status1 12
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type102 testdata/ecs/info-type-2.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 18 30
+    cr_equal received_callbacks?id=type-status1 12
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type102 testdata/ics/info-type-2.json REGISTERED
 else
-    cr_equal received_callbacks 6
+    cr_equal received_callbacks 6
 fi
 
-ecs_api_idc_get_type_ids 200 type101 type102 type1 type2 type4 type6
+ics_api_idc_get_type_ids 200 type101 type102 type1 type2 type4 type6
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    ecs_api_idc_get_type 200 type101 testdata/ecs/info-type-1.json ENABLED 1
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    ics_api_idc_get_type 200 type101 testdata/ics/info-type-1.json ENABLED 1
 
-    ecs_api_idc_get_type 200 type102 testdata/ecs/info-type-2.json ENABLED 1
+    ics_api_idc_get_type 200 type102 testdata/ics/info-type-2.json ENABLED 1
 else
-    ecs_api_idc_get_type 200 type101 testdata/ecs/info-type-1.json
+    ics_api_idc_get_type 200 type101 testdata/ics/info-type-1.json
 
-    ecs_api_idc_get_type 200 type102 testdata/ecs/info-type-2.json
+    ics_api_idc_get_type 200 type102 testdata/ics/info-type-2.json
 fi
 
-ecs_api_edp_get_type_ids 200 type101 type102 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_2 200 type102 testdata/ecs/info-type-2.json
+ics_api_edp_get_type_ids 200 type101 type102 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_2 200 type102 testdata/ics/info-type-2.json
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
 
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
 
 ## Create job for prod-ib
 ##  job103 - prod-ib
-ecs_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json  VALIDATE
+ics_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json  VALIDATE
 
 prodstub_equal create/prod-ib/job103 1
 
 # Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template.json
 
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101 job102
-ecs_api_idc_get_job_ids 200 type102 NOWNER job103
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-2 job102
-ecs_api_idc_get_job_ids 200 type102 info-owner-3 job103
+ics_api_idc_get_job_ids 200 type101 NOWNER job101 job102
+ics_api_idc_get_job_ids 200 type102 NOWNER job103
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 info-owner-2 job102
+ics_api_idc_get_job_ids 200 type102 info-owner-3 job103
 
-ecs_api_idc_get_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json
 
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
 
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
-ecs_api_edp_get_producer_jobs_2 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template.json
 
 ## Setup prod-ic (no types)
-ecs_api_edp_put_producer_2 201 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic NOTYPE
+ics_api_edp_put_producer_2 201 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic NOTYPE
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-ecs_api_edp_get_producer_2 200 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic EMPTY
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_get_producer_2 200 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic EMPTY
 
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
 
 
 ## Delete job103 and prod-ib and re-create in a different order
 
 # Delete job then producer
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
 
-ecs_api_idc_delete_job 204 job103
+ics_api_idc_delete_job 204 job103
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
 
-ecs_api_edp_delete_producer 204 prod-ib
+ics_api_edp_delete_producer 204 prod-ib
 
-ecs_api_edp_get_producer_status 404 prod-ib
+ics_api_edp_get_producer_status 404 prod-ib
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
 
 prodstub_equal delete/prod-ib/job103 1
 
-ecs_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json VALIDATE
-ecs_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
+ics_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json VALIDATE
+ics_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
 
 # Put producer then job
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
 
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
 
-ecs_api_idc_put_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template2.json  VALIDATE
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_put_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template2.json  VALIDATE
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
 
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template2.json
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
 
 prodstub_equal create/prod-ib/job103 3
 prodstub_equal delete/prod-ib/job103 1
 
 # Delete only the producer
-ecs_api_edp_delete_producer 204 prod-ib
+ics_api_edp_delete_producer 204 prod-ib
 
-ecs_api_edp_get_producer_status 404 prod-ib
+ics_api_edp_get_producer_status 404 prod-ib
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103  job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103  job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
 
-ecs_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 19 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 19 30
 
-    cr_equal received_callbacks?id=info-job103-status 1
-    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+    cr_equal received_callbacks?id=info-job103-status 1
+    cr_api_check_all_ics_events 200 0 info-job103-status DISABLED
 else
-    cr_equal received_callbacks 7 30
-    cr_equal received_callbacks?id=info-job103-status 1
-    cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+    cr_equal received_callbacks 7 30
+    cr_equal received_callbacks?id=info-job103-status 1
+    cr_api_check_all_ics_events 200 0 info-job103-status DISABLED
 fi
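# Checks such as "cr_equal received_callbacks 19 30" above appear to take the
# expected counter value first and an optional maximum wait time in seconds
# last. A minimal sketch of that kind of polling assertion, assuming a
# hypothetical helper name and a curl-able counter endpoint (neither taken
# from this test suite):
poll_equal() {              # poll_equal <url> <expected> [<timeout-sec>]
    local url=$1 expected=$2 timeout=${3:-0} waited=0 current
    while true; do
        current=$(curl -s "$url")                  # read the current counter value
        [ "$current" = "$expected" ] && return 0   # reached the expected value
        [ "$waited" -ge "$timeout" ] && return 1   # give up after the timeout
        sleep 1
        waited=$((waited + 1))
    done
}
# Usage sketch (URL path assumed): poll_equal "$CR_SERVICE_APP_PATH_0/counter/received_callbacks" 19 30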
 
 # Re-create the producer
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
 
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
 
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 20 30
-    cr_equal received_callbacks?id=info-job103-status 2
-    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 20 30
+    cr_equal received_callbacks?id=info-job103-status 2
+    cr_api_check_all_ics_events 200 0 info-job103-status ENABLED
 else
-    cr_equal received_callbacks 8 30
-    cr_equal received_callbacks?id=info-job103-status 2
-    cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+    cr_equal received_callbacks 8 30
+    cr_equal received_callbacks?id=info-job103-status 2
+    cr_api_check_all_ics_events 200 0 info-job103-status ENABLED
 fi
 
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template2.json
 
 ## Setup prod-id
-ecs_api_edp_put_type_2 201 type104 testdata/ecs/info-type-4.json
-ecs_api_edp_put_producer_2 201 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
+ics_api_edp_put_type_2 201 type104 testdata/ics/info-type-4.json
+ics_api_edp_put_producer_2 201 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
 
-ecs_api_idc_get_job_ids 200 type104 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type104 NOWNER EMPTY
 
-ecs_api_idc_put_job 201 job108 type104 $TARGET108 info-owner-4 $INFOSTATUS108 testdata/ecs/job-template.json  VALIDATE
+ics_api_idc_put_job 201 job108 type104 $TARGET108 info-owner-4 $INFOSTATUS108 testdata/ics/job-template.json  VALIDATE
 
-prodstub_check_jobdata_3 200 prod-id job108 type104 $TARGET108 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-id job108 type104 $TARGET108 info-owner-4 testdata/ics/job-template.json
 
 prodstub_equal create/prod-id/job108 1
 prodstub_equal delete/prod-id/job108 0
 
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
 
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
 
 # Re-PUT the producer with zero types
-ecs_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id NOTYPE
+ics_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id NOTYPE
 
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108  job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108  job1 job2 job3 job8 job10
 
-ecs_api_idc_get_job_status2 200 job108 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job108 DISABLED EMPTYPROD
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 22 30
-    cr_equal received_callbacks?id=type-status1 13
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 22 30
+    cr_equal received_callbacks?id=type-status1 13
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=info-job108-status 1
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks?id=info-job108-status 1
+    cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
 else
-    cr_equal received_callbacks 9 30
-    cr_equal received_callbacks?id=info-job108-status 1
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks 9 30
+    cr_equal received_callbacks?id=info-job108-status 1
+    cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
 fi
 
 prodstub_equal create/prod-id/job108 1
 prodstub_equal delete/prod-id/job108 0
 
 ## Re-setup prod-id
-ecs_api_edp_put_type_2 200 type104 testdata/ecs/info-type-4.json
-ecs_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
+ics_api_edp_put_type_2 200 type104 testdata/ics/info-type-4.json
+ics_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
 
 
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
 
-ecs_api_idc_get_job_status2 200 job108 ENABLED  1 prod-id
+ics_api_idc_get_job_status2 200 job108 ENABLED  1 prod-id
 
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 24 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 24 30
 
-    cr_equal received_callbacks?id=type-status1 14
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+    cr_equal received_callbacks?id=type-status1 14
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json REGISTERED
 
-    cr_equal received_callbacks?id=info-job108-status 2
-    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+    cr_equal received_callbacks?id=info-job108-status 2
+    cr_api_check_all_ics_events 200 0 info-job108-status ENABLED
 else
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=info-job108-status 2
-    cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=info-job108-status 2
+    cr_api_check_all_ics_events 200 0 info-job108-status ENABLED
 fi
 
 prodstub_equal create/prod-id/job108 2
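# prodstub_equal appears to read the producer stub's per-callback counters:
# create/prod-id/job108 is now 2 because job108 has been created towards
# prod-id twice (the initial job PUT and the re-creation when prod-id was
# re-registered with type104), while no delete callback has been sent yet.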
@@ -1668,327 +1663,331 @@ prodstub_equal delete/prod-id/job108 0
 
 
 ## Setup prod-ie
-ecs_api_edp_put_type_2 201 type106 testdata/ecs/info-type-6.json
-ecs_api_edp_put_producer_2 201 prod-ie $CB_JOB/prod-ie $CB_SV/prod-ie type106
+ics_api_edp_put_type_2 201 type106 testdata/ics/info-type-6.json
+ics_api_edp_put_producer_2 201 prod-ie $CB_JOB/prod-ie $CB_SV/prod-ie type106
 
-ecs_api_idc_get_job_ids 200 type106 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type106 NOWNER EMPTY
 
-ecs_api_idc_put_job 201 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ecs/job-template.json  VALIDATE
+ics_api_idc_put_job 201 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ics/job-template.json  VALIDATE
 
-prodstub_check_jobdata_3 200 prod-ie job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ie job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template.json
 
 prodstub_equal create/prod-ie/job110 1
 prodstub_equal delete/prod-ie/job110 0
 
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
 
-ecs_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
+ics_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
 
 ## Setup prod-if
-ecs_api_edp_put_type_2 200 type106 testdata/ecs/info-type-6.json
-ecs_api_edp_put_producer_2 201 prod-if $CB_JOB/prod-if $CB_SV/prod-if type106
+ics_api_edp_put_type_2 200 type106 testdata/ics/info-type-6.json
+ics_api_edp_put_producer_2 201 prod-if $CB_JOB/prod-if $CB_SV/prod-if type106
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 26 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 26 30
 
-    cr_equal received_callbacks?id=type-status1 16
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type106 testdata/ecs/info-type-6.json REGISTERED type106 testdata/ecs/info-type-6.json REGISTERED
+    cr_equal received_callbacks?id=type-status1 16
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type106 testdata/ics/info-type-6.json REGISTERED type106 testdata/ics/info-type-6.json REGISTERED
 fi
 
 
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
 
-prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template.json
 
 prodstub_equal create/prod-if/job110 1
 prodstub_equal delete/prod-if/job110 0
 
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
 
-ecs_api_idc_get_job_status2 200 job110 ENABLED  2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job110 ENABLED  2 prod-ie prod-if
 
 ## Status updates prod-ia and jobs
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
 # Arm producer prod-ia for supervision failure
 prodstub_arm_producer 200 prod-ia 400
 
 # Wait for producer prod-ia to go disabled
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED 360
+ics_api_edp_get_producer_status 200 prod-ia DISABLED 360
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id  prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id  prod-ie prod-if prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia DISABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
 
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 # Arm producer prod-ia for supervision
 prodstub_arm_producer 200 prod-ia 200
 
 # Wait for producer prod-ia to go enabled
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED 360
+ics_api_edp_get_producer_status 200 prod-ia ENABLED 360
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 # Arm producer prod-ia for supervision failure
 prodstub_arm_producer 200 prod-ia 400
 
 # Wait for producer prod-ia to go disabled
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED 360
+ics_api_edp_get_producer_status 200 prod-ia DISABLED 360
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia DISABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 # Wait for producer prod-ia to be removed
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 9 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 9 1000
 else
-    ecs_equal json:ei-producer/v1/eiproducers 9 1000
+    ics_equal json:ei-producer/v1/eiproducers 9 1000
 fi
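# "ics_equal json:<path> <count> [<timeout>]" above appears to compare the
# number of elements in the JSON array returned by the given ICS path with the
# expected count, optionally waiting for it to converge. A rough equivalent for
# a one-off check, assuming jq is available and ICS_SERVICE_PATH holds the ICS
# base URL (both assumptions, not taken from this test suite):
expect_array_length() {     # expect_array_length <path> <expected-count>
    local len
    len=$(curl -s "$ICS_SERVICE_PATH/$1" | jq 'length')   # count array elements
    [ "$len" = "$2" ] || echo "unexpected length for $1: got $len, wanted $2"
}
# Usage sketch: expect_array_length data-producer/v1/info-producers 9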
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
 
 
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 28 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 28 30
 
-    cr_equal received_callbacks?id=info-job101-status 1
-    cr_equal received_callbacks?id=info-job102-status 1
-    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
-    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+    cr_equal received_callbacks?id=info-job101-status 1
+    cr_equal received_callbacks?id=info-job102-status 1
+    cr_api_check_all_ics_events 200 0 info-job101-status DISABLED
+    cr_api_check_all_ics_events 200 0 info-job102-status DISABLED
 else
-    cr_equal received_callbacks 12 30
+    cr_equal received_callbacks 12 30
 
-    cr_equal received_callbacks?id=info-job101-status 1
-    cr_equal received_callbacks?id=info-job102-status 1
-    cr_api_check_all_ecs_events 200 info-job101-status DISABLED
-    cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+    cr_equal received_callbacks?id=info-job101-status 1
+    cr_equal received_callbacks?id=info-job102-status 1
+    cr_api_check_all_ics_events 200 0 info-job101-status DISABLED
+    cr_api_check_all_ics_events 200 0 info-job102-status DISABLED
 fi
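# job101 and job102 were served only by prod-ia, so its removal leaves them
# DISABLED with an empty producer list (EMPTYPROD); the two job-status
# callbacks counted in the block above report exactly that transition.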
 
 
 # Arm producer prod-ie for supervision failure
 prodstub_arm_producer 200 prod-ie 400
 
-ecs_api_edp_get_producer_status 200 prod-ie DISABLED 1000
+ics_api_edp_get_producer_status 200 prod-ie DISABLED 1000
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie DISABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie DISABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
 
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
 
 #Disable create for job110 in prod-ie
 prodstub_arm_job_create 200 prod-ie job110 400
 
 #Update job110 - only prod-if will be updated
-ecs_api_idc_put_job 200 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ecs/job-template2.json  VALIDATE
+ics_api_idc_put_job 200 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ics/job-template2.json  VALIDATE
 #Reset producer and job responses
 prodstub_arm_producer 200 prod-ie 200
 prodstub_arm_job_create 200 prod-ie job110 200
 
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED 360
+ics_api_edp_get_producer_status 200 prod-ie ENABLED 360
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if  prod-b prod-c prod-d prod-e
 
 #Wait for job to be updated
 sleep_wait 120
 
-prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template2.json
 
 prodstub_arm_producer 200 prod-if 400
 
-ecs_api_edp_get_producer_status 200 prod-if DISABLED 360
+ics_api_edp_get_producer_status 200 prod-if DISABLED 360
 
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 8 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 8 1000
 else
-    ecs_equal json:ei-producer/v1/eiproducers 8 1000
+    ics_equal json:ei-producer/v1/eiproducers 8 1000
 fi
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-b prod-c prod-d prod-e
 
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 404 prod-if
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 404 prod-if
 
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 28
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 28
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 ### Test of pre and post validation
 
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106
-ecs_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
-ecs_api_idc_put_job 201 job160 type160 $TARGET160 info-owner-1 $INFOSTATUS160 testdata/ecs/job-template.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106
+ics_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
+ics_api_idc_put_job 201 job160 type160 $TARGET160 info-owner-1 $INFOSTATUS160 testdata/ics/job-template.json
 
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
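# Reading of the two PUTs above: with the VALIDATE flag a job referring to the
# not-yet-registered type150 is rejected (404), while job160 for the equally
# unknown type160 is accepted without validation (201) and stays DISABLED with
# no producers until type160 and a matching producer are registered below.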
 
 prodstub_arm_producer 200 prod-ig
 prodstub_arm_job_create 200 prod-ig job150
 prodstub_arm_job_create 200 prod-ig job160
 
-ecs_api_edp_put_producer_2 201 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig NOTYPE
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_put_producer_2 201 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig NOTYPE
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
 
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig EMPTY
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig EMPTY
 
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
 
 prodstub_arm_type 200 prod-ig type160
 
-ecs_api_edp_put_type_2 201 type160 testdata/ecs/info-type-60.json
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160
+ics_api_edp_put_type_2 201 type160 testdata/ics/info-type-60.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160
 
-ecs_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
+ics_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
 
-ecs_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
 
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig 60
 
-prodstub_check_jobdata_3 200 prod-ig job160 type160 $TARGET160 info-owner-1 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ig job160 type160 $TARGET160 info-owner-1 testdata/ics/job-template.json
 
 prodstub_equal create/prod-ig/job160 1
 prodstub_equal delete/prod-ig/job160 0
 
 prodstub_arm_type 200 prod-ig type150
 
-ecs_api_edp_put_type_2 201 type150 testdata/ecs/info-type-50.json
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160 type150
+ics_api_edp_put_type_2 201 type150 testdata/ics/info-type-50.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160 type150
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 30 30
-    cr_equal received_callbacks?id=type-status1 18
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type160 testdata/ecs/info-type-60.json REGISTERED type150 testdata/ecs/info-type-50.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 30 30
+    cr_equal received_callbacks?id=type-status1 18
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type160 testdata/ics/info-type-60.json REGISTERED type150 testdata/ics/info-type-50.json REGISTERED
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
-ecs_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
 
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
 
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 ENABLED  1 prod-ig
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 ENABLED  1 prod-ig
 
-ecs_api_idc_put_job 201 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
 
-ecs_api_idc_get_job_status2 200 job150 ENABLED  1 prod-ig 60
-ecs_api_idc_get_job_status2 200 job160 ENABLED  1 prod-ig
+ics_api_idc_get_job_status2 200 job150 ENABLED  1 prod-ig 60
+ics_api_idc_get_job_status2 200 job160 ENABLED  1 prod-ig
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 30 30
-    cr_equal received_callbacks?id=type-status1 18
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 30 30
+    cr_equal received_callbacks?id=type-status1 18
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
 # Test job deletion at type delete
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
 
-    ecs_api_edp_delete_type_2 406 type104
+    if [[ "$ICS_FEATURE_LEVEL" == *"RESP_CODE_CHANGE_1" ]]; then
+        ics_api_edp_delete_type_2 409 type104
+    else
+        ics_api_edp_delete_type_2 406 type104
+    fi
 
-    ecs_api_edp_delete_producer 204 prod-id
+    ics_api_edp_delete_producer 204 prod-id
 
-    ecs_api_edp_delete_type_2 204 type104
+    ics_api_edp_delete_type_2 204 type104
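# type104 apparently cannot be deleted while prod-id still registers it, hence
# the 406 (or 409 at the RESP_CODE_CHANGE_1 feature level); once the producer
# is removed the type delete succeeds, and the checks below expect a
# DEREGISTERED event for type104, a final DISABLED status callback for job108,
# and 404 on subsequent lookups of the producer and the job.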
 
-    cr_equal received_callbacks 32 30
-    cr_equal received_callbacks?id=info-job108-status 3
-    cr_equal received_callbacks?id=type-status1 19
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json DEREGISTERED
-    cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+    cr_equal received_callbacks 32 30
+    cr_equal received_callbacks?id=info-job108-status 3
+    cr_equal received_callbacks?id=type-status1 19
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json DEREGISTERED
+    cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
 
-    ecs_api_edp_get_producer 404 prod-id
+    ics_api_edp_get_producer 404 prod-id
 
-    ecs_api_idc_get_job 404 job-108
+    ics_api_idc_get_job 404 job-108
 
 else
-    cr_equal received_callbacks 12
+    cr_equal received_callbacks 12
 fi
 
-check_ecs_logs
+check_ics_logs
 
 store_logs END
 
index f011a21..822f835 100755 (executable)
@@ -32,10 +32,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 6241f3c..0948b65 100755 (executable)
 #
 
 
-TC_ONELINE_DESCR="ECS Create 10000 jobs (ei and info) restart, test job persistency"
+TC_ONELINE_DESCR="ICS Create 10000 jobs (ei and info) restart, test job persistency"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS PRODSTUB CR CP NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="ICS PRODSTUB CR CP NGW KUBEPROXY"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="ECS PRODSTUB CP CR KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="ICS PRODSTUB CP CR KUBEPROXY NGW"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,14 +38,7 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -57,15 +50,15 @@ clean_environment
 
 start_kube_proxy
 
-use_ecs_rest_http
+use_ics_rest_http
 
 use_prod_stub_http
 
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
 
 start_prod_stub
 
-set_ecs_trace
+set_ics_trace
 
 start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -73,7 +66,7 @@ if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then
     start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
 fi
 
-start_cr
+start_cr 1
 
 CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
 CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
@@ -81,23 +74,23 @@ TARGET="http://localhost:80/target"  # Dummy target
 
 NUM_JOBS=10000
 use_info_jobs=false  #Set flag if an interface supporting info-types is used
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
     use_info_jobs=true
     NUM_JOBS=5000 # 5K ei jobs and 5K info jobs
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
-    TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+    TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
 
-    ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
 
-    ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+    ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
 
-    ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+    ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
 
 fi
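# The two subscriptions route info-type REGISTERED/DEREGISTERED events to the
# callback receiver under the ids type-status1 and type-status2 (taken from
# the CR path suffixes), which is what the later
# cr_api_check_all_ics_subscription_events checks consume.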
 
@@ -183,225 +176,225 @@ if [ $use_info_jobs ]; then
 fi
 
 
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
 
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json
+    ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json
 
-    ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json type3 testdata/ecs/ei-type-3.json
+    ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json type3 testdata/ics/ei-type-3.json
 
-    ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json type5 testdata/ecs/ei-type-5.json
+    ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json type5 testdata/ics/ei-type-5.json
 
 else
 
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_put_type_2 201 type2 testdata/ecs/ei-type-2.json
-    ecs_api_edp_put_type_2 201 type3 testdata/ecs/ei-type-3.json
-    ecs_api_edp_put_type_2 201 type4 testdata/ecs/ei-type-4.json
-    ecs_api_edp_put_type_2 201 type5 testdata/ecs/ei-type-5.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+    ics_api_edp_put_type_2 201 type2 testdata/ics/ei-type-2.json
+    ics_api_edp_put_type_2 201 type3 testdata/ics/ei-type-3.json
+    ics_api_edp_put_type_2 201 type4 testdata/ics/ei-type-4.json
+    ics_api_edp_put_type_2 201 type5 testdata/ics/ei-type-5.json
 
-    ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
-    ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
+    ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
 
-    ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
+    ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
 
-    ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
+    ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
 
     if [ $use_info_jobs ]; then
-        ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
-        ecs_api_edp_put_type_2 201 type102 testdata/ecs/info-type-2.json
-        ecs_api_edp_put_type_2 201 type103 testdata/ecs/info-type-3.json
-        ecs_api_edp_put_type_2 201 type104 testdata/ecs/info-type-4.json
-        ecs_api_edp_put_type_2 201 type105 testdata/ecs/info-type-5.json
+        ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+        ics_api_edp_put_type_2 201 type102 testdata/ics/info-type-2.json
+        ics_api_edp_put_type_2 201 type103 testdata/ics/info-type-3.json
+        ics_api_edp_put_type_2 201 type104 testdata/ics/info-type-4.json
+        ics_api_edp_put_type_2 201 type105 testdata/ics/info-type-5.json
 
 
 
-        if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-            cr_equal received_callbacks 20 30
-            cr_equal received_callbacks?id=type-status1 10
-            cr_equal received_callbacks?id=type-status2 10
+        if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+            cr_equal received_callbacks 20 30
+            cr_equal received_callbacks?id=type-status1 10
+            cr_equal received_callbacks?id=type-status2 10
 
-            cr_api_check_all_ecs_subscription_events 200 type-status1 \
-                type1 testdata/ecs/ei-type-1.json REGISTERED \
-                type2 testdata/ecs/ei-type-2.json REGISTERED \
-                type3 testdata/ecs/ei-type-3.json REGISTERED \
-                type4 testdata/ecs/ei-type-4.json REGISTERED \
-                type5 testdata/ecs/ei-type-5.json REGISTERED \
-                type101 testdata/ecs/info-type-1.json REGISTERED \
-                type102 testdata/ecs/info-type-2.json REGISTERED \
-                type103 testdata/ecs/info-type-3.json REGISTERED \
-                type104 testdata/ecs/info-type-4.json REGISTERED \
-                type105 testdata/ecs/info-type-5.json REGISTERED
+            cr_api_check_all_ics_subscription_events 200 0 type-status1 \
+                type1 testdata/ics/ei-type-1.json REGISTERED \
+                type2 testdata/ics/ei-type-2.json REGISTERED \
+                type3 testdata/ics/ei-type-3.json REGISTERED \
+                type4 testdata/ics/ei-type-4.json REGISTERED \
+                type5 testdata/ics/ei-type-5.json REGISTERED \
+                type101 testdata/ics/info-type-1.json REGISTERED \
+                type102 testdata/ics/info-type-2.json REGISTERED \
+                type103 testdata/ics/info-type-3.json REGISTERED \
+                type104 testdata/ics/info-type-4.json REGISTERED \
+                type105 testdata/ics/info-type-5.json REGISTERED
 
-            cr_api_check_all_ecs_subscription_events 200 type-status2 \
-                type1 testdata/ecs/ei-type-1.json REGISTERED \
-                type2 testdata/ecs/ei-type-2.json REGISTERED \
-                type3 testdata/ecs/ei-type-3.json REGISTERED \
-                type4 testdata/ecs/ei-type-4.json REGISTERED \
-                type5 testdata/ecs/ei-type-5.json REGISTERED \
-                type101 testdata/ecs/info-type-1.json REGISTERED \
-                type102 testdata/ecs/info-type-2.json REGISTERED \
-                type103 testdata/ecs/info-type-3.json REGISTERED \
-                type104 testdata/ecs/info-type-4.json REGISTERED \
-                type105 testdata/ecs/info-type-5.json REGISTERED
+            cr_api_check_all_ics_subscription_events 200 0 type-status2 \
+                type1 testdata/ics/ei-type-1.json REGISTERED \
+                type2 testdata/ics/ei-type-2.json REGISTERED \
+                type3 testdata/ics/ei-type-3.json REGISTERED \
+                type4 testdata/ics/ei-type-4.json REGISTERED \
+                type5 testdata/ics/ei-type-5.json REGISTERED \
+                type101 testdata/ics/info-type-1.json REGISTERED \
+                type102 testdata/ics/info-type-2.json REGISTERED \
+                type103 testdata/ics/info-type-3.json REGISTERED \
+                type104 testdata/ics/info-type-4.json REGISTERED \
+                type105 testdata/ics/info-type-5.json REGISTERED
 
         fi
 
-        ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
+        ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
 
-        ecs_api_edp_put_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
+        ics_api_edp_put_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
 
-        ecs_api_edp_put_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
+        ics_api_edp_put_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
 
-        ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
+        ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
     fi
 fi
 
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-producer/v1/info-producers 4
+    ics_equal json:data-producer/v1/info-producers 4
 else
-    ecs_equal json:ei-producer/v1/eiproducers 4
+    ics_equal json:ei-producer/v1/eiproducers 4
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
-        ecs_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ics_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type1 job$i ENABLED
+            ics_api_a1_get_job_status 200 type1 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
+            ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 1 ]; then
-        ecs_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ics_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type2 job$i ENABLED
+            ics_api_a1_get_job_status 200 type2 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
+            ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
-        ecs_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ics_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type3 job$i ENABLED
+            ics_api_a1_get_job_status 200 type3 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
+            ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
-        ecs_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ics_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type4 job$i ENABLED
+            ics_api_a1_get_job_status 200 type4 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+            ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
-        ecs_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+        ics_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type5 job$i ENABLED
+            ics_api_a1_get_job_status 200 type5 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+            ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
 done
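# The loop above spreads the EI jobs evenly over type1..type5 via i mod 5 and,
# when info jobs are enabled, creates a companion info job (id offset by
# NUM_JOBS) on the matching type101..type105. A condensed sketch of the same
# distribution, using a hypothetical put_job helper in place of the ics_api_*
# wrappers and their status checks:
types=(type1 type2 type3 type4 type5)
info_types=(type101 type102 type103 type104 type105)
for ((i=1; i<=NUM_JOBS; i++)); do
    idx=$((i % 5))                       # i%5==0 -> type1, 1 -> type2, ... 4 -> type5
    put_job "job$i" "${types[$idx]}"
    if [ "$use_info_jobs" = true ]; then
        put_job "job$((i+NUM_JOBS))" "${info_types[$idx]}"
    fi
done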
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
 else
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
 fi
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 20 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 20 30
 
 else
-    cr_equal received_callbacks 0 30
+    cr_equal received_callbacks 0 30
 
 fi
 
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
 
-    ecs_equal json:/data-consumer/v1/info-type-subscription 2 200
+    ics_equal json:/data-consumer/v1/info-type-subscription 2 200
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
-    ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
 
     if [ $use_info_jobs ]; then
-        ecs_equal json:data-producer/v1/info-types 10 1000
+        ics_equal json:data-producer/v1/info-types 10 1000
     else
-        ecs_equal json:ei-producer/v1/eitypes 5 1000
+        ics_equal json:ei-producer/v1/eitypes 5 1000
     fi
 
 fi
 
-stop_ecs
+stop_ics
 
-cr_api_reset
+cr_api_reset 0
 
-start_stopped_ecs
+start_stopped_ics
 
-set_ecs_trace
+set_ics_trace
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
 
-    ecs_equal json:/data-consumer/v1/info-type-subscription 2 200
+    ics_equal json:/data-consumer/v1/info-type-subscription 2 200
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
-    ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
 
     if [ $use_info_jobs ]; then
-        ecs_equal json:data-producer/v1/info-types 10 1000
+        ics_equal json:data-producer/v1/info-types 10 1000
     else
-        ecs_equal json:ei-producer/v1/eitypes 5 1000
+        ics_equal json:ei-producer/v1/eitypes 5 1000
     fi
 fi
 
-cr_equal received_callbacks 0
+cr_equal received_callbacks 0
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
@@ -443,267 +436,267 @@ do
     fi
 done
 
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 404 prod-b
-ecs_api_edp_get_producer_status 404 prod-c
-ecs_api_edp_get_producer_status 404 prod-d
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-c
+ics_api_edp_get_producer_status 404 prod-d
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type1 job$i DISABLED
+            ics_api_a1_get_job_status 200 type1 job$i DISABLED
         else
-            ecs_api_a1_get_job_status 200 job$i DISABLED 120
+            ics_api_a1_get_job_status 200 job$i DISABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
         fi
     fi
     if [ $(($i%5)) -eq 1 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type2 job$i DISABLED
+            ics_api_a1_get_job_status 200 type2 job$i DISABLED
         else
-            ecs_api_a1_get_job_status 200 job$i DISABLED 120
+            ics_api_a1_get_job_status 200 job$i DISABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type3 job$i DISABLED
+            ics_api_a1_get_job_status 200 type3 job$i DISABLED
         else
-            ecs_api_a1_get_job_status 200 job$i DISABLED 120
+            ics_api_a1_get_job_status 200 job$i DISABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type4 job$i DISABLED
+            ics_api_a1_get_job_status 200 type4 job$i DISABLED
         else
-            ecs_api_a1_get_job_status 200 job$i DISABLED 120
+            ics_api_a1_get_job_status 200 job$i DISABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type5 job$i DISABLED
+            ics_api_a1_get_job_status 200 type5 job$i DISABLED
         else
-            ecs_api_a1_get_job_status 200 job$i DISABLED 120
+            ics_api_a1_get_job_status 200 job$i DISABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
         fi
     fi
 done
 
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
 
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json
+    ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json
 
-    ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json type3 testdata/ecs/ei-type-3.json
+    ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json type3 testdata/ics/ei-type-3.json
 
-    ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json type5 testdata/ecs/ei-type-5.json
+    ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json type5 testdata/ics/ei-type-5.json
 
 else
     if [ $use_info_jobs ]; then
-        ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1  type101
+        ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1  type101
 
-        ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2  type101 type102
+        ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2  type101 type102
 
-        ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3  type101 type102 type103
+        ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3  type101 type102 type103
 
-        ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5  type104 type105
+        ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5  type104 type105
     else
-        ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+        ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
-        ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
+        ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
 
-        ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
+        ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
 
-        ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
+        ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
     fi
 
 fi
 
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-producer/v1/info-producers 4
+    ics_equal json:data-producer/v1/info-producers 4
 else
-    ecs_equal json:ei-producer/v1/eiproducers 4
+    ics_equal json:ei-producer/v1/eiproducers 4
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type1 job$i ENABLED
+            ics_api_a1_get_job_status 200 type1 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 1 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type2 job$i ENABLED
+            ics_api_a1_get_job_status 200 type2 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type3 job$i ENABLED
+            ics_api_a1_get_job_status 200 type3 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type4 job$i ENABLED
+            ics_api_a1_get_job_status 200 type4 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
         if [  -z "$FLAT_A1_EI" ]; then
-            ecs_api_a1_get_job_status 200 type5 job$i ENABLED
+            ics_api_a1_get_job_status 200 type5 job$i ENABLED
         else
-            ecs_api_a1_get_job_status 200 job$i ENABLED 120
+            ics_api_a1_get_job_status 200 job$i ENABLED 120
         fi
         if [ $use_info_jobs ]; then
-            ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+            ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
         fi
     fi
 done
 
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
 else
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
 fi
 
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
 fi
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
-        if [ $ECS_VERSION == "V1-1" ]; then
-            prodstub_check_jobdata 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-            prodstub_check_jobdata 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-            prodstub_check_jobdata 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+        if [ $ICS_VERSION == "V1-1" ]; then
+            prodstub_check_jobdata 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+            prodstub_check_jobdata 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+            prodstub_check_jobdata 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
         else
             if [ $use_info_jobs ]; then
-                prodstub_check_jobdata_3 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_3 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_3 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_3 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_3 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_3 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
             else
-                prodstub_check_jobdata_2 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_2 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_2 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_2 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_2 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_2 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
             fi
         fi
         if [ $use_info_jobs ]; then
-            prodstub_check_jobdata_3 200 prod-a job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
-            prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
-            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
+            prodstub_check_jobdata_3 200 prod-a job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
+            prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
+            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
         fi
 
     fi
     if [ $(($i%5)) -eq 1 ]; then
-        if [ $ECS_VERSION == "V1-1" ]; then
-            prodstub_check_jobdata 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
-            prodstub_check_jobdata 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+        if [ $ICS_VERSION == "V1-1" ]; then
+            prodstub_check_jobdata 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+            prodstub_check_jobdata 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
         else
             if [ $use_info_jobs ]; then
-                prodstub_check_jobdata_3 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_3 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_3 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_3 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
             else
-                prodstub_check_jobdata_2 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
-                prodstub_check_jobdata_2 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_2 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+                prodstub_check_jobdata_2 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
             fi
         fi
         if [ $use_info_jobs ]; then
-            prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ecs/job-template.json
-            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ecs/job-template.json
+            prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ics/job-template.json
+            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ics/job-template.json
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
-        if [ $ECS_VERSION == "V1-1" ]; then
-            prodstub_check_jobdata 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+        if [ $ICS_VERSION == "V1-1" ]; then
+            prodstub_check_jobdata 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
         else
             if [ $use_info_jobs ]; then
-                prodstub_check_jobdata_3 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_3 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
             else
-                prodstub_check_jobdata_2 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_2 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
             fi
         fi
         if [ $use_info_jobs ]; then
-            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type103 $TARGET info-owner testdata/ecs/job-template.json
+            prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type103 $TARGET info-owner testdata/ics/job-template.json
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
-        if [ $ECS_VERSION == "V1-1" ]; then
-            prodstub_check_jobdata 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+        if [ $ICS_VERSION == "V1-1" ]; then
+            prodstub_check_jobdata 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
         else
             if [ $use_info_jobs ]; then
-                prodstub_check_jobdata_3 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_3 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
             else
-                prodstub_check_jobdata_2 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_2 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
             fi
         fi
         if [ $use_info_jobs ]; then
-            prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type104 $TARGET info-owner testdata/ecs/job-template.json
+            prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type104 $TARGET info-owner testdata/ics/job-template.json
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
-        if [ $ECS_VERSION == "V1-1" ]; then
-            prodstub_check_jobdata 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+        if [ $ICS_VERSION == "V1-1" ]; then
+            prodstub_check_jobdata 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
         else
             if [ $use_info_jobs ]; then
-                prodstub_check_jobdata_3 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_3 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
             else
-                prodstub_check_jobdata_2 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+                prodstub_check_jobdata_2 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
             fi
         fi
         if [ $use_info_jobs ]; then
-            prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type105 $TARGET info-owner testdata/ecs/job-template.json
+            prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type105 $TARGET info-owner testdata/ics/job-template.json
         fi
     fi
 done
@@ -712,104 +705,104 @@ done
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     if [ $(($i%5)) -eq 0 ]; then
-        ecs_api_a1_delete_job 204 job$i
+        ics_api_a1_delete_job 204 job$i
         if [ $use_info_jobs ]; then
-            ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+            ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
         fi
     fi
     if [ $(($i%5)) -eq 1 ]; then
-        ecs_api_a1_delete_job 204 job$i
+        ics_api_a1_delete_job 204 job$i
         if [ $use_info_jobs ]; then
-            ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+            ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
         fi
     fi
     if [ $(($i%5)) -eq 2 ]; then
-        ecs_api_a1_delete_job 204 job$i
+        ics_api_a1_delete_job 204 job$i
         if [ $use_info_jobs ]; then
-            ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+            ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
         fi
     fi
     if [ $(($i%5)) -eq 3 ]; then
-        ecs_api_a1_delete_job 204 job$i
+        ics_api_a1_delete_job 204 job$i
         if [ $use_info_jobs ]; then
-            ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+            ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
         fi
     fi
     if [ $(($i%5)) -eq 4 ]; then
-        ecs_api_a1_delete_job 204 job$i
+        ics_api_a1_delete_job 204 job$i
         if [ $use_info_jobs ]; then
-            ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+            ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
         fi
     fi
 done
 
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-producer/v1/info-producers 4
+    ics_equal json:data-producer/v1/info-producers 4
 else
-    ecs_equal json:ei-producer/v1/eiproducers 4
+    ics_equal json:ei-producer/v1/eiproducers 4
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_equal json:A1-EI/v1/eitypes/type1/eijobs 0
-    ecs_equal json:A1-EI/v1/eitypes/type2/eijobs 0
-    ecs_equal json:A1-EI/v1/eitypes/type3/eijobs 0
-    ecs_equal json:A1-EI/v1/eitypes/type4/eijobs 0
-    ecs_equal json:A1-EI/v1/eitypes/type5/eijobs 0
+    ics_equal json:A1-EI/v1/eitypes/type1/eijobs 0
+    ics_equal json:A1-EI/v1/eitypes/type2/eijobs 0
+    ics_equal json:A1-EI/v1/eitypes/type3/eijobs 0
+    ics_equal json:A1-EI/v1/eitypes/type4/eijobs 0
+    ics_equal json:A1-EI/v1/eitypes/type5/eijobs 0
 else
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 0
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 0
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 0
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 0
-    ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 0
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 0
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 0
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 0
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 0
+    ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 0
 fi
 
 if [ $use_info_jobs ]; then
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 0
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 0
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 0
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 0
-    ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 0
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 0
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 0
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 0
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 0
+    ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 0
 fi
 
 if [ $use_info_jobs ]; then
-    if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-        ecs_api_edp_put_type_2 200 type101 testdata/ecs/info-type-1.json
-        ecs_api_edp_put_type_2 200 type102 testdata/ecs/info-type-2.json
-        ecs_api_edp_put_type_2 200 type103 testdata/ecs/info-type-3.json
-        ecs_api_edp_put_type_2 200 type104 testdata/ecs/info-type-4.json
-        ecs_api_edp_put_type_2 200 type105 testdata/ecs/info-type-5.json
+    if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+        ics_api_edp_put_type_2 200 type101 testdata/ics/info-type-1.json
+        ics_api_edp_put_type_2 200 type102 testdata/ics/info-type-2.json
+        ics_api_edp_put_type_2 200 type103 testdata/ics/info-type-3.json
+        ics_api_edp_put_type_2 200 type104 testdata/ics/info-type-4.json
+        ics_api_edp_put_type_2 200 type105 testdata/ics/info-type-5.json
     fi
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 10 30
-    cr_equal received_callbacks?id=type-status1 5
-    cr_equal received_callbacks?id=type-status2 5
-
-    cr_api_check_all_ecs_subscription_events 200 type-status1 \
-        type101 testdata/ecs/info-type-1.json REGISTERED \
-        type102 testdata/ecs/info-type-2.json REGISTERED \
-        type103 testdata/ecs/info-type-3.json REGISTERED \
-        type104 testdata/ecs/info-type-4.json REGISTERED \
-        type105 testdata/ecs/info-type-5.json REGISTERED
-
-    cr_api_check_all_ecs_subscription_events 200 type-status2 \
-        type101 testdata/ecs/info-type-1.json REGISTERED \
-        type102 testdata/ecs/info-type-2.json REGISTERED \
-        type103 testdata/ecs/info-type-3.json REGISTERED \
-        type104 testdata/ecs/info-type-4.json REGISTERED \
-        type105 testdata/ecs/info-type-5.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 10 30
+    cr_equal received_callbacks?id=type-status1 5
+    cr_equal received_callbacks?id=type-status2 5
+
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 \
+        type101 testdata/ics/info-type-1.json REGISTERED \
+        type102 testdata/ics/info-type-2.json REGISTERED \
+        type103 testdata/ics/info-type-3.json REGISTERED \
+        type104 testdata/ics/info-type-4.json REGISTERED \
+        type105 testdata/ics/info-type-5.json REGISTERED
+
+    cr_api_check_all_ics_subscription_events 200 0 type-status2 \
+        type101 testdata/ics/info-type-1.json REGISTERED \
+        type102 testdata/ics/info-type-2.json REGISTERED \
+        type103 testdata/ics/info-type-3.json REGISTERED \
+        type104 testdata/ics/info-type-4.json REGISTERED \
+        type105 testdata/ics/info-type-5.json REGISTERED
 
 else
-    cr_equal received_callbacks 0 30
+    cr_equal received_callbacks 0 30
 fi
 
-check_ecs_logs
+check_ics_logs
 
 store_logs END
 
index f194817..fa1aea1 100755 (executable)
 #  ============LICENSE_END=================================================
 #
 
-TC_ONELINE_DESCR="Testing southbound proxy for PMS and ECS"
+TC_ONELINE_DESCR="Testing southbound proxy for PMS and ICS"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM ECS PRODSTUB HTTPPROXY NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM ICS PRODSTUB HTTPPROXY NGW KUBEPROXY"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" MR CR PA PRODSTUB RICSIM CP ECS HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES=" MR CR PA PRODSTUB RICSIM CP ICS HTTPPROXY KUBEPROXY NGW"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,17 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -60,11 +49,11 @@ setup_testenvironment
 use_cr_https
 use_agent_rest_https
 use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
 use_prod_stub_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
    echo "PMS VERSION 2 (V2) is required"
    exit 1
@@ -102,15 +91,15 @@ else
     consul_config_app                      ".consul_config.json"
 fi
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
-start_ecs PROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics PROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
 
 set_agent_trace
 
-set_ecs_debug
+set_ics_debug
 
 api_get_status 200
 
@@ -144,7 +133,7 @@ done
 #Check the number of types
 api_equal json:policy-types 2 300
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in STD
 for ((i=1; i<=$STD_NUM_RICS; i++))
@@ -182,58 +171,58 @@ fi
 TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
 prodstub_arm_job_create 200 prod-a job1
 prodstub_arm_job_create 200 prod-a job2
 
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
     #Type registration status callbacks
-    TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
+    TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
 
-    ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+    ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
 
-    ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+    ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
 fi
 
 ## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 else
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
-    ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
 
 
 ## Create a job for prod-a
 ## job1 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     fi
 fi
 
@@ -241,19 +230,19 @@ fi
 ## Create a second job for prod-a
 ## job2 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     fi
 fi
 
@@ -261,29 +250,29 @@ fi
 prodstub_arm_producer 200 prod-a 400
 
 # Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
 
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 0 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 0 1000
 else
-    ecs_equal json:ei-producer/v1/eiproducers 0 1000
+    ics_equal json:ei-producer/v1/eiproducers 0 1000
 fi
 
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
-    cr_equal received_callbacks 3 30
-    cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
-    cr_api_check_all_ecs_events 200 job1-status DISABLED
-    cr_api_check_all_ecs_events 200 job2-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+    cr_equal received_callbacks 3 30
+    cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
+    cr_api_check_all_ics_events 200 0 job1-status DISABLED
+    cr_api_check_all_ics_events 200 0 job2-status DISABLED
 else
-    cr_equal received_callbacks 2 30
-    cr_api_check_all_ecs_events 200 job1-status DISABLED
-    cr_api_check_all_ecs_events 200 job2-status DISABLED
+    cr_equal received_callbacks 2 30
+    cr_api_check_all_ics_events 200 0 job1-status DISABLED
+    cr_api_check_all_ics_events 200 0 job2-status DISABLED
 fi
 
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
 
 check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
 
 #### TEST COMPLETE ####
 
index 321dd24..1b05763 100755 (executable)
@@ -31,11 +31,7 @@ SUPPORTED_PROFILES="ONAP-ISTANBUL"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER"
 
-. ../common/testcase_common.sh  $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/http_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 6e22ced..32412b7 100755 (executable)
 TC_ONELINE_DESCR="Testing southbound proxy for Dmaap Adaptor"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CR MR ECS HTTPPROXY KUBEPROXY DMAAPADP"
+DOCKER_INCLUDED_IMAGES="CR MR ICS HTTPPROXY KUBEPROXY DMAAPADP"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" CR MR ECS HTTPPROXY KUBEPROXY DMAAPADP"
+KUBE_INCLUDED_IMAGES=" CR MR ICS HTTPPROXY KUBEPROXY DMAAPADP"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,12 +38,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/ecs_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -58,7 +52,7 @@ NUM_JOBS=10
 clean_environment
 
 use_cr_https
-use_ecs_rest_https
+use_ics_rest_https
 use_mr_https
 use_dmaapadp_https
 
@@ -66,11 +60,11 @@ start_kube_proxy
 
 start_http_proxy
 
-start_cr
+start_cr 1
 
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
 
-set_ecs_trace
+set_ics_trace
 
 start_mr
 
@@ -78,26 +72,26 @@ start_dmaapadp PROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $S
 
 set_dmaapadp_trace
 
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-    ecs_equal json:data-producer/v1/info-producers 1 60
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    ics_equal json:data-producer/v1/info-producers 1 60
 else
-    ecs_equal json:ei-producer/v1/eiproducers 1 60
+    ics_equal json:ei-producer/v1/eiproducers 1 60
 fi
 
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ecs_api_idc_get_type_ids 200 ExampleInformationType
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
+ics_api_idc_get_type_ids 200 ExampleInformationType
 
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+    ics_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_a1_get_job_status 200 joby$i ENABLED 30
+    ics_api_a1_get_job_status 200 joby$i ENABLED 30
 done
 
 
@@ -105,20 +99,20 @@ done
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
-cr_equal received_callbacks $(($NUM_JOBS*2)) 60
+cr_equal received_callbacks $(($NUM_JOBS*2)) 60
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=joby-data$i 2
+    cr_equal received_callbacks?id=joby-data$i 2
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
 done
 
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
 
 #### TEST COMPLETE ####
 
index 4503c88..232e5a8 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -109,7 +100,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         if [ $RUNMODE == "DOCKER" ]; then
             start_consul_cbs
@@ -156,13 +147,13 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 2 120  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         START_ID=2000
         NUM_POLICIES=10000  # Must be at least 100
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
index 4c261b4..5b8544a 100755 (executable)
 TC_ONELINE_DESCR="App test DMAAP Meditor and DMAAP Adapter"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
+DOCKER_INCLUDED_IMAGES="ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
+KUBE_INCLUDED_IMAGES=" ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC"
 
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
@@ -39,21 +39,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -62,28 +47,39 @@ setup_testenvironment
 #Local vars in test script
 ##########################
 FLAT_A1_EI="1"
-NUM_JOBS=100  # Mediator and adapter gets same number of jobs
+NUM_CR=10 # Number of callback receivers; callbacks are divided among this number of servers for load sharing
+## Note: The number of jobs must be a multiple of the number of CRs in order to calculate the number of expected events in each CR
+NUM_JOBS=200  # Mediator and adapter get the same number of jobs for every type
+
+if [ $NUM_JOBS -lt $NUM_CR ]; then
+    __log_conf_fail_general "Number of jobs: $NUM_JOBS must be greater than or equal to the number of CRs: $NUM_CR"
+fi
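# Editorial sketch, not part of the commit: the load sharing used in the job loops
# below maps each job index to one of the $NUM_CR callback receivers with a modulo
# and resolves that CR's base URL via bash indirect expansion. The CR_SERVICE_MR_PATH_<n>
# variables are assumed to be set by the test environment; a fake value is used here
# purely for illustration.
CR_SERVICE_MR_PATH_7=${CR_SERVICE_MR_PATH_7:-"http://cr7.example/mr"}   # illustration only
i=7                                          # example job index
cr_index=$(($i%$NUM_CR))                     # 7%10 -> CR instance 7
service_mr="CR_SERVICE_MR_PATH_"$cr_index    # name of the variable holding that CR's path
echo "${!service_mr}/job-adp-data$i"         # indirect expansion -> http://cr7.example/mr/job-adp-data7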
 
 clean_environment
 
 #use_cr_https
 use_cr_http
-use_ecs_rest_https
+use_ics_rest_https
 use_mr_https
 use_dmaapadp_https
 use_dmaapmed_https
 
 start_kube_proxy
 
-start_cr
+start_cr $NUM_CR
 
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
 
-set_ecs_trace
+set_ics_trace
 
 start_mr    "unauthenticated.dmaapmed.json" "/events" "dmaapmediatorproducer/STD_Fault_Messages" \
-            "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
-            "unauthenticated.dmaapadp_kafka.text" "/events" "dmaapadapterproducer/msgs"
+            "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs"
+
+start_kafkapc
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapadp_kafka.text" "text/plain"
+
+kafkapc_api_start_sending 200 "unauthenticated.dmaapadp_kafka.text"
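# Editorial note, not part of the commit: the kafka producer/consumer stub (kafkapc)
# replaces the earlier message-router text topic. The per-topic flow used in this
# script is: create the topic with its content type (kafkapc_api_create_topic),
# start producing on it (kafkapc_api_start_sending), post messages
# (kafkapc_api_post_msg / kafkapc_api_post_msg_from_file), then poll the stub's
# sent counter, e.g. kafkapc_equal topics/<topic>/counters/sent <count> <timeout>,
# before checking the delivered callbacks in the CRs.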
 
 start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_DATA_FILE
 
@@ -91,152 +87,212 @@ set_dmaapadp_trace
 
 start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
 
-ecs_equal json:data-producer/v1/info-producers 2 60
+ics_equal json:data-producer/v1/info-producers 2 60
 
 # Check producers
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
+ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
 
 
 # Create jobs for adapter - CR stores data as MD5 hash
 start_timer "Create adapter jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 job-adp-$i ExampleInformationType $CR_SERVICE_MR_PATH/job-adp-data$i"?storeas=md5" info-owner-adp-$i $CR_SERVICE_APP_PATH/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
+    cr_index=$(($i%$NUM_CR))
+    service_mr="CR_SERVICE_MR_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ics_api_idc_put_job 201 job-adp-$i ExampleInformationType ${!service_mr}/job-adp-data$i"?storeas=md5" info-owner-adp-$i ${!service_app}/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
 
 done
-print_timer "Create adapter jobs: $NUM_JOBS"
+print_timer
 
 # Create jobs for adapter kafka - CR stores data as MD5 hash
 start_timer "Create adapter (kafka) jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 job-adp-kafka-$i ExampleInformationTypeKafka $CR_SERVICE_TEXT_PATH/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i $CR_SERVICE_APP_PATH/job_status_info-owner-adp-kafka-$i testdata/dmaap-adapter/job-template-1-kafka.json
+    cr_index=$(($i%$NUM_CR))
+    service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ics_api_idc_put_job 201 job-adp-kafka-$i ExampleInformationTypeKafka ${!service_text}/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i ${!service_app}/job_status_info-owner-adp-kafka-$i testdata/dmaap-adapter/job-template-1-kafka.json
 
 done
-print_timer "Create adapter (kafka) jobs: $NUM_JOBS"
+print_timer
 
 # Create jobs for mediator - CR stores data as MD5 hash
 start_timer "Create mediator jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 job-med-$i STD_Fault_Messages $CR_SERVICE_MR_PATH/job-med-data$i"?storeas=md5" info-owner-med-$i $CR_SERVICE_APP_PATH/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
+    cr_index=$(($i%$NUM_CR))
+    service_mr="CR_SERVICE_MR_PATH_"$cr_index
+    service_app="CR_SERVICE_APP_PATH_"$cr_index
+    ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
 done
-print_timer "Create mediator jobs: $NUM_JOBS"
+print_timer
 
 # Check job status
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_a1_get_job_status 200 job-med-$i ENABLED 30
-    ecs_api_a1_get_job_status 200 job-adp-$i ENABLED 30
-    ecs_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
+    ics_api_a1_get_job_status 200 job-med-$i ENABLED 30
+    ics_api_a1_get_job_status 200 job-adp-$i ENABLED 30
+    ics_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
 done
 
 
-EXPECTED_DATA_DELIV=0
+EXPECTED_DATA_DELIV=0 #Total delivered msgs per CR
+DATA_DELIV_JOBS=0 #Total delivered msgs per job per CR
 
 mr_api_generate_json_payload_file 1 ./tmp/data_for_dmaap_test.json
-mr_api_generate_text_payload_file 1 ./tmp/data_for_dmaap_test.txt
+kafkapc_api_generate_text_payload_file 1 ./tmp/data_for_dmaap_test.txt
 
 ## Send json file via message-router to adapter
-
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
+DATA_DELIV_JOBS=5 #Each job will eventually get 5 msgs
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
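# Editorial note, not part of the commit: with the values set above (NUM_JOBS=200,
# NUM_CR=10), one message posted to the adapter topic fans out to all NUM_JOBS
# adapter jobs, which are spread evenly over the CR instances, so each CR receives
# NUM_JOBS/NUM_CR = 200/10 = 20 new callbacks per send and EXPECTED_DATA_DELIV is
# raised by 20 before each per-CR cr_equal check.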
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
 # Check received data callbacks from adapter
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
 done
 
 
 ## Send text file via message-router to adapter kafka
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 1 30
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 2 30
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 3 30
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 4 30
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 5 30
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
 # Check received data callbacks from adapter kafka
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
-    cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
-    cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
-    cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
-    cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
 done
 
 ## Send json file via message-router to mediator
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
 mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
 
 # Check received data callbacks from mediator
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
-    cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+    cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
 done
 
 
@@ -244,75 +300,99 @@ done
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
+DATA_DELIV_JOBS=7 #Each job will eventually get 5+2 msgs
+
 # Wait for data reception, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery adapter, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery adapter, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
 
 # Send small text via message-routere to adapter
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------1'
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------3'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------1'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------3'
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 7 30
 
 # Wait for data reception, adapter kafka
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
-start_timer "Data delivery adapte kafkar, 2 strings per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery adapte kafkar, 2 strings per job"
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery adapter kafka, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
 
 # Send small json via message-router to mediator
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
 
 # Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery mediator, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery mediator, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 # Check received number of messages for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=job-med-data$i 7
-    cr_equal received_callbacks?id=job-adp-data$i 7
-    cr_equal received_callbacks?id=job-adp-kafka-data$i 7
+    cr_index=$(($i%$NUM_CR))
+    cr_equal $cr_index received_callbacks?id=job-med-data$i $DATA_DELIV_JOBS
+    cr_equal $cr_index received_callbacks?id=job-adp-data$i $DATA_DELIV_JOBS
+    cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i $DATA_DELIV_JOBS
 done
 
 # Check received data and order for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-0"}'
-    cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-2"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-3"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------1'
-    cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------3'
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-0"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-2"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'
 done
 
 # Set delay in the callback receiver to slow down callbacks
 SEC_DELAY=2
-cr_delay_callback 200 $SEC_DELAY
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_delay_callback 200 $i $SEC_DELAY
+done
 
 # Send small json via message-router to adapter
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-5"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-7"}'
 
 # Wait for data reception, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV $(($NUM_JOBS+300))
-print_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 
 # Send small text via message-router to adapter kafka
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------5'
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------7'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------5'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------7'
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 9 30
 
 # Wait for data reception, adapter kafka
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery adapter kafka with $SEC_DELAY seconds delay in consumer, 2 strings per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV $(($NUM_JOBS+300))
-print_timer "Data delivery adapter with kafka $SEC_DELAY seconds delay in consumer, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 
 # Send small json via message-router to mediator
@@ -320,28 +400,33 @@ mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-4"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-6"}'
 
 # Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 start_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 1000
-print_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+    cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
 
 # Check received number of messages for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=job-med-data$i 9
-    cr_equal received_callbacks?id=job-adp-data$i 9
-    cr_equal received_callbacks?id=job-adp-kafka-data$i 9
+    cr_index=$(($i%$NUM_CR))
+    cr_equal $cr_index received_callbacks?id=job-med-data$i 9
+    cr_equal $cr_index received_callbacks?id=job-adp-data$i 9
+    cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i 9
 done
 
 # Check received data and order for mediator and adapter callbacks
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-4"}'
-    cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-6"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-5"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-7"}'
-    cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------5'
-    cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------7'
+    cr_index=$(($i%$NUM_CR))
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-4"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-6"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-5"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-7"}'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------5'
+    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------7'
 done
 
 #### TEST COMPLETE ####
index e017643..53437e8 100755 (executable)
@@ -28,15 +28,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -62,7 +54,7 @@ for consul_conf in $TESTED_VARIANTS ; do
 
     # Create service to be able to receive events when rics becomes available
     # Must use rest towards the agent since dmaap is not configured yet
-    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
     # Start one RIC of each type
     start_ric_simulators ricsim_g1 1  OSC_2.1.0
@@ -73,7 +65,7 @@ for consul_conf in $TESTED_VARIANTS ; do
 
     start_mr
 
-    start_cr
+    start_cr 1
 
     start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -93,9 +85,9 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 3 300
 
-        cr_equal received_callbacks 3 120
+        cr_equal received_callbacks 3 120
 
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
     else
         api_equal json:rics 2 300
     fi
@@ -114,9 +106,9 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 4 120
 
-        cr_equal received_callbacks 4 120
+        cr_equal received_callbacks 4 120
 
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g2_2
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g2_2
     else
         api_equal json:rics 3 120
     fi
@@ -138,7 +130,7 @@ for consul_conf in $TESTED_VARIANTS ; do
     if [ "$PMS_VERSION" == "V2" ]; then
         api_equal json:rics 3 120
 
-        cr_equal received_callbacks 4 120
+        cr_equal received_callbacks 4 120
     else
         api_equal json:rics 2 120
     fi
index 25bdc4c..31e40ab 100755 (executable)
@@ -32,16 +32,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -75,7 +66,7 @@ for interface in $TESTED_VARIANTS ; do
 
     start_ric_simulators ricsim_g1 $NUM_RICS_2 OSC_2.1.0
 
-    start_cr
+    start_cr 1
 
     start_mr
 
@@ -108,7 +99,7 @@ for interface in $TESTED_VARIANTS ; do
 
     # Create service to be able to receive events when rics becomes available
     # Must use rest towards the agent since dmaap is not configured yet
-    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+    api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
 
     #Load first config
     if [ $RUNMODE == "KUBE" ]; then
@@ -126,8 +117,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 8 300
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 8 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
+        cr_equal received_callbacks?id=ric-registration 8 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
     fi
 
     api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:NOTYPE:???? \
@@ -205,8 +196,8 @@ for interface in $TESTED_VARIANTS ; do
                              ricsim_g1_8:me1_ricsim_g1_8,me2_ricsim_g1_8:4,5:???? "
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 16 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
+        cr_equal received_callbacks?id=ric-registration 16 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2  ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6  ricsim_g1_7  ricsim_g1_8
     fi
 
     #Load config with all rics
@@ -219,8 +210,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 10 120
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 18 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9  ricsim_g1_10
+        cr_equal received_callbacks?id=ric-registration 18 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9  ricsim_g1_10
     fi
 
     sim_put_policy_type 201 ricsim_g1_9 5 testdata/OSC/sim_5.json
@@ -269,8 +260,8 @@ for interface in $TESTED_VARIANTS ; do
                              ricsim_g1_10:me1_ricsim_g1_10,me2_ricsim_g1_10:NOTYPE:???? "
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 19 120
-        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9
+        cr_equal received_callbacks?id=ric-registration 19 120
+        cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9
     fi
 
     #No policy type in sim #10
@@ -281,10 +272,10 @@ for interface in $TESTED_VARIANTS ; do
         api_equal json:policy_types 5
     fi
 
-    api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/serv1"
+    api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/serv1"
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        notificationurl=$CR_SERVICE_APP_PATH"/test"
+        notificationurl=$CR_SERVICE_APP_PATH_0"/test"
     else
         notificationurl=""
     fi
@@ -301,8 +292,8 @@ for interface in $TESTED_VARIANTS ; do
     api_equal json:rics 8 120
 
     if [ "$PMS_VERSION" == "V2" ]; then
-        cr_equal received_callbacks?id=ric-registration 19 120
-        cr_api_check_all_sync_events 200 ric-registration EMPTY
+        cr_equal received_callbacks?id=ric-registration 19 120
+        cr_api_check_all_sync_events 200 ric-registration EMPTY
     fi
 
     if [ "$PMS_VERSION" == "V2" ]; then
index 27675be..e509f6c 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -101,7 +92,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr
 
-        start_cr
+        start_cr 1
 
         start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -151,10 +142,10 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 2 300  #Wait for the agent to refresh types from the simulators
         fi
 
-        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
 
         if [ "$PMS_VERSION" == "V2" ]; then
-            notificationurl=$CR_SERVICE_APP_PATH"/test"
+            notificationurl=$CR_SERVICE_APP_PATH_0"/test"
         else
             notificationurl=""
         fi
index 02f6758..af46814 100755 (executable)
@@ -37,15 +37,7 @@ SUPPORTED_PROFILES="ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -67,7 +59,7 @@ NUM_POLICIES_PER_RIC=2000
 generate_policy_uuid
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -131,7 +123,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             consul_config_app                      ".consul_config.json"
         fi
 
-        start_cr
+        start_cr 1
 
         api_get_status 200
 
@@ -152,7 +144,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 1 300  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         echo "Check the number of types in the agent for each ric is 1"
         for ((i=1; i<=$NUM_RICS; i++))
@@ -180,7 +172,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             sim_equal ricsim_g1_$i num_instances $NUM_POLICIES_PER_RIC
         done
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         stop_policy_agent
 
@@ -217,7 +209,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         print_timer "Restore $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices after restart over $interface using "$__httpx
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         start_timer "Delete $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
 
@@ -247,7 +239,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         sleep_wait 200
 
-        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+        api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
         api_equal json:policies 0
 
index 02b8db9..ad71f46 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -97,7 +88,7 @@ fi
 
 start_mr
 
-start_cr
+start_cr 1
 
 start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
 
@@ -197,7 +188,7 @@ do
 done
 
 echo "Register a service"
-api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 TEST_DURATION=$((24*3600*$DAYS))
 TEST_START=$SECONDS
@@ -207,7 +198,7 @@ AGENT_INTERFACES="REST REST_PARALLEL DMAAP DMAAP-BATCH"
 MR_MESSAGES=0
 
 if [ "$PMS_VERSION" == "V2" ]; then
-      notificationurl=$CR_SERVICE_APP_PATH"/test"
+      notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
       notificationurl=""
 fi
index a5f1978..bd61b3a 100755 (executable)
@@ -37,16 +37,7 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -67,7 +58,7 @@ NUM_POLICIES_PER_RIC=500
 generate_policy_uuid
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -135,7 +126,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
 
         start_mr # Not used, but removes error messages from the agent log
 
-        start_cr
+        start_cr 1
 
         api_get_status 200
 
@@ -156,7 +147,7 @@ for __httpx in $TESTED_PROTOCOLS ; do
             api_equal json:policy_types 1 300  #Wait for the agent to refresh types from the simulator
         fi
 
-        api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH/1"
+        api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH_0/1"
 
         echo "Check the number of types in the agent for each ric is 1"
         for ((i=1; i<=$NUM_RICS; i++))
index e698f62..886b664 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/cr_api_functions.sh
 
 setup_testenvironment
 
@@ -184,10 +175,10 @@ fi
 # Create policies
 use_agent_rest_http
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
index 71a5d50..2a6a307 100755 (executable)
@@ -27,7 +27,7 @@ DOCKER_INCLUDED_IMAGES="" # Not used -  KUBE only test script
 #App names to include in the test when running kubernetes, space separated list
 KUBE_INCLUDED_IMAGES=" MR CR  PRODSTUB KUBEPROXY"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
-KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ECS RC SDNC DMAAPMED DMAAPADP"
+KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ICS RC SDNC DMAAPMED DMAAPADP"
 
 #Supported test environment profiles
 SUPPORTED_PROFILES="ORAN-E-RELEASE"
@@ -35,18 +35,6 @@ SUPPORTED_PROFILES="ORAN-E-RELEASE"
 SUPPORTED_RUNMODES="KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
 
 setup_testenvironment
 
@@ -57,10 +45,10 @@ use_cr_https
 use_agent_rest_https
 use_sdnc_https
 use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
 use_prod_stub_https
 
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
     use_rapp_catalogue_http # https not yet supported
 else
     ########################################use_rapp_catalogue_https
@@ -81,7 +69,7 @@ clean_environment
 
 pms_kube_pvc_reset
 
-ecs_kube_pvc_reset
+ics_kube_pvc_reset
 
 start_kube_proxy
 
@@ -108,13 +96,13 @@ start_sdnc
 
 start_policy_agent
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
-start_ecs NOPROXY
+start_ics NOPROXY
 
-set_ecs_trace
+set_ics_trace
 
 start_rapp_catalogue
 
@@ -240,16 +228,16 @@ else
     api_equal json:policy_ids 0
 fi
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/ER-app"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/ER-app"
 
 # Create policies in STD
 for ((i=0; i<$STD_NUM_RICS; i++))
 do
     ricid=$((3+$i))
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
 done
 
 #Create policies in STD 2
@@ -257,9 +245,9 @@ for ((i=0; i<$STD_NUM_RICS; i++))
 do
    ricid=$((5+$i))
    generate_policy_uuid
-   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos_template.json 1
+   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos_template.json 1
    generate_policy_uuid
-   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos2_template.json 1
+   api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos2_template.json 1
 done
 
 # Create policies in OSC
@@ -267,9 +255,9 @@ for ((i=0; i<$OSC_NUM_RICS; i++))
 do
     ricid=$((1+$i))
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi1_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi1_template.json 1
     generate_policy_uuid
-    api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi2_template.json 1
+    api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi2_template.json 1
 done
 
 
@@ -331,15 +319,15 @@ echo "ADD MR CHECK"
 
 FLAT_A1_EI="1"
 
-ecs_api_admin_reset
+ics_api_admin_reset
 
 CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
 CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
 TARGET1="$RIC_SIM_HTTPX://a1-sim-std2-0.a1-sim:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://a1-sim-std2-1.a1-sim:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
@@ -347,61 +335,61 @@ prodstub_arm_job_create 200 prod-a job1
 prodstub_arm_job_create 200 prod-a job2
 
 
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
 
 ## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 else
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
-    ecs_api_edp_get_type_2 200 type1
-    ecs_api_edp_get_type_ids 200 type1
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+    ics_api_edp_get_type_2 200 type1
+    ics_api_edp_get_type_ids 200 type1
 
-    ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
-    ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
 
 
 ## Create a job for prod-a
 ## job1 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     fi
 fi
 
 ## Create a second job for prod-a
 ## job2 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
-        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     fi
 fi
 
@@ -410,27 +398,27 @@ start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE
 
 start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
 
-ecs_equal json:ei-producer/v1/eiproducers 2 60
+ics_equal json:ei-producer/v1/eiproducers 2 60
 
-ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages
+ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages
 
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
 
 NUM_JOBS=5
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
+    ics_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH_0/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH_0/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+    ics_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    ecs_api_a1_get_job_status 200 jobx$i ENABLED 30
+    ics_api_a1_get_job_status 200 jobx$i ENABLED 30
 done
 
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
@@ -438,38 +426,38 @@ mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
 mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
 mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
 
-cr_equal received_callbacks $(($NUM_JOBS*2*2)) 60
+cr_equal received_callbacks $(($NUM_JOBS*2*2)) 60
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_equal received_callbacks?id=jobx-data$i 2
-    cr_equal received_callbacks?id=joby-data$i 2
+    cr_equal received_callbacks?id=jobx-data$i 2
+    cr_equal received_callbacks?id=joby-data$i 2
 done
 
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-0"}'
-    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-2"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
-    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-0"}'
+    cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-2"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
+    cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
 done
 
 
-stop_ecs
+stop_ics
 
-start_stopped_ecs
+start_stopped_ics
 
-# Check ECS status after restart
+# Check ICS status after restart
 
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_get_job_status 200 type1 job1 DISABLED
-    ecs_api_a1_get_job_status 200 type1 job2 DISABLED
+    ics_api_a1_get_job_status 200 type1 job1 DISABLED
+    ics_api_a1_get_job_status 200 type1 job2 DISABLED
 else
-    ecs_api_a1_get_job_status 200 job1 DISABLED
-    ecs_api_a1_get_job_status 200 job2 DISABLED
+    ics_api_a1_get_job_status 200 job1 DISABLED
+    ics_api_a1_get_job_status 200 job2 DISABLED
 fi
 
 check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
 check_sdnc_logs
 
 #### TEST COMPLETE ####
index 03697bc..5d23034 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL"
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -62,7 +53,7 @@ use_simulator_https
 use_mr_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     echo "Version V2 of PMS is needed, exiting..."
     exit 1
@@ -212,7 +203,7 @@ for interface in $TESTED_VARIANTS ; do
     # Create policies
     use_agent_rest_http
 
-    api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+    api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
     # Create policies in OSC
     for ((i=1; i<=$OSC_NUM_RICS; i++))
index 20a02cb..f3d5dd4 100755 (executable)
@@ -38,15 +38,6 @@ SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-R
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -61,7 +52,7 @@ use_sdnc_https
 use_simulator_https
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
     notificationurl=""
 fi
@@ -210,7 +201,7 @@ fi
 # Create policies
 use_agent_rest_http
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in OSC
 for ((i=1; i<=$OSC_NUM_RICS; i++))
index c93a6d7..4e6b87c 100755 (executable)
 #  ============LICENSE_END=================================================
 #
 
-TC_ONELINE_DESCR="Preparation demo setup  - policy management and enrichment information"
+TC_ONELINE_DESCR="Preparation demo setup  - policy management and information information"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC ECS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC ICS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" MR CR PA RC PRODSTUB RICSIM CP ECS SDNC HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES=" MR CR PA RC PRODSTUB RICSIM CP ICS SDNC HTTPPROXY KUBEPROXY NGW"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -38,19 +38,6 @@ SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-CHERRY ORAN-D-RELEASE ORAN-
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
 . ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
 
 setup_testenvironment
 
@@ -63,9 +50,9 @@ use_cr_https
 use_agent_rest_https
 use_sdnc_https
 use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
 use_prod_stub_https
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
     use_rapp_catalogue_http # https not yet supported
 else
     use_rapp_catalogue_https
@@ -73,7 +60,7 @@ fi
 
 
 if [ "$PMS_VERSION" == "V2" ]; then
-    notificationurl=$CR_SERVICE_APP_PATH"/test"
+    notificationurl=$CR_SERVICE_APP_PATH_0"/test"
 else
    echo "PMS VERSION 2 (V2) is required"
    exit 1
@@ -113,20 +100,20 @@ else
     consul_config_app                      ".consul_config.json"
 fi
 
-start_cr
+start_cr 1
 
 start_prod_stub
 
-start_ecs PROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics PROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
 
 start_rapp_catalogue
 
 set_agent_trace
 
-set_ecs_trace
+set_ics_trace
 
 use_info_jobs=false  #Set flag if interface supporting info-types is used
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
     use_info_jobs=true
 fi
 
@@ -168,7 +155,7 @@ done
 #Check the number of types
 api_equal json:policy-types 2 300
 
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
 
 # Create policies in STD
 for ((i=1; i<=$STD_NUM_RICS; i++))
@@ -205,49 +192,49 @@ fi
 TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
 
-STATUS1="$CR_SERVICE_APP_PATH/callbacks/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/callbacks/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/callbacks/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/callbacks/job2-status"
 
 prodstub_arm_producer 200 prod-a
 prodstub_arm_type 200 prod-a type1
 prodstub_arm_job_create 200 prod-a job1
 prodstub_arm_job_create 200 prod-a job2
 
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
 
 ## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
-    ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
 else
-    ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+    ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
 
-    ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 
-    ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+    ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
 fi
 
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
 
 
 ## Create a job for prod-a
 ## job1 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
 else
     if [ $use_info_jobs ]; then
-        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+        prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
     fi
 fi
 
@@ -255,24 +242,24 @@ fi
 ## Create a second job for prod-a
 ## job2 - prod-a
 if [  -z "$FLAT_A1_EI" ]; then
-    ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
-    ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+    ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
 fi
 
 # Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
-    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+    prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
 else
     if [ $use_info_jobs ]; then
-        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+        prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     else
-        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+        prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
     fi
 fi
 
 check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
 check_sdnc_logs
 
 #### TEST COMPLETE ####
index 1f8ef5d..cd3c778 100644 (file)
@@ -58,7 +58,7 @@ ONAP GUILIN
 
 >```./PM_DEMO.sh remote-remove  kube  release --env-file ../common/test_env-onap-guilin.sh```
 
-Note that ECS was not available before oran cherry so a test script without ECS is used.
+Note that ICS was not available before oran cherry so a test script without ICS is used.
 
 ONAP HONOLULU
 =============
@@ -76,7 +76,7 @@ Note: When istanbul is released, add the 'release' arg to run released images.
 
 ## Test case categories
 
-The test script are number using these basic categories where 0-999 are releated to the policy managment and 1000-1999 are related to enrichment management. 2000-2999 are for southbound http proxy. There are also demo test cases that test more or less all components. These test scripts does not use the numbering scheme below.
+The test scripts are numbered using these basic categories, where 0-999 are related to policy management and 1000-1999 are related to information management. 2000-2999 are for southbound http proxy. There are also demo test cases that test more or less all components. These test scripts do not use the numbering scheme below.
 
 The numbering in each series corresponds to the following groupings
 1-99 - Basic sanity tests
@@ -89,9 +89,9 @@ The numbering in each series corresponds to the following groupings
 
 900-999 - Misc test
 
-11XX - ECS API Tests
+11XX - ICS API Tests
 
-18XX - ECS Stability and capacity test
+18XX - ICS Stability and capacity test
 
 2000 - Southbound http proxy tests
 
@@ -127,8 +127,7 @@ SUPPORTED_RUNMODES=<List of runmodes, DOCKER and/or KUBE>
 
 CONDITIONALLY_IGNORED_IMAGES=<list of images to exclude if it does not exist in the profile file>
 
-. ../common/testcase_common.sh  $@
-< other scripts need to be sourced for specific interfaces>
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
index 27bdb4e..2ae6781 100755 (executable)
 #  ============LICENSE_END=================================================
 #
 
-
 TC_ONELINE_DESCR="Starts DMAAP MR"
 
 #App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY KAFKAPC"
 
 #App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY"
+KUBE_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY KAFKAPC"
 #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
@@ -34,21 +33,11 @@ KUBE_PRESTARTED_IMAGES=""
 CONDITIONALLY_IGNORED_IMAGES=""
 
 #Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
+SUPPORTED_PROFILES="ORAN-E-RELEASE"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
-. ../common/testcase_common.sh  $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
 
 setup_testenvironment
 
@@ -57,9 +46,20 @@ setup_testenvironment
 clean_environment
 start_kube_proxy
 start_mr    "$MR_READ_TOPIC"  "/events" "users/policy-agent" \
-            "$MR_WRITE_TOPIC" "/events" "users/mr-stub" \
-            "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
-            "unauthenticated.dmaapmed.json" "/events" "maapmediatorproducer/STD_Fault_Messages"
+            "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
+            #\
+            #"unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
+            #"unauthenticated.dmaapmed.json" "/events" "maapmediatorproducer/STD_Fault_Messages"
+
+start_kafkapc
+
+kafkapc_api_reset 200
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapadp.json" "application/json"
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapmed.json" "application/json"
+
+dmaap_api_print_topics
 
 if [ $RUNMODE == "KUBE" ]; then
     :
index 3577cfa..c52ee9c 100644 (file)
@@ -9,7 +9,7 @@ Some of the scripts can also be used for other kinds of tests, for example basic
 Contains functions for adapting towards the Policy Management Service (PMS) API, also via dmaap (using a message-router stub interface)
 
 `api_curl.sh` \
-A common curl based function for the agent and ecs apis. Also partly used for the Callback receiver and RAPP Catalogue apis.
+A common curl based function for the agent and ics apis. Also partly used for the Callback receiver and RAPP Catalogue apis.
 
 `clean-kube.sh` \
 Cleans all services, deployments, pods, replica set etc started by the test environment in kubernetes.
@@ -44,8 +44,8 @@ A python script to delete a batch of policies. The script is intended to run in
 `do_curl_function.sh`
 A script for executing a curl call with a specific url and optional payload. It also compares the response with an expected result in terms of response code and optionally returned payload. Intended to be used by test scripts (for example basic test scripts of other components)
 
-`ecs_api_functions.sh` \
-Contains functions for adapting towards the ECS API
+`ics_api_functions.sh` \
+Contains functions for adapting towards the ICS API
 
 `extract_sdnc_reply.py` \
 A python script to extract the information from an sdnc (A1 Controller) reply json. Helper for the test environment.
@@ -203,17 +203,6 @@ Print the value of the timer (in seconds) previously started by 'start_timer'. (
 | --------- | ----------- |
 | `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
 
-## Function: print_and_reset_timer ##
-
-Print the value of the timer (in seconds) previously started by 'start_timer'. Also reset the timer to 0. The result of the timer as well as the args to the function will also be printed in the test report.
-| arg list |
-|--|
-| `<timer-message-to-print>` |
-
-| parameter | description |
-| --------- | ----------- |
-| `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
-
 ## Function: deviation ##
 
 Mark a test as a deviation from the requirements. The list of deviations will be printed in the test report.
@@ -1041,9 +1030,9 @@ Check the contents of all ric events received for a callback id.
 | `EMPTY` | Indicator for an empty list  |
 | `<ric-id>` | Id of the ric  |
 
-## Function: cr_api_check_all_ecs_events() ##
+## Function: cr_api_check_all_ics_events() ##
 
-Check the contents of all current status events for one id from ECS
+Check the contents of all current status events for one id from ICS
 
 | arg list |
 |--|
@@ -1056,9 +1045,9 @@ Check the contents of all current status events for one id from ECS
 | `EMPTY` | Indicator for an empty list  |
 | `<status>` | Status string  |
 
-## Function: cr_api_check_all_ecs_subscription_events() ##
+## Function: cr_api_check_all_ics_subscription_events() ##
 
-Check the contents of all current subscription events for one id from ECS
+Check the contents of all current subscription events for one id from ICS
 
 | arg list |
 |--|
@@ -1083,81 +1072,81 @@ Reset the callback receiver
 | - |
 
 
-# Description of functions in ecs_api_functions.sh #
+# Description of functions in ics_api_functions.sh #
 
-## Function: use_ecs_rest_http ##
+## Function: use_ics_rest_http ##
 
-Use http for all API calls to the ECS. This is the default protocol.
+Use http for all API calls to the ICS. This is the default protocol.
 | arg list |
 |--|
 | None |
 
-## Function: use_ecs_rest_https ##
+## Function: use_ics_rest_https ##
 
-Use https for all API calls to the ECS.
+Use https for all API calls to the ICS.
 | arg list |
 |--|
 | None |
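
For example, the demo scripts updated in this change select https towards ICS together with the other components:

```bash
use_cr_https
use_agent_rest_https
use_sdnc_https
use_simulator_https
use_ics_rest_https
use_prod_stub_https
```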
 
-## Function: use_ecs_dmaap_http ##
+## Function: use_ics_dmaap_http ##
 
-Send and recieve all API calls to the ECS over Dmaap via the MR using http.
+Send and receive all API calls to the ICS over Dmaap via the MR using http.
 | arg list |
 |--|
 | None |
 
-## Function: use_ecs_dmaap_https ##
+## Function: use_ics_dmaap_https ##
 
+Send and receive all API calls to the ICS over Dmaap via the MR using https.
+Send and recieve all API calls to the ICS over Dmaap via the MR using https.
 | arg list |
 |--|
 | None |
 
-## Function: start_ecs ##
+## Function: start_ics ##
 
-Start the ECS container in docker or kube depending on running mode.
+Start the ICS container in docker or kube depending on running mode.
 | arg list |
 |--|
 | None |
 
-## Function: stop_ecs ##
+## Function: stop_ics ##
 
-Stop the ECS container.
+Stop the ICS container.
 | arg list |
 |--|
 | None |
 
-## Function: start_stopped_ecs ##
+## Function: start_stopped_ics ##
 
-Start a previously stopped ecs.
+Start a previously stopped ics.
 | arg list |
 |--|
 | None |
 
-## Function: set_ecs_debug ##
+## Function: set_ics_debug ##
 
-Configure the ECS log on debug level. The ECS must be running.
+Configure the ICS log on debug level. The ICS must be running.
 | arg list |
 |--|
 | None |
 
-## Function: set_ecs_trace ##
+## Function: set_ics_trace ##
 
-Configure the ECS log on trace level. The ECS must be running.
+Configure the ICS log on trace level. The ICS must be running.
 | arg list |
 |--|
 | None |
 
-## Function: check_ecs_logs ##
+## Function: check_ics_logs ##
 
-Check the ECS log for any warnings and errors and print the count of each.
+Check the ICS log for any warnings and errors and print the count of each.
 | arg list |
 |--|
 | None |
 
-## Function: ecs_equal ##
+## Function: ics_equal ##
 
-Tests if a variable value in the ECS is equal to a target value.
+Tests if a variable value in the ICS is equal to a target value.
 Without the timeout, the test sets pass or fail immediately depending on if the variable is equal to the target or not.
 With the timeout, the test waits up to the timeout seconds before setting pass or fail depending on if the variable value becomes equal to the target value or not.
 See the 'a1-interface' repo for more details.
@@ -1168,11 +1157,11 @@ See the 'a1-interface' repo for more details.
 
 | parameter | description |
 | --------- | ----------- |
-| `<variable-name>` | Variable name in ecs  |
+| `<variable-name>` | Variable name in ics  |
 | `<target-value>` | Target value for the variable  |
 | `<timeout-in-sec>` | Max time to wait for the variable to reach the target value  |
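 
 A minimal usage sketch (assumes the test env and `ics_api_functions.sh` are sourced; the counter name and values are illustrative):
 
 ```bash
 # wait up to 60 seconds for the ICS job counter to reach 1
 ics_equal json:data-consumer/v1/info-jobs 1 60
 ```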
 
-## Function: ecs_api_a1_get_job_ids() ##
+## Function: ics_api_a1_get_job_ids() ##
 
 Test of GET '/A1-EI/v1/eitypes/{eiTypeId}/eijobs' and optional check of the array of returned job ids.
 To test the response code only, provide the response code parameter as well as a type id and an owner id.
@@ -1191,7 +1180,7 @@ To also test the response payload add the 'EMPTY' for an expected empty array or
 | `<job-id>` | Id of the expected job  |
 | `EMPTY` | The expected list of job id shall be empty  |
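 
 Illustrative call (type, owner and job ids are examples only):
 
 ```bash
 # expect 200 and that the job list for type1/owner1 contains exactly job1
 ics_api_a1_get_job_ids 200 type1 owner1 job1
 ```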
 
-## Function: ecs_api_a1_get_type() ##
+## Function: ics_api_a1_get_type() ##
 
 Test of GET '/A1-EI/v1/eitypes/{eiTypeId}' and optional check of the returned schema.
 To test the response code only, provide the response code parameter as well as the type-id.
@@ -1207,7 +1196,7 @@ To also test the response payload add a path to the expected schema file.
 | `<type-id>` | Id of the EI type  |
 | `<schema-file>` | Path to a schema file to compare with the returned schema  |
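 
 Illustrative call (the type id and schema path are examples only):
 
 ```bash
 # expect 200 and compare the returned schema with a local schema file
 ics_api_a1_get_type 200 type1 testdata/ics/ei-type-1.json
 ```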
 
-## Function: ecs_api_a1_get_type_ids() ##
+## Function: ics_api_a1_get_type_ids() ##
 
 Test of GET '/A1-EI/v1/eitypes' and optional check of returned list of type ids.
 To test the response code only, provide only the response code.
@@ -1223,7 +1212,7 @@ To also test the response payload add the list of expected type ids (or EMPTY if
 | `EMPTY` | The expected list of type ids shall be empty  |
 | `<type-id>` | Id of the EI type  |
 
-## Function: ecs_api_a1_get_job_status() ##
+## Function: ics_api_a1_get_job_status() ##
 
 Test of GET '/A1-EI/v1/eitypes/{eiTypeId}/eijobs/{eiJobId}/status' and optional check of the returned status.
 To test the response code only, provide the response code, type id and job id.
@@ -1240,7 +1229,7 @@ To also test the response payload add the expected status.
 | `<job-id>` | Id of the job  |
 | `<status>` | Expected status  |
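 
 Illustrative call (ids and status are examples only):
 
 ```bash
 # expect 200 and job status ENABLED
 ics_api_a1_get_job_status 200 type1 job1 ENABLED
 ```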
 
-## Function: ecs_api_a1_get_job() ##
+## Function: ics_api_a1_get_job() ##
 
 Test of GET '/A1-EI/v1/eitypes/{eiTypeId}/eijobs/{eiJobId}' and optional check of the returned job.
 To test the response code only, provide the response code, type id and job id.
@@ -1259,7 +1248,7 @@ To also test the response payload add the remaining parameters.
 | `<owner-id>` | Expected owner for the job  |
 | `<template-job-file>` | Path to a job template for job parameters of the job  |
 
-## Function: ecs_api_a1_delete_job() ##
+## Function: ics_api_a1_delete_job() ##
 
 Test of DELETE '/A1-EI/v1/eitypes/{eiTypeId}/eijobs/{eiJobId}'.
 To test, provide all the specified parameters.
@@ -1274,7 +1263,7 @@ To test, provide all the specified parameters.
 | `<type-id>` | Id of the EI type  |
 | `<job-id>` | Id of the job  |
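 
 Illustrative call (response code and ids are examples only):
 
 ```bash
 # expect 204 when deleting job1 of type1
 ics_api_a1_delete_job 204 type1 job1
 ```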
 
-## Function: ecs_api_a1_put_job() ##
+## Function: ics_api_a1_put_job() ##
 
 Test of PUT '/A1-EI/v1/eitypes/{eiTypeId}/eijobs/{eiJobId}'.
 To test, provide all the specified parameters.
@@ -1292,9 +1281,9 @@ To test, provide all the specified parameters.
 | `<owner-id>` | Owner of the job  |
 | `<template-job-file>` | Path to a job template for job parameters of the job  |
 
-## Function: ecs_api_edp_get_type_ids() ##
+## Function: ics_api_edp_get_type_ids() ##
 
-Test of GET '/ei-producer/v1/eitypes' or '/data-producer/v1/info-types' depending on ecs version and an optional check of the returned list of type ids.
+Test of GET '/ei-producer/v1/eitypes' or '/data-producer/v1/info-types' depending on ics version and an optional check of the returned list of type ids.
 To test the response code only, provide the response code.
 To also test the response payload add list of expected type ids (or EMPTY if the list is expected to be empty).
 
@@ -1308,9 +1297,9 @@ To also test the response payload add list of expected type ids (or EMPTY if the
 | `<type-id>` | Id of the type  |
 | `EMPTY` | The expected list of type ids shall be empty  |
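 
 Illustrative call (type ids are examples only):
 
 ```bash
 # expect 200 and a type list containing exactly type1 and type2
 ics_api_edp_get_type_ids 200 type1 type2
 ```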
 
-## Function: ecs_api_edp_get_producer_status() ##
+## Function: ics_api_edp_get_producer_status() ##
 
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/status' or '/data-producer/v1/info-producers/{infoProducerId}/status' depending on ecs version and optional check of the returned status.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/status' or '/data-producer/v1/info-producers/{infoProducerId}/status' depending on ics version and optional check of the returned status.
 To test the response code only, provide the response code and producer id.
 To also test the response payload add the expected status.
 
@@ -1324,7 +1313,7 @@ To also test the response payload add the expected status.
 | `<producer-id>` | Id of the producer  |
 | `<status>` | The expected status string  |
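 
 Illustrative call (producer id and status string are examples only):
 
 ```bash
 # expect 200 and producer status ENABLED
 ics_api_edp_get_producer_status 200 prod-a ENABLED
 ```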
 
-## Function: ecs_api_edp_get_producer_ids() ##
+## Function: ics_api_edp_get_producer_ids() ##
 
 Test of GET '/ei-producer/v1/eiproducers' and optional check of the returned producer ids.
 To test the response code only, provide the response.
@@ -1340,9 +1329,9 @@ To also test the response payload add the list of expected producer-ids (or EMPT
 | `<producer-id>` | Id of the producer  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_edp_get_producer_ids_2() ##
+## Function: ics_api_edp_get_producer_ids_2() ##
 
-Test of GET '/ei-producer/v1/eiproducers' or '/data-producer/v1/info-producers' depending on ecs version and optional check of the returned producer ids.
+Test of GET '/ei-producer/v1/eiproducers' or '/data-producer/v1/info-producers' depending on ics version and optional check of the returned producer ids.
 To test the response code only, provide the response.
 To also test the response payload add the type (if any) and a list of expected producer-ids (or EMPTY if the list of ids is expected to be empty).
 
@@ -1358,7 +1347,7 @@ To also test the response payload add the type (if any) and a list of expected p
 | `<producer-id>` | Id of the producer  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_edp_get_type() ##
+## Function: ics_api_edp_get_type() ##
 
 Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' and optional check of the returned type.
 To test the response code only, provide the response and the type-id.
@@ -1376,9 +1365,9 @@ To also test the response payload add a path to a job schema file and a list exp
 | `<producer-id>` | Id of the producer  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_edp_get_type_2() ##
+## Function: ics_api_edp_get_type_2() ##
 
-Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
 To test the response code only, provide the response and the type-id.
 To also test the response payload add a path to a job schema file.
 
@@ -1393,9 +1382,9 @@ To also test the response payload add a path to a job schema file.
 | `<job-schema-file>` | Path to a job schema file  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_edp_put_type_2() ##
+## Function: ics_api_edp_put_type_2() ##
 
-Test of PUT '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of PUT '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
 
 | arg list |
 |--|
@@ -1408,9 +1397,9 @@ Test of PUT '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-type
 | `<job-schema-file>` | Path to a job schema file  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_edp_delete_type_2() ##
+## Function: ics_api_edp_delete_type_2() ##
 
-Test of DELETE '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of DELETE '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
 
 | arg list |
 |--|
@@ -1421,7 +1410,7 @@ Test of DELETE '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-t
 | `<response-code>` | Expected http response code |
 | `<type-id>` | Id of the type  |
 
-## Function: ecs_api_edp_get_producer() ##
+## Function: ics_api_edp_get_producer() ##
 
 Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' and optional check of the returned producer.
 To test the response code only, provide the response and the producer-id.
@@ -1442,9 +1431,9 @@ To also test the response payload add the remaining parameters defining thee pro
 | `<schema-file>` | Path to a schema file  |
 | `EMPTY` | The expected list of type schema pairs shall be empty  |
 
-## Function: ecs_api_edp_get_producer_2() ##
+## Function: ics_api_edp_get_producer_2() ##
 
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version and optional check of the returned producer.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version and optional check of the returned producer.
 To test the response code only, provide the response and the producer-id.
 To also test the response payload add the remaining parameters defining the producer.
 
@@ -1461,9 +1450,9 @@ To also test the response payload add the remaining parameters defining thee pro
 | `<type-id>` | Id of the type  |
 | `EMPTY` | The expected list of types shall be empty  |
 
-## Function: ecs_api_edp_delete_producer() ##
+## Function: ics_api_edp_delete_producer() ##
 
-Test of DELETE '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version.
+Test of DELETE '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version.
 To test, provide all parameters.
 
 | arg list |
@@ -1475,7 +1464,7 @@ To test, provide all parameters.
 | `<response-code>` | Expected http response code |
 | `<producer-id>` | Id of the producer  |
 
-## Function: ecs_api_edp_put_producer() ##
+## Function: ics_api_edp_put_producer() ##
 
 Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}'.
 To test, provide all parameters. The list of type/schema pair may be empty.
@@ -1494,9 +1483,9 @@ To test, provide all parameters. The list of type/schema pair may be empty.
 | `<schema-file>` | Path to a schema file  |
 | `EMPTY` | The list of type/schema pairs is empty  |
 
-## Function: ecs_api_edp_put_producer_2() ##
+## Function: ics_api_edp_put_producer_2() ##
 
-Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version.
+Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version.
 To test, provide all parameters. The list of type/schema pair may be empty.
 
 | arg list |
@@ -1512,7 +1501,7 @@ To test, provide all parameters. The list of type/schema pair may be empty.
 | `<type-id>` | Id of the type  |
 | `NOTYPE` | The list of types is empty  |
 
-## Function: ecs_api_edp_get_producer_jobs() ##
+## Function: ics_api_edp_get_producer_jobs() ##
 
 Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' and optional check of the returned producer job.
 To test the response code only, provide the response and the producer-id.
@@ -1533,9 +1522,9 @@ To also test the response payload add the remaining parameters.
 | `<template-job-file>` | Path to a job template file  |
 | `EMPTY` | The list of job/type/target/job-file tuples is empty  |
 
-## Function: ecs_api_edp_get_producer_jobs_2() ##
+## Function: ics_api_edp_get_producer_jobs_2() ##
 
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' or '/data-producer/v1/info-producers/{infoProducerId}/info-jobs' depending on ecs version and optional check of the returned producer job.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' or '/data-producer/v1/info-producers/{infoProducerId}/info-jobs' depending on ics version and optional check of the returned producer job.
 To test the response code only, provide the response and the producer-id.
 To also test the response payload add the remaining parameters.
 
@@ -1554,7 +1543,7 @@ To also test the response payload add the remaining parameters.
 | `<template-job-file>` | Path to a job template file  |
 | `EMPTY` | The list of job/type/target/job-file tuples is empty  |
 
-## Function: ecs_api_service_status() ##
+## Function: ics_api_service_status() ##
 
 Test of GET '/status'.
 
@@ -1566,7 +1555,7 @@ Test of GET '/status'.
 | --------- | ----------- |
 | `<response-code>` | Expected http response code |
 
-## Function: ecs_api_idc_get_type_ids() ##
+## Function: ics_api_idc_get_type_ids() ##
 
 Test of GET '/data-consumer/v1/info-types' and an optional check of the returned list of type ids.
 To test the response code only, provide the response code.
@@ -1582,7 +1571,7 @@ To also test the response payload add list of expected type ids (or EMPTY if the
 | `<type-id>` | Id of the Info type  |
 | `EMPTY` | The expected list of type ids shall be empty  |
 
-## Function: ecs_api_idc_get_job_ids() ##
+## Function: ics_api_idc_get_job_ids() ##
 
 Test of GET '/data-consumer/v1/info-jobs' and optional check of the array of returned job ids.
 To test the response code only, provide the response code parameter as well as a type id and an owner id.
@@ -1601,7 +1590,7 @@ To also test the response payload add the 'EMPTY' for an expected empty array or
 | `<job-id>` | Id of the expected job  |
 | `EMPTY` | The expected list of job id shall be empty  |
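 
 Illustrative call (type and owner ids are examples only):
 
 ```bash
 # expect 200 and an empty job list for type1/owner1
 ics_api_idc_get_job_ids 200 type1 owner1 EMPTY
 ```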
 
-## Function: ecs_api_idc_get_job() ##
+## Function: ics_api_idc_get_job() ##
 
 Test of GET '/data-consumer/v1/info-jobs/{infoJobId}' and optional check of the returned job.
 To test the response code only, provide the response code, type id and job id.
@@ -1620,7 +1609,7 @@ To also test the response payload add the remaining parameters.
 | `<owner-id>` | Expected owner for the job  |
 | `<template-job-file>` | Path to a job template for job parameters of the job  |
 
-## Function: ecs_api_idc_put_job() ##
+## Function: ics_api_idc_put_job() ##
 
 Test of PUT '/data-consumer/v1/info-jobs/{infoJobId}'.
 To test, provide all the specified parameters.
@@ -1639,7 +1628,7 @@ To test, provide all the specified parameters.
 | `<template-job-file>` | Path to a job template for job parameters of the job  |
 | `VALIIDATE` | Indicator to perform type validation at creation  |
 
-## Function: ecs_api_idc_delete_job() ##
+## Function: ics_api_idc_delete_job() ##
 
 Test of DELETE '/data-consumer/v1/info-jobs/{infoJobId}'.
 To test, provide all the specified parameters.
@@ -1654,7 +1643,7 @@ To test, provide all the specified parameters.
 | `<type-id>` | Id of the type  |
 | `<job-id>` | Id of the job  |
 
-## Function: ecs_api_idc_get_type() ##
+## Function: ics_api_idc_get_type() ##
 
 Test of GET '/data-consumer/v1/info-types/{infoTypeId}' and optional check of the returned schema.
 To test the response code only, provide the response code parameter as well as the type-id.
@@ -1670,7 +1659,7 @@ To also test the response payload add a path to the expected schema file.
 | `<type-id>` | Id of the Info type  |
 | `<schema-file>` | Path to a schema file to compare with the returned schema  |
 
-## Function: ecs_api_idc_get_job_status() ##
+## Function: ics_api_idc_get_job_status() ##
 
 Test of GET '/data-consumer/v1/info-jobs/{infoJobId}/status' and optional check of the returned status and timeout.
 To test the response code only, provide the response code and job id.
@@ -1687,7 +1676,7 @@ To also test the response payload add the expected status.
 | `<status>` | Expected status  |
 | `<timeout>` | Timeout |
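 
 Illustrative call (job id, status and timeout are examples only):
 
 ```bash
 # expect 200 and status ENABLED with a 30 second timeout value
 ics_api_idc_get_job_status 200 job1 ENABLED 30
 ```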
 
-## Function: ecs_api_idc_get_job_status2() ##
+## Function: ics_api_idc_get_job_status2() ##
 
 Test of GET '/data-consumer/v1/info-jobs/{infoJobId}/status' with returned producers and optional check of the returned status and timeout.
 To test the response code only, provide the response code and job id.
@@ -1708,7 +1697,7 @@ To also test the response payload add the expected status.
 | `<timeout>` | Timeout |
 
 
-## Function: ecs_api_idc_get_subscription_ids() ##
+## Function: ics_api_idc_get_subscription_ids() ##
 Test of GET '/data-consumer/v1/info-type-subscription' with the returned list of subscription ids
 
 | arg list |
@@ -1723,7 +1712,7 @@ Test of GET '/data-consumer/v1/info-type-subscription' with the returned list of
 | `<EMPTY>` | Indicator for an empty list of subscription ids  |
 | `<subscription-id>` | Id of the subscription  |
 
-## Function: ecs_api_idc_get_subscription() ##
+## Function: ics_api_idc_get_subscription() ##
 Test of GET '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the subscription information
 
 | arg list |
@@ -1738,7 +1727,7 @@ Test of GET '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the
 | `<status-uri>` | Url for status notifications  |
 
 
-## Function: ecs_api_idc_put_subscription() ##
+## Function: ics_api_idc_put_subscription() ##
 Test of PUT '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the subscription information
 
 | arg list |
@@ -1752,7 +1741,7 @@ Test of PUT '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the
 | `<owner-id>` | Id of the owner  |
 | `<status-uri>` | Url for status notifications  |
 
-## Function: ecs_api_idc_delete_subscription() ##
+## Function: ics_api_idc_delete_subscription() ##
 Test of DELETE '/data-consumer/v1/info-type-subscription/{subscriptionId}'
 
 | arg list |
@@ -1765,7 +1754,7 @@ Test of DELETE /data-consumer/v1/info-type-subscription/{subscriptionId}
 | `<subscription-id>` |Id of the subscription  |
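 
 Illustrative call (subscription id is an example only):
 
 ```bash
 # expect 204 when deleting subscription1
 ics_api_idc_delete_subscription 204 subscription1
 ```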
 
 
-## Function: ecs_api_admin_reset() ##
+## Function: ics_api_admin_reset() ##
 
 Test of GET '/status'.
 
@@ -1826,9 +1815,9 @@ Sample test of pms api (status)
 | --------- | ----------- |
 | `<response-code>` | Expected http response code |
 
-## Function: gateway_ecs_get_types ##
+## Function: gateway_ics_get_types ##
 
-Sample test of ecs api (get types)
+Sample test of ics api (get types)
 Only response code tested - not payload
 | arg list |
 |--|
index f2777eb..85794f8 100644 (file)
 #  ============LICENSE_END=================================================
 #
 
-# Generic function to query the agent/ECS via the REST or DMAAP interface.
-# Used by all other agent/ECS api test functions
+# Generic function to query the agent/ICS via the REST or DMAAP interface.
+# Used by all other agent/ICS api test functions
 # If operation suffix is '_BATCH' then the send and get response is split in two sequences,
 # one for sending the requests and one for receiving the response
 # but only when using the DMAAP interface
 # REST or DMAAP is controlled by the base url of $XX_ADAPTER
-# arg: (PA|ECS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file> [mime-type]]) | (PA|ECS RESPONSE <correlation-id>)
+# arg: (PA|ICS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file> [mime-type]]) | (PA|ICS RESPONSE <correlation-id>)
 # Default mime type for file is application/json unless specified in parameter mime-type
 # (Not for test scripts)
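 # Illustrative call from an api function (target and url are examples only):
 #   res="$(__do_curl_to_api ICS GET /status)"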
 __do_curl_to_api() {
@@ -49,10 +49,10 @@ __do_curl_to_api() {
                        if [ $PMS_VERSION != "V1" ]; then
                                input_url=$PMS_API_PREFIX$3
                        fi
-        elif [ $1 == "ECS" ]; then
-                       __ADAPTER=$ECS_ADAPTER
-                       __ADAPTER_TYPE=$ECS_ADAPTER_TYPE
-            __RETRY_CODES=$ECS_RETRY_CODES
+        elif [ $1 == "ICS" ]; then
+                       __ADAPTER=$ICS_ADAPTER
+                       __ADAPTER_TYPE=$ICS_ADAPTER_TYPE
+            __RETRY_CODES=$ICS_RETRY_CODES
                elif [ $1 == "CR" ]; then
                        __ADAPTER=$CR_ADAPTER
                        __ADAPTER_TYPE=$CR_ADAPTER_TYPE
@@ -81,6 +81,10 @@ __do_curl_to_api() {
                        __ADAPTER=$MR_DMAAP_ADAPTER_HTTP
                        __ADAPTER_TYPE=$MR_DMAAP_ADAPTER_TYPE
             __RETRY_CODES=""
+        elif [ $1 == "KAFKAPC" ]; then
+                       __ADAPTER=$KAFKAPC_ADAPTER
+                       __ADAPTER_TYPE=$KAFKAPC_ADAPTER_TYPE
+            __RETRY_CODES=""
                else
             paramError=1
         fi
@@ -139,7 +143,6 @@ __do_curl_to_api() {
                        if [ $# -ne 3 ]; then
                                paramError=1
                        fi
-                       #if [ $__ADAPTER == $__RESTBASE ] || [ $__ADAPTER == $__RESTBASE_SECURE ]; then
                        if [ $__ADAPTER_TYPE == "REST" ]; then
                                paramError=1
                        fi
@@ -151,13 +154,12 @@ __do_curl_to_api() {
     if [ $paramError -eq 1 ]; then
                ((RES_CONF_FAIL++))
         echo "-Incorrect number of parameters to __do_curl_to_api " $@ >> $HTTPLOG
-        echo "-Expected: (PA|ECS GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url> [<file>]) | (PA|ECS RESPONSE <correlation-id>)" >> $HTTPLOG
+        echo "-Expected: (PA|ICS GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url> [<file> [mime-type]]) | (PA|ICS RESPONSE <correlation-id>)" >> $HTTPLOG
         echo "-Returning response 000" >> $HTTPLOG
         echo "-000"
         return 1
     fi
 
-    #if [ $__ADAPTER == $__RESTBASE ] || [ $__ADAPTER == $__RESTBASE_SECURE ]; then
        if [ $__ADAPTER_TYPE == "REST" ]; then
         url=" "${__ADAPTER}${input_url}
         oper=" -X "$oper
@@ -248,7 +250,7 @@ __do_curl_to_api() {
                        echo " RESP: "$res >> $HTTPLOG
                        status=${res:${#res}-3}
                        TS=$SECONDS
-                       # wait of the reply from the agent/ECS...
+			# wait for the reply from the agent/ICS...
                        while [ $status -eq 204 ]; do
                                if [ $(($SECONDS - $TS)) -gt 90 ]; then
                                        echo " RETCODE: (timeout after 90s)" >> $HTTPLOG
diff --git a/test/common/cbs_api_functions.sh b/test/common/cbs_api_functions.sh
new file mode 100644 (file)
index 0000000..f08b250
--- /dev/null
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# Functions for CBS are included in consul_api_functions.sh
+
+
+
index 405c194..a3a3e5b 100755 (executable)
@@ -90,7 +90,7 @@ __kube_delete_all_resources() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid in namespace $namespace with label autotest "
-                               kubectl delete $restype $resid -n $namespace 1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace 1> /dev/null 2> /dev/null
                        done
                fi
        done
@@ -104,7 +104,7 @@ __kube_delete_all_pv() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid with label autotest "
-                               kubectl delete $restype $resid 1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid 1> /dev/null 2> /dev/null
                        done
                fi
        done
@@ -119,7 +119,7 @@ __kube_wait_for_delete() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid in namespace $namespace with label autotest "
-                               kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
                                echo -ne "  Waiting for $restype $resid in namespace $namespace with label autotest to be deleted..."$SAMELINE
                                T_START=$SECONDS
                                result="dummy"
@@ -147,7 +147,7 @@ __kube_wait_for_delete_pv() {
                if [ $? -eq 0 ] && [ ! -z "$result" ]; then
                        for resid in $result; do
                                echo  "  Deleting $restype $resid with label autotest "
-                               kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+                               kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
                                echo -ne "  Waiting for $restype $resid with label autotest to be deleted..."$SAMELINE
                                T_START=$SECONDS
                                result="dummy"
similarity index 68%
rename from test/common/consul_cbs_functions.sh
rename to test/common/consul_api_functions.sh
index cd1b16c..af85ff3 100644 (file)
@@ -235,123 +235,6 @@ consul_config_app() {
 
 }
 
-# Function to perpare the consul configuration according to the current simulator configuration
-# args: SDNC|NOSDNC <output-file>
-# (Function for test scripts)
-prepare_consul_config() {
-       echo -e $BOLD"Prepare Consul config"$EBOLD
-
-       echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
-
-       if [ $# != 2 ];  then
-               ((RES_CONF_FAIL++))
-       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
-               exit 1
-       fi
-
-       if [ $1 == "SDNC" ]; then
-               echo -e " Config$BOLD including SDNC$EBOLD configuration"
-       elif [ $1 == "NOSDNC" ];  then
-               echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
-       else
-               ((RES_CONF_FAIL++))
-       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
-               exit 1
-       fi
-
-       config_json="\n            {"
-       if [ $1 == "SDNC" ]; then
-               config_json=$config_json"\n   \"controller\": ["
-               config_json=$config_json"\n                     {"
-               config_json=$config_json"\n                       \"name\": \"$SDNC_APP_NAME\","
-               config_json=$config_json"\n                       \"baseUrl\": \"$SDNC_SERVICE_PATH\","
-               config_json=$config_json"\n                       \"userName\": \"$SDNC_USER\","
-               config_json=$config_json"\n                       \"password\": \"$SDNC_PWD\""
-               config_json=$config_json"\n                     }"
-               config_json=$config_json"\n   ],"
-       fi
-
-       config_json=$config_json"\n   \"streams_publishes\": {"
-       config_json=$config_json"\n                            \"dmaap_publisher\": {"
-       config_json=$config_json"\n                              \"type\": \"message-router\","
-       config_json=$config_json"\n                              \"dmaap_info\": {"
-       config_json=$config_json"\n                                \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
-       config_json=$config_json"\n                              }"
-       config_json=$config_json"\n                            }"
-       config_json=$config_json"\n   },"
-       config_json=$config_json"\n   \"streams_subscribes\": {"
-       config_json=$config_json"\n                             \"dmaap_subscriber\": {"
-       config_json=$config_json"\n                               \"type\": \"message-router\","
-       config_json=$config_json"\n                               \"dmaap_info\": {"
-       config_json=$config_json"\n                                   \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
-       config_json=$config_json"\n                                 }"
-       config_json=$config_json"\n                               }"
-       config_json=$config_json"\n   },"
-
-       config_json=$config_json"\n   \"ric\": ["
-
-       if [ $RUNMODE == "KUBE" ]; then
-               result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
-               rics=""
-               ric_cntr=0
-               if [ $? -eq 0 ] && [ ! -z "$result" ]; then
-                       for im in $result; do
-                               if [[ $im != *"-0" ]]; then
-                                       ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
-                                       rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
-                                       let ric_cntr=ric_cntr+1
-                               fi
-                       done
-               fi
-               if [ $ric_cntr -eq 0 ]; then
-                       echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
-               fi
-       else
-               rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
-               if [ $? -ne 0 ] || [ -z "$rics" ]; then
-                       echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
-                       ((RES_CONF_FAIL++))
-                       return 1
-               fi
-       fi
-       cntr=0
-       for ric in $rics; do
-               if [ $cntr -gt 0 ]; then
-                       config_json=$config_json"\n          ,"
-               fi
-               config_json=$config_json"\n          {"
-               if [ $RUNMODE == "KUBE" ]; then
-                       ric_id=${ric%.*.*} #extract pod id from full hosthame
-                       ric_id=$(echo "$ric_id" | tr '-' '_')
-               else
-                       ric_id=$ric
-               fi
-               echo " Found a1 sim: "$ric_id
-               config_json=$config_json"\n            \"name\": \"$ric_id\","
-               config_json=$config_json"\n            \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
-               if [ $1 == "SDNC" ]; then
-                       config_json=$config_json"\n            \"controller\": \"$SDNC_APP_NAME\","
-               fi
-               config_json=$config_json"\n            \"managedElementIds\": ["
-               config_json=$config_json"\n              \"me1_$ric_id\","
-               config_json=$config_json"\n              \"me2_$ric_id\""
-               config_json=$config_json"\n            ]"
-               config_json=$config_json"\n          }"
-               let cntr=cntr+1
-       done
-
-       config_json=$config_json"\n           ]"
-       config_json=$config_json"\n}"
-
-       if [ $RUNMODE == "KUBE" ]; then
-               config_json="{\"config\":"$config_json"}"
-       fi
-
-       printf "$config_json">$2
-
-       echo ""
-}
-
 # Start Consul and CBS
 # args: -
 # (Function for test scripts)
similarity index 95%
rename from test/common/control_panel_api_functions.sh
rename to test/common/cp_api_functions.sh
index 295e16a..992fd68 100644 (file)
@@ -129,7 +129,7 @@ use_control_panel_https() {
 # args: <protocol> <internal-port> <external-port>
 __control_panel_set_protocoll() {
        echo -e $BOLD"$CONTROL_PANEL_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $CONTROL_PANEL_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $CONTROL_PANEL_DISPLAY_NAME"
 
        CP_SERVICE_PATH=$1"://"$CONTROL_PANEL_APP_NAME":"$2
        if [ $RUNMODE == "KUBE" ]; then
@@ -160,21 +160,21 @@ __control_panel_export_vars() {
        export CP_PROXY_CONFIGMAP_NAME=$CONTROL_PANEL_APP_NAME"-proxy"
 
        export CONTROL_PANEL_PATH_POLICY_PREFIX
-       export CONTROL_PANEL_PATH_ECS_PREFIX
-       export CONTROL_PANEL_PATH_ECS_PREFIX2
+       export CONTROL_PANEL_PATH_ICS_PREFIX
+       export CONTROL_PANEL_PATH_ICS_PREFIX2
 
        export NRT_GATEWAY_APP_NAME
        export NRT_GATEWAY_EXTERNAL_PORT
 
        export POLICY_AGENT_EXTERNAL_SECURE_PORT
-       export ECS_EXTERNAL_SECURE_PORT
+       export ICS_EXTERNAL_SECURE_PORT
 
        if [ $RUNMODE == "KUBE" ]; then
                export NGW_DOMAIN_NAME=$NRT_GATEWAY_APP_NAME.$KUBE_NONRTRIC_NAMESPACE.svc.cluster.local  # suffix needed for nginx name resolution
                export CP_NGINX_RESOLVER=$CONTROL_PANEL_NGINX_KUBE_RESOLVER
        else
                export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME
-               export ECS_DOMAIN_NAME=$ECS_APP_NAME
+               export ICS_DOMAIN_NAME=$ICS_APP_NAME
 
                export NGW_DOMAIN_NAME=$NRT_GATEWAY_APP_NAME
                export CP_NGINX_RESOLVER=$CONTROL_PANEL_NGINX_DOCKER_RESOLVER
@@ -270,7 +270,7 @@ start_control_panel() {
 
                dest_file=$SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_HOST_MNT_DIR/$CONTROL_PANEL_CONFIG_FILE
 
-               envsubst '${NGW_DOMAIN_NAME},${CP_NGINX_RESOLVER},${NRT_GATEWAY_EXTERNAL_PORT},${POLICY_AGENT_EXTERNAL_SECURE_PORT},${ECS_EXTERNAL_SECURE_PORT},${POLICY_AGENT_DOMAIN_NAME},${ECS_DOMAIN_NAME},${CONTROL_PANEL_PATH_POLICY_PREFIX},${CONTROL_PANEL_PATH_ECS_PREFIX} ,${CONTROL_PANEL_PATH_ECS_PREFIX2}' < $1 > $dest_file
+               envsubst '${NGW_DOMAIN_NAME},${CP_NGINX_RESOLVER},${NRT_GATEWAY_EXTERNAL_PORT},${POLICY_AGENT_EXTERNAL_SECURE_PORT},${ICS_EXTERNAL_SECURE_PORT},${POLICY_AGENT_DOMAIN_NAME},${ICS_DOMAIN_NAME},${CONTROL_PANEL_PATH_POLICY_PREFIX},${CONTROL_PANEL_PATH_ICS_PREFIX} ,${CONTROL_PANEL_PATH_ICS_PREFIX2}' < $1 > $dest_file
 
                __start_container $CONTROL_PANEL_COMPOSE_DIR "" NODOCKERARGS 1 $CONTROL_PANEL_APP_NAME
 
index a537bc8..5116273 100644 (file)
@@ -94,9 +94,14 @@ __CR_kube_delete_all() {
 # args: <log-dir> <file-prexix>
 __CR_store_docker_logs() {
        if [ $RUNMODE == "KUBE" ]; then
-               kubectl  logs -l "autotest=CR" -n $KUBE_SIM_NAMESPACE --tail=-1 > $1$2_cr.log 2>&1
+               for podname in $(kubectl get pods -n $KUBE_SIM_NAMESPACE -l "autotest=CR" -o custom-columns=":metadata.name"); do
+                       kubectl logs -n $KUBE_SIM_NAMESPACE $podname --tail=-1 > $1$2_$podname.log 2>&1
+               done
        else
-               docker logs $CR_APP_NAME > $1$2_cr.log 2>&1
+               crs=$(docker ps --filter "name=$CR_APP_NAME" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+               for crid in $crs; do
+                       docker logs $crid > $1$2_$crid.log 2>&1
+               done
        fi
 }
 
@@ -112,11 +117,18 @@ __CR_initial_setup() {
 # This function is called for apps managed by the test script as well as for prestarted apps.
 # args: -
 __CR_statisics_setup() {
-       if [ $RUNMODE == "KUBE" ]; then
-               echo "CR $CR_APP_NAME $KUBE_SIM_NAMESPACE"
-       else
-               echo "CR $CR_APP_NAME"
-       fi
+       for ((CR_INSTANCE=MAX_CR_APP_COUNT; CR_INSTANCE>0; CR_INSTANCE-- )); do
+               if [ $RUNMODE == "KUBE" ]; then
+                       CR_INSTANCE_KUBE=$(($CR_INSTANCE-1))
+                       echo -n " CR-$CR_INSTANCE_KUBE $CR_APP_NAME-$CR_INSTANCE_KUBE $KUBE_SIM_NAMESPACE "
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}_cr_$CR_INSTANCE "
+                       else
+                               echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}-cr-$CR_INSTANCE "
+                       fi
+               fi
+       done
 }
 
 #######################################################
@@ -125,6 +137,10 @@ __CR_statisics_setup() {
 ### CR functions
 ################
 
+#Var to hold the current number of CR instances
+CR_APP_COUNT=1
+MAX_CR_APP_COUNT=10
+
 # Set http as the protocol to use for all communication to the Callback receiver
 # args: -
 # (Function for test scripts)
@@ -142,25 +158,34 @@ use_cr_https() {
 # Setup paths to svc/container for internal and external access
 # args: <protocol> <internal-port> <external-port>
 __cr_set_protocoll() {
-       echo -e $BOLD"$CR_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $CR_DISPLAY_NAME"
 
+       echo -e $BOLD"$CR_DISPLAY_NAME protocol setting"$EBOLD
+       echo -e " Using $BOLD $1 $EBOLD towards $CR_DISPLAY_NAME"
 	## Access to the Callback receiver
-
-       # CR_SERVICE_PATH is the base path to cr
-       CR_SERVICE_PATH=$1"://"$CR_APP_NAME":"$2  # docker access, container->container and script->container via proxy
-       if [ $RUNMODE == "KUBE" ]; then
-               CR_SERVICE_PATH=$1"://"$CR_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
-       fi
-       # Service paths are used in test script to provide callbacck urls to app
-       CR_SERVICE_MR_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK_MR  #Only for messages from dmaap adapter/mediator
-       CR_SERVICE_TEXT_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK_TEXT  #Callbacks for text payload
-       CR_SERVICE_APP_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK    #For general callbacks from apps
-
-       # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
-       CR_ADAPTER_TYPE="REST"
-       CR_ADAPTER=$CR_SERVICE_PATH
-
+       for ((CR_INSTANCE=0; CR_INSTANCE<$MAX_CR_APP_COUNT; CR_INSTANCE++ )); do
+               CR_DOCKER_INSTANCE=$(($CR_INSTANCE+1))
+               # CR_SERVICE_PATH is the base path to cr
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"_cr_"${CR_DOCKER_INSTANCE}":"$2  # docker access, container->container and script->container via proxy
+               else
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-cr-"${CR_DOCKER_INSTANCE}":"$2  # docker access, container->container and script->container via proxy
+               fi
+               if [ $RUNMODE == "KUBE" ]; then
+                       __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-"$CR_INSTANCE.$CR_APP_NAME"."$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+               fi
+               export CR_SERVICE_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH
+		# Service paths are used in test scripts to provide callback urls to apps
+               export CR_SERVICE_MR_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_MR  #Only for messages from dmaap adapter/mediator
+               export CR_SERVICE_TEXT_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_TEXT  #Callbacks for text payload
+               export CR_SERVICE_APP_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK    #For general callbacks from apps
+
+               if [ $CR_INSTANCE -eq 0 ]; then
+                       # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
+			# CR_ADAPTER needs to be set before each call to CR - only set for instance 0 here
+                       CR_ADAPTER_TYPE="REST"
+                       CR_ADAPTER=$__CR_SERVICE_PATH
+               fi
+       done
        echo ""
 }
 
@@ -179,15 +204,27 @@ __cr_export_vars() {
        export CR_INTERNAL_SECURE_PORT
        export CR_EXTERNAL_PORT
        export CR_EXTERNAL_SECURE_PORT
+
+       export CR_APP_COUNT
 }
 
 # Start the Callback receiver in the simulator group
-# args: -
+# args: <app-count>
 # (Function for test scripts)
 start_cr() {
 
        echo -e $BOLD"Starting $CR_DISPLAY_NAME"$EBOLD
 
+       if [ $# -ne 1 ]; then
+               echo -e $RED" Number of CR instances missing, usage: start_cr <app-count>"$ERED
+               exit 1
+       fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10 ]; then
+               echo -e $RED" Number of CR shall be 1...10, usage: start_cr <app-count>"$ERED
+               exit 1
+       fi
+       export CR_APP_COUNT=$1
+
        if [ $RUNMODE == "KUBE" ]; then
 
                # Check if app shall be fully managed by the test script
@@ -235,15 +272,13 @@ start_cr() {
 
                fi
 
-               __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+               for ((CR_INSTANCE=0; CR_INSTANCE<$CR_APP_COUNT; CR_INSTANCE++ )); do
+                       __dynvar="CR_SERVICE_PATH_"$CR_INSTANCE
+                       __cr_app_name=$CR_APP_NAME"-"$CR_INSTANCE
+                       __check_service_start $__cr_app_name ${!__dynvar}$CR_ALIVE_URL
+                       result=$(__do_curl ${!__dynvar}/reset)
+               done
 
-               echo -ne " Service $CR_APP_NAME - reset  "$SAMELINE
-               result=$(__do_curl CR $CR_SERVICE_PATH/reset)
-               if [ $? -ne 0 ]; then
-                       echo -e " Service $CR_APP_NAME - reset  $RED Failed $ERED - will continue"
-               else
-                       echo -e " Service $CR_APP_NAME - reset  $GREEN OK $EGREEN"
-               fi
        else
                # Check if docker app shall be fully managed by the test script
                __check_included_image 'CR'
@@ -255,65 +290,132 @@ start_cr() {
 
                __cr_export_vars
 
-               __start_container $CR_COMPOSE_DIR "" NODOCKERARGS 1 $CR_APP_NAME
+               app_data=""
+               cntr=1
+               while [ $cntr -le $CR_APP_COUNT ]; do
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$CR_APP_NAME"_cr_"$cntr
+                       else
+                               app=$CR_APP_NAME"-cr-"$cntr
+                       fi
+                       app_data="$app_data $app"
+                       let cntr=cntr+1
+               done
+
+               echo "COMPOSE_PROJECT_NAME="$CR_APP_NAME > $SIM_GROUP/$CR_COMPOSE_DIR/.env
+
+               __start_container $CR_COMPOSE_DIR "" NODOCKERARGS $CR_APP_COUNT $app_data
 
-        __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+		cntr=1   #Counter for docker instance, starts at 1
+		cntr2=0  #Counter for env var name, starts at 0 to be compatible with kube
+               while [ $cntr -le $CR_APP_COUNT ]; do
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$CR_APP_NAME"_cr_"$cntr
+                       else
+                               app=$CR_APP_NAME"-cr-"$cntr
+                       fi
+                       __dynvar="CR_SERVICE_PATH_"$cntr2
+                       __check_service_start $app ${!__dynvar}$CR_ALIVE_URL
+                       let cntr=cntr+1
+                       let cntr2=cntr2+1
+               done
        fi
        echo ""
 }
 
+#Convert a cr path id to the value of the environment var holding the url
+# arg: <cr-path-id>
+# returns: <base-url-to-the-app>
+__cr_get_service_path(){
+       if [ $# -ne 1 ]; then
+               echo "DUMMY"
+               return 1
+       fi
+       if [ $1 -lt 0 ] || [ $1 -ge $MAX_CR_APP_COUNT ]; then
+               echo "DUMMY"
+               return 1
+       fi
+       __dynvar="CR_SERVICE_PATH_"$1
+       echo ${!__dynvar}
+       return 0
+}
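+# Illustrative use (cr-path-id 0 is an example only):
+#   __cr_base_url=$(__cr_get_service_path 0)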
 
 # Tests if a variable value in the CR is equal to a target value, with an optional timeout.
 # Arg: <cr-path-id> <variable-name> <target-value> - This test sets pass or fail depending on whether the variable is
 # equal to the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
 # before setting pass or fail depending on if the variable value becomes equal to the target
 # value or not.
 # (Function for test scripts)
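 # Illustrative call (counter name and values are examples only):
 #   cr_equal 0 received_callbacks 3 30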
 cr_equal() {
-       if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-               __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "=" $2 $3
+       if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+               CR_SERVICE_PATH=$(__cr_get_service_path $1)
+               CR_ADAPTER=$CR_SERVICE_PATH
+               if [ $? -ne 0 ]; then
+                       __print_err "<cr-path-id> missing or incorrect" $@
+                       return 1
+               fi
+               __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "=" $3 $4
        else
-               __print_err "Wrong args to cr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+               __print_err "Wrong args to cr_equal, needs three or four args: <cr-path-id>  <variable-name> <target-value> [ timeout ]" $@
        fi
 }
 
 # Tests if a variable value in the CR contains the target string, with an optional timeout
 # Arg: <cr-path-id> <variable-name> <target-value> - This test sets pass or fail depending on whether the variable contains
 # the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
 # before setting pass or fail depending on if the variable value contains the target
 # value or not.
 # (Function for test scripts)
 cr_contains_str() {
 
-       if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-               __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "contain_str" $2 $3
+       if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+               CR_SERVICE_PATH=$(__cr_get_service_path $1)
+               CR_ADAPTER=$CR_SERVICE_PATH
+               if [ $? -ne 0 ]; then
+                       __print_err "<cr-path-id> missing or incorrect" $@
+                       return 1
+               fi
+               __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "contain_str" $3 $4
                return 0
        else
-               __print_err "needs two or three args: <sim-param> <target-value> [ timeout ]"
+		__print_err "needs three or four args: <cr-path-id> <variable-name> <target-value> [ timeout ]"
                return 1
        fi
 }
 
 # Read a variable value from CR sim and send to stdout. Arg: <cr-path-id> <variable-name>
 cr_read() {
+       CR_SERVICE_PATH=$(__cr_get_service_path $1)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
+               return  1
+       fi
 	echo "$(__do_curl $CR_SERVICE_PATH/counter/$2)"
 }
 
 # Function to configure write delay on callbacks
 # Delay given in seconds.
-# arg <response-code> <delay-in-sec>
+# arg <response-code> <cr-path-id>  <delay-in-sec>
 # (Function for test scripts)
 cr_delay_callback() {
        __log_conf_start $@
 
-       if [ $# -ne 2 ]; then
-        __print_err "<response-code> <delay-in-sec>]" $@
+       if [ $# -ne 3 ]; then
+        __print_err "<response-code> <cr-path-id> <delay-in-sec>" $@
         return 1
        fi
 
-       res="$(__do_curl_to_api CR POST /forcedelay?delay=$2)"
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
+               return 1
+       fi
+
+       res="$(__do_curl_to_api CR POST /forcedelay?delay=$3)"
        status=${res:${#res}-3}
 
        if [ $status -ne 200 ]; then
@@ -326,7 +428,7 @@ cr_delay_callback() {
 }
 
 # CR API: Check the contents of all current ric sync events for one id from PMS
-# <response-code> <id> [ EMPTY | ( <ric-id> )+ ]
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]
 # (Function for test scripts)
 cr_api_check_all_sync_events() {
        __log_test_start $@
@@ -336,12 +438,19 @@ cr_api_check_all_sync_events() {
                return 1
        fi
 
-    if [ $# -lt 2 ]; then
-        __print_err "<response-code> <id> [ EMPTY | ( <ric-id> )+ ]" $@
+    if [ $# -lt 3 ]; then
+        __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]" $@
         return 1
     fi
 
-       query="/get-all-events/"$2
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
+               return 1
+       fi
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -350,15 +459,15 @@ cr_api_check_all_sync_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
-               if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+               if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
                        targetJson="["
                else
                        targetJson="["
-                       arr=(${@:3})
+                       arr=(${@:4})
 
-                       for ((i=0; i<$(($#-2)); i=i+1)); do
+                       for ((i=0; i<$(($#-3)); i=i+1)); do
 
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
@@ -380,18 +489,25 @@ cr_api_check_all_sync_events() {
        return 0
 }
 
-# CR API: Check the contents of all current status events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <status> )+ ]
+# CR API: Check the contents of all current status events for one id from ICS
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]
 # (Function for test scripts)
-cr_api_check_all_ecs_events() {
+cr_api_check_all_ics_events() {
        __log_test_start $@
 
-    if [ $# -lt 2 ]; then
-        __print_err "<response-code> <id> [ EMPTY | ( <status> )+ ]" $@
+    if [ $# -lt 3 ]; then
+        __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]" $@
         return 1
     fi
 
-       query="/get-all-events/"$2
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
+               return 1
+       fi
+
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -400,15 +516,15 @@ cr_api_check_all_ecs_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
-               if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+               if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
                        targetJson="["
                else
                        targetJson="["
-                       arr=(${@:3})
+                       arr=(${@:4})
 
-                       for ((i=0; i<$(($#-2)); i=i+1)); do
+                       for ((i=0; i<$(($#-3)); i=i+1)); do
 
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
@@ -430,30 +546,37 @@ cr_api_check_all_ecs_events() {
        return 0
 }
 
-# CR API: Check the contents of all current type subscription events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
+# CR API: Check the contents of all current type subscription events for one id from ICS
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
 # (Function for test scripts)
-cr_api_check_all_ecs_subscription_events() {
+cr_api_check_all_ics_subscription_events() {
        __log_test_start $@
 
-       #Valid number of parameter 2,3,7,11
+       #Valid number of parameter 3,4,8,12
        paramError=1
-       if [ $# -eq 2 ]; then
+       if [ $# -eq 3 ]; then
                paramError=0
        fi
-       if [ $# -eq 3 ] && [ "$3" == "EMPTY" ]; then
+       if [ $# -eq 4 ] && [ "$4" == "EMPTY" ]; then
                paramError=0
        fi
-       variablecount=$(($#-2))
-       if [ $# -gt 3 ] && [ $(($variablecount%3)) -eq 0 ]; then
+       variablecount=$(($#-3))
+       if [ $# -gt 4 ] && [ $(($variablecount%3)) -eq 0 ]; then
                paramError=0
        fi
        if [ $paramError -eq 1 ]; then
-               __print_err "<response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
+               __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-all-events/"$2
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -462,12 +585,12 @@ cr_api_check_all_ecs_subscription_events() {
                return 1
        fi
 
-       if [ $# -gt 2 ]; then
+       if [ $# -gt 3 ]; then
                body=${res:0:${#res}-3}
                targetJson="["
-               if [ $# -gt 3 ]; then
-                       arr=(${@:3})
-                       for ((i=0; i<$(($#-3)); i=i+3)); do
+               if [ $# -gt 4 ]; then
+                       arr=(${@:4})
+                       for ((i=0; i<$(($#-4)); i=i+3)); do
                                if [ "$targetJson" != "[" ]; then
                                        targetJson=$targetJson","
                                fi
@@ -497,11 +620,23 @@ cr_api_check_all_ecs_subscription_events() {
 
 
 # CR API: Reset all events and counters
-# Arg: -
+# Arg: <cr-path-id>
 # (Function for test scripts)
 cr_api_reset() {
        __log_conf_start $@
 
+       if [ $# -ne 1 ]; then
+               __print_err "<cr-path-id>" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $1)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
+               return 1
+       fi
+
        res="$(__do_curl_to_api CR GET /reset)"
        status=${res:${#res}-3}
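
Usage sketch for the updated reset call (the instance id 0 is illustrative): a test script now selects which callback-receiver instance to reset.

    cr_api_reset 0
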
 
@@ -516,17 +651,24 @@ cr_api_reset() {
 
 
 # CR API: Check the contents of all json events for path
-# <response-code> <topic-url> (EMPTY | <json-msg>+ )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg>+ )
 # (Function for test scripts)
 cr_api_check_all_genric_json_events() {
        __log_test_start $@
 
-       if [ $# -lt 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <json-msg>+ )" $@
+       if [ $# -lt 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg>+ )" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-all-events/"$2
+       query="/get-all-events/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -537,7 +679,8 @@ cr_api_check_all_genric_json_events() {
        body=${res:0:${#res}-3}
        targetJson="["
 
-       if [ $3 != "EMPTY" ]; then
+       if [ $4 != "EMPTY" ]; then
+               shift
                shift
                shift
                while [ $# -gt 0 ]; do
@@ -563,19 +706,25 @@ cr_api_check_all_genric_json_events() {
 }
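
Usage sketch (illustrative topic and payloads, assuming callback-receiver instance 0): the <cr-path-id> is passed before the topic-url, both for the all-events check above and for the single-event variant that follows.

    # expect two queued json events on topic "test-topic" at CR instance 0
    cr_api_check_all_genric_json_events 200 0 test-topic '{"msg":"msg-1"}' '{"msg":"msg-2"}'
    # expect only the oldest json event on the same topic
    cr_api_check_single_genric_json_event 200 0 test-topic '{"msg":"msg-1"}'
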
 
 
-
 # CR API: Check a single (oldest) json event (or none if empty) for path
-# <response-code> <topic-url> (EMPTY | <json-msg> )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg> )
 # (Function for test scripts)
 cr_api_check_single_genric_json_event() {
        __log_test_start $@
 
-       if [ $# -ne 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <json-msg> )" $@
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg> )" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-event/"$2
+       query="/get-event/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -584,7 +733,7 @@ cr_api_check_single_genric_json_event() {
                return 1
        fi
        body=${res:0:${#res}-3}
-       targetJson=$3
+       targetJson=$4
 
        if [ $targetJson == "EMPTY" ] && [ ${#body} -ne 0 ]; then
                __log_test_fail_body
@@ -605,17 +754,24 @@ cr_api_check_single_genric_json_event() {
 # CR API: Check a single (oldest) json in md5 format (or none if empty) for path.
 # Note that if a json message is given, it shall be compact, no ws except inside string.
 # The MD5 will generate different hash if ws is present or not in otherwise equivalent json
-# arg: <response-code> <topic-url> (EMPTY | <data-msg> )
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )
 # (Function for test scripts)
 cr_api_check_single_genric_event_md5() {
        __log_test_start $@
 
-       if [ $# -ne 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <data-msg> )" $@
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-event/"$2
+       query="/get-event/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -624,7 +780,7 @@ cr_api_check_single_genric_event_md5() {
                return 1
        fi
        body=${res:0:${#res}-3}
-       if [ $3 == "EMPTY" ]; then
+       if [ $4 == "EMPTY" ]; then
                if [ ${#body} -ne 0 ]; then
                        __log_test_fail_body
                        return 1
@@ -635,11 +791,11 @@ cr_api_check_single_genric_event_md5() {
        fi
        command -v md5 > /dev/null # Mac
        if [ $? -eq 0 ]; then
-               targetMd5=$(echo -n "$3" | md5)
+               targetMd5=$(echo -n "$4" | md5)
        else
                command -v md5sum > /dev/null # Linux
                if [ $? -eq 0 ]; then
-                       targetMd5=$(echo -n "$3" | md5sum | cut -d' ' -f 1)  # Need to cut additional info printed by cmd
+                       targetMd5=$(echo -n "$4" | md5sum | cut -d' ' -f 1)  # Need to cut additional info printed by cmd
                else
                        __log_test_fail_general "Command md5 nor md5sum is available"
                        return 1
@@ -661,17 +817,24 @@ cr_api_check_single_genric_event_md5() {
 # CR API: Check a single (oldest) event in md5 format (or none if empty) for path.
 # Note that if a file with json message is given, the json shall be compact, no ws except inside string and not newlines.
 # The MD5 will generate different hash if ws/newlines is present or not in otherwise equivalent json
-# arg: <response-code> <topic-url> (EMPTY | <data-file> )
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )
 # (Function for test scripts)
 cr_api_check_single_genric_event_md5_file() {
        __log_test_start $@
 
-       if [ $# -ne 3 ]; then
-               __print_err "<response-code> <topic-url> (EMPTY | <data-file> )" $@
+       if [ $# -ne 4 ]; then
+               __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )" $@
+               return 1
+       fi
+
+       CR_SERVICE_PATH=$(__cr_get_service_path $2)
+       CR_ADAPTER=$CR_SERVICE_PATH
+       if [ $? -ne 0 ]; then
+               __print_err "<cr-path-id> missing or incorrect" $@
                return 1
        fi
 
-       query="/get-event/"$2
+       query="/get-event/"$3
        res="$(__do_curl_to_api CR GET $query)"
        status=${res:${#res}-3}
 
@@ -680,7 +843,7 @@ cr_api_check_single_genric_event_md5_file() {
                return 1
        fi
        body=${res:0:${#res}-3}
-       if [ $3 == "EMPTY" ]; then
+       if [ $4 == "EMPTY" ]; then
                if [ ${#body} -ne 0 ]; then
                        __log_test_fail_body
                        return 1
@@ -690,12 +853,12 @@ cr_api_check_single_genric_event_md5_file() {
                fi
        fi
 
-       if [ ! -f $3 ]; then
+       if [ ! -f $4 ]; then
-               __log_test_fail_general "File $3 does not exist"
+               __log_test_fail_general "File $4 does not exist"
                return 1
        fi
 
-       filedata=$(cat $3)
+       filedata=$(cat $4)
 
        command -v md5 > /dev/null # Mac
        if [ $? -eq 0 ]; then
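
The compact-json requirement noted for the two md5 variants can be seen directly with md5sum: the same logical json produces different hashes when whitespace differs, so the message (or file) passed to these functions must be byte-identical to what was sent.

    # same logical json, different bytes - the two hashes differ
    echo -n '{"msg":"x"}' | md5sum
    echo -n '{ "msg": "x" }' | md5sum
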
index 9b7571f..a9605ec 100644 (file)
@@ -124,7 +124,7 @@ use_dmaapadp_https() {
 # args: <protocol> <internal-port> <external-port>
 __dmaapadp_set_protocoll() {
        echo -e $BOLD"$DMAAP_ADP_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $DMAAP_ADP_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $DMAAP_ADP_DISPLAY_NAME"
 
        ## Access to Dmaap adapter
 
@@ -185,7 +185,7 @@ __dmaapadp_export_vars() {
 
 
        # paths to other components
-       export ECS_SERVICE_PATH
+       export ICS_SERVICE_PATH
        export DMAAP_ADP_SERVICE_PATH
        export MR_SERVICE_PATH
 
index 5188a45..35280a4 100644 (file)
@@ -124,7 +124,7 @@ use_dmaapmed_https() {
 # args: <protocol> <internal-port> <external-port>
 __dmaapmed_set_protocoll() {
        echo -e $BOLD"$DMAAP_MED_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $DMAAP_MED_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $DMAAP_MED_DISPLAY_NAME"
 
        ## Access to Dmaap mediator
 
@@ -177,7 +177,7 @@ __dmaapmed_export_vars() {
        fi
 
        # paths to other components
-       export ECS_SERVICE_PATH
+       export ICS_SERVICE_PATH
 
        export DMAAP_MED_CONF_SELF_HOST=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f1-2)
        export DMAAP_MED_CONF_SELF_PORT=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f3)
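
The two cut expressions split a service path of the form <protocol>://<host>:<port> into its host and port parts; a quick sketch with an illustrative value:

    DMAAP_MED_SERVICE_PATH="http://dmaapmediatorservice:8085"   # illustrative
    echo $DMAAP_MED_SERVICE_PATH | cut -d: -f1-2                # http://dmaapmediatorservice
    echo $DMAAP_MED_SERVICE_PATH | cut -d: -f3                  # 8085
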
diff --git a/test/common/dmaapmr_api_functions.sh b/test/common/dmaapmr_api_functions.sh
new file mode 100644 (file)
index 0000000..d0f3f0c
--- /dev/null
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# Functions for DMAAPMR are included in mr_api_functions.sh
+
+
+
index 1cff7a1..a3f5507 100755 (executable)
 
 # Function to execute curl towards a container (or process) and compare + print result
 # Intended use is for basic test scripts where testing is done with curl and the returned response and payload need to be checked.
-# args: GET|PUT|POST|DELETE <url> <target-response-code> [<json-file>]
+# args: GET|PUT|POST|DELETE <url> <target-response-code> [<payload-file>]
 # All calls made to 'localhost:'<port>.
 # Expects env PORT set to intended port number
 # Expects env RESULT to contain the target response body.
 # Optional env HTTPX shall contain protocol 'http' or 'https'. If not set, 'http' is used. For 'https' all cert errors are ignored
-#   RESULT="*" means that returned payload is not checked, may container any text
+#   RESULT="*" means that returned payload is not checked, may contain any text
 #   RESULT="<text>" means that the returned payload has to match the <text> exactly
 #   RESULT="json:<returned-payload>" means that the returned json payload is compared with the expected result (order of json keys and index is irrelevant)
 #   RESULT="json-array-size:<integer-size>" means that the returned json payload shall contain the number of element given by the <integer-size>
@@ -54,14 +54,37 @@ do_curl() {
         PROT=$HTTPX
     fi
 
-    curlstr="curl -X "$1" -skw %{http_code} ${PROT}://localhost:$PORT$2 -H accept:*/*"
+    req_content=""
+    if [ -z "$REQ_CONTENT" ]; then
+        if [ $# -gt 3 ]; then
+            req_content="-H Content-Type:application/json" #Assuming json
+        fi
+    else
+        req_content="-H Content-Type:$REQ_CONTENT"
+    fi
+    resp_content=""
+    if [ -z "$RESP_CONTENT" ]; then
+        if [[ "$RESULT" == "json"* ]]; then
+            resp_content="application/json"
+        elif [[ "$RESULT" == "*" ]]; then
+            resp_content=""
+        else
+            resp_content="text/plain"
+        fi
+    else
+        resp_content=$RESP_CONTENT
+    fi
+    curlstr="curl -X "$1" -skw :%{content_type}:%{http_code} ${PROT}://localhost:$PORT$2 -H accept:*/*"
     if [ $# -gt 3 ]; then
-        curlstr=$curlstr" -H Content-Type:application/json --data-binary @"$4
+        curlstr=$curlstr" $req_content --data-binary @"$4
     fi
     echo "  CMD:"$curlstr
     res=$($curlstr)
     status=${res:${#res}-3}
-    body=${res:0:${#res}-3}
+    remainder=${res:0:${#res}-4}
+    content_type="${remainder##*:}"
+    body="${remainder%:*}"
+
     export body
     if [ $status -ne $3 ]; then
         echo "  Error status:"$status" Expected status: "$3
@@ -70,6 +93,14 @@ do_curl() {
         exit 1
     else
         echo "  OK, code: "$status"     (Expected)"
+        if [[ "$content_type" == *"$resp_content"* ]]; then
+            echo "  Content type: "$content_type"     (Expected)"
+        else
+            echo "  Expected content type: "$resp_content
+            echo "  Got: "$content_type
+            echo "Exiting....."
+            exit 1
+        fi
         echo "  Body: "$body
         if [ "$RESULT" == "*" ]; then
             echo "  Body contents not checked"
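
Given the documented contract (PORT and RESULT must be set, HTTPX is optional, and REQ_CONTENT/RESP_CONTENT may now override the assumed content types), a call from a basic test script could look like the sketch below; the port, paths and expected body are illustrative only.

    export PORT=8083
    export RESULT="json:{\"status\":\"ok\"}"
    do_curl GET /status 200

    export RESULT="*"                        # returned payload not checked
    export REQ_CONTENT="application/json"    # content type for the request body
    do_curl PUT /data 200 testdata/payload.json
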
index 3c329d9..e186f09 100755 (executable)
@@ -31,7 +31,6 @@ print_usage() {
   echo "or"
   echo "Usage: genstat.sh KUBE <start-time-seconds> <log-file> <app-short-name> <app-name> <namespace> [ <app-short-name> <app-name> <namespace> ]*"
 }
-
 STARTTIME=-1
 
 if [ $# -lt 4 ]; then
@@ -66,7 +65,7 @@ else
 fi
 
 
-echo "Time;Name;PIDS;CPU perc;Mem perc" > $LOGFILE
+echo "Name;Time;PIDS;CPU perc;Mem perc" > $LOGFILE
 
 if [ "$STARTTIME" -ne -1 ]; then
     STARTTIME=$(($SECONDS-$STARTTIME))
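
With the app name moved to the first column, rows for one app can be selected with a plain prefix match; the app short-name and log-file path below are illustrative.

    grep "^PMS;" .logs/stat_data.csv
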
similarity index 66%
rename from test/common/http_proxy_api_functions.sh
rename to test/common/httpproxy_api_functions.sh
index 3378a1d..af11f14 100644 (file)
@@ -79,7 +79,7 @@ __HTTPPROXY_kube_scale_zero() {
 # Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
 # This function is called for prestarted apps not managed by the test script.
 __HTTPPROXY_kube_scale_zero_and_wait() {
-       echo -e $RED" HTTPPROXY replicas kept as is"$ERED
+       echo -e $RED" HTTPPROXY app is not scaled in this state"$ERED
 }
 
 # Delete all kube resouces for the app
@@ -103,7 +103,7 @@ __HTTPPROXY_store_docker_logs() {
 # This function is called for apps managed by the test script.
 # args: -
 __HTTPPROXY_initial_setup() {
-       :
+       use_http_proxy_http
 }
 
 # Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
@@ -120,42 +120,63 @@ __HTTPPROXY_statisics_setup() {
 
 #######################################################
 
-
-## Access to Http Proxy Receiver
-# Host name may be changed if app started by kube
-# Direct access from script
-HTTP_PROXY_HTTPX="http"
-HTTP_PROXY_HOST_NAME=$LOCALHOST_NAME
-HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_PORT
-
-#########################
-### Http Proxy functions
-#########################
-
-# All calls to httpproxy will be directed to the http interface
+# Set http as the protocol to use for all communication to the http proxy
 # args: -
 # (Function for test scripts)
 use_http_proxy_http() {
-       echo -e $BOLD"$HTTP_PROXY_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD"
-       HTTP_PROXY_HTTPX="http"
-       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_EXTERNAL_PORT
-
-       echo ""
+       __http_proxy_set_protocoll "http" $HTTP_PROXY_INTERNAL_PORT $HTTP_PROXY_EXTERNAL_PORT
 }
 
-# All calls to httpproxy will be directed to the https interface
+# Set https as the protocol to use for all communication to the http proxy
 # args: -
 # (Function for test scripts)
 use_http_proxy_https() {
+       __http_proxy_set_protocoll "https" $HTTP_PROXY_INTERNAL_SECURE_PORT $HTTP_PROXY_EXTERNAL_SECURE_PORT
+}
+
+# Setup paths to svc/container for internal and external access
+# args: <protocol> <internal-port> <external-port>
+__http_proxy_set_protocoll() {
        echo -e $BOLD"$HTTP_PROXY_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD https $EBOLD"
-       HTTP_PROXY_HTTPX="https"
-       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_EXTERNAL_SECURE_PORT
+       echo -e " Using $BOLD $1 $EBOLD towards $HTTP_PROXY_DISPLAY_NAME"
+
+       ## Access to http proxy
+       ## HTTP_PROXY_CONFIG_HOST_NAME and HTTP_PROXY_CONFIG_PORT used by apps as config for proxy host and port
+
+       HTTP_PROXY_SERVICE_PATH=$1"://"$HTTP_PROXY_APP_NAME":"$2  # docker access, container->container and script->container via proxy
+       HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME
+       HTTP_PROXY_CONFIG_PORT=$2
+       if [ $RUNMODE == "KUBE" ]; then
+               HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
+               HTTP_PROXY_CONFIG_PORT=$3
+               HTTP_PROXY_SERVICE_PATH=$1"://"$HTTP_PROXY_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+       fi
 
        echo ""
 }
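
As an illustration of what the consolidated helper sets up (the app name and ports are assumed values, not defaults taken from the environment files):

    use_http_proxy_http
    # docker mode, assuming HTTP_PROXY_APP_NAME=httpproxy and HTTP_PROXY_INTERNAL_PORT=8080:
    #   HTTP_PROXY_SERVICE_PATH     -> http://httpproxy:8080
    #   HTTP_PROXY_CONFIG_HOST_NAME -> httpproxy
    #   HTTP_PROXY_CONFIG_PORT      -> 8080
    # kube mode instead appends the namespace and uses the external port:
    #   http://httpproxy.<KUBE_SIM_NAMESPACE>:<external-port>
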
 
+# Export env vars for config files, docker compose and kube resources
+# args: -
+__http_proxy_export_vars() {
+
+       export HTTP_PROXY_APP_NAME
+       export HTTP_PROXY_DISPLAY_NAME
+
+       export HTTP_PROXY_WEB_EXTERNAL_PORT
+       export HTTP_PROXY_WEB_INTERNAL_PORT
+       export HTTP_PROXY_EXTERNAL_PORT
+       export HTTP_PROXY_INTERNAL_PORT
+
+       export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
+       export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
+       export HTTP_PROXY_EXTERNAL_SECURE_PORT
+       export HTTP_PROXY_INTERNAL_SECURE_PORT
+
+       export KUBE_SIM_NAMESPACE
+       export DOCKER_SIM_NWNAME
+       export HTTP_PROXY_IMAGE
+}
+
 # Start the Http Proxy in the simulator group
 # args: -
 # (Function for test scripts)
@@ -193,23 +214,11 @@ start_http_proxy() {
 
                if [ $retcode_i -eq 0 ]; then
                        echo -e " Creating $HTTP_PROXY_APP_NAME deployment and service"
-                       export HTTP_PROXY_APP_NAME
-
-                       export HTTP_PROXY_WEB_EXTERNAL_PORT
-                       export HTTP_PROXY_WEB_INTERNAL_PORT
-                       export HTTP_PROXY_EXTERNAL_PORT
-                       export HTTP_PROXY_INTERNAL_PORT
-
-                       export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
-                       export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
-                       export HTTP_PROXY_EXTERNAL_SECURE_PORT
-                       export HTTP_PROXY_INTERNAL_SECURE_PORT
-
-                       export KUBE_SIM_NAMESPACE
-                       export HTTP_PROXY_IMAGE
 
                        __kube_create_namespace $KUBE_SIM_NAMESPACE
 
+                       __http_proxy_export_vars
+
                        # Create service
                        input_yaml=$SIM_GROUP"/"$HTTP_PROXY_COMPOSE_DIR"/"svc.yaml
                        output_yaml=$PWD/tmp/proxy_svc.yaml
@@ -222,29 +231,7 @@ start_http_proxy() {
 
                fi
 
-               echo " Retrieving host and ports for service..."
-               HTTP_PROXY_HOST_NAME=$(__kube_get_service_host $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE)
-               HTTP_PROXY_WEB_EXTERNAL_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "web")
-               HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "webs")
-
-               HTTP_PROXY_EXTERNAL_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "http")
-               HTTP_PROXY_EXTERNAL_SECURE_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "https")
-
-               if [ $HTTP_PROXY_HTTPX == "http" ]; then
-                       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_PORT
-                       HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_EXTERNAL_PORT
-                       HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
-
-                       echo " Host IP, http port: $HTTP_PROXY_HOST_NAME $HTTP_PROXY_WEB_EXTERNAL_PORT"
-               else
-                       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
-                       HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_EXTERNAL_SECURE_PORT
-                       HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
-
-                       echo " Host IP, https port: $HTTP_PROXY_HOST_NAME $HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT"
-               fi
-
-               __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_PATH$HTTP_PROXY_ALIVE_URL
+               __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_SERVICE_PATH$HTTP_PROXY_ALIVE_URL
 
        else
                # Check if docker app shall be fully managed by the test script
@@ -255,36 +242,11 @@ start_http_proxy() {
                        exit
                fi
 
-               export HTTP_PROXY_APP_NAME
-               export HTTP_PROXY_EXTERNAL_PORT
-               export HTTP_PROXY_INTERNAL_PORT
-               export HTTP_PROXY_EXTERNAL_SECURE_PORT
-               export HTTP_PROXY_INTERNAL_SECURE_PORT
-               export HTTP_PROXY_WEB_EXTERNAL_PORT
-               export HTTP_PROXY_WEB_INTERNAL_PORT
-               export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
-               export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
-               export DOCKER_SIM_NWNAME
-
-               export HTTP_PROXY_DISPLAY_NAME
+               __http_proxy_export_vars
 
                __start_container $HTTP_PROXY_COMPOSE_DIR "" NODOCKERARGS 1 $HTTP_PROXY_APP_NAME
 
-               if [ $HTTP_PROXY_HTTPX == "http" ]; then
-                       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_INTERNAL_PORT
-               else
-                       HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
-               fi
-        __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_PATH$HTTP_PROXY_ALIVE_URL
-
-               if [ $HTTP_PROXY_HTTPX == "http" ]; then
-                       HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_INTERNAL_PORT
-               else
-                       HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_INTERNAL_SECURE_PORT
-               fi
-               HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME
-
+        __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_SERVICE_PATH$HTTP_PROXY_ALIVE_URL
        fi
        echo ""
 }
-
similarity index 82%
rename from test/common/ecs_api_functions.sh
rename to test/common/ics_api_functions.sh
index b28c061..df2de4f 100644 (file)
 #  ============LICENSE_END=================================================
 #
 
-# This is a script that contains container/service management functions and test functions for ECS
+# This is a script that contains container/service management functions and test functions for ICS
 
 ################ Test engine functions ################
 
 # Create the image var used during the test
 # arg: <image-tag-suffix> (selects staging, snapshot, release etc)
 # <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__ECS_imagesetup() {
-       __check_and_create_image_var ECS "ECS_IMAGE" "ECS_IMAGE_BASE" "ECS_IMAGE_TAG" $1 "$ECS_DISPLAY_NAME"
+__ICS_imagesetup() {
+       __check_and_create_image_var ICS "ICS_IMAGE" "ICS_IMAGE_BASE" "ICS_IMAGE_TAG" $1 "$ICS_DISPLAY_NAME"
 }
 
 # Pull image from remote repo or use locally built image
@@ -33,276 +33,275 @@ __ECS_imagesetup() {
 # <pull-policy-override> Shall be used for images allowing overriding. For example use a local image when test is started to use released images
 # <pull-policy-original> Shall be used for images that does not allow overriding
 # Both var may contain: 'remote', 'remote-remove' or 'local'
-__ECS_imagepull() {
-       __check_and_pull_image $1 "$ECS_DISPLAY_NAME" $ECS_APP_NAME ECS_IMAGE
+__ICS_imagepull() {
+       __check_and_pull_image $1 "$ICS_DISPLAY_NAME" $ICS_APP_NAME ICS_IMAGE
 }
 
 # Build image (only for simulator or interfaces stubs owned by the test environment)
 # arg: <image-tag-suffix> (selects staging, snapshot, release etc)
 # <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__ECS_imagebuild() {
-       echo -e $RED" Image for app ECS shall never be built"$ERED
+__ICS_imagebuild() {
+       echo -e $RED" Image for app ICS shall never be built"$ERED
 }
 
 # Generate a string for each included image using the app display name and a docker images format string
 # If a custom image repo is used then also the source image from the local repo is listed
 # arg: <docker-images-format-string> <file-to-append>
-__ECS_image_data() {
-       echo -e "$ECS_DISPLAY_NAME\t$(docker images --format $1 $ECS_IMAGE)" >>   $2
-       if [ ! -z "$ECS_IMAGE_SOURCE" ]; then
-               echo -e "-- source image --\t$(docker images --format $1 $ECS_IMAGE_SOURCE)" >>   $2
+__ICS_image_data() {
+       echo -e "$ICS_DISPLAY_NAME\t$(docker images --format $1 $ICS_IMAGE)" >>   $2
+       if [ ! -z "$ICS_IMAGE_SOURCE" ]; then
+               echo -e "-- source image --\t$(docker images --format $1 $ICS_IMAGE_SOURCE)" >>   $2
        fi
 }
 
 # Scale kubernetes resources to zero
 # All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
 # This function is called for apps fully managed by the test script
-__ECS_kube_scale_zero() {
-       __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+__ICS_kube_scale_zero() {
+       __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
 }
 
 # Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
 # This function is called for prestarted apps not managed by the test script.
-__ECS_kube_scale_zero_and_wait() {
-       __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-enrichmentservice
+__ICS_kube_scale_zero_and_wait() {
+       __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-informationservice
 }
 
 # Delete all kube resouces for the app
 # This function is called for apps managed by the test script.
-__ECS_kube_delete_all() {
-       __kube_delete_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+__ICS_kube_delete_all() {
+       __kube_delete_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
 }
 
 # Store docker logs
 # This function is called for apps managed by the test script.
 # args: <log-dir> <file-prexix>
-__ECS_store_docker_logs() {
+__ICS_store_docker_logs() {
        if [ $RUNMODE == "KUBE" ]; then
-               kubectl  logs -l "autotest=ECS" -n $KUBE_NONRTRIC_NAMESPACE --tail=-1 > $1$2_ecs.log 2>&1
+               kubectl  logs -l "autotest=ICS" -n $KUBE_NONRTRIC_NAMESPACE --tail=-1 > $1$2_ics.log 2>&1
        else
-               docker logs $ECS_APP_NAME > $1$2_ecs.log 2>&1
+               docker logs $ICS_APP_NAME > $1$2_ics.log 2>&1
        fi
 }
 
 # Initial setup of protocol, host and ports
 # This function is called for apps managed by the test script.
 # args: -
-__ECS_initial_setup() {
-       use_ecs_rest_http
+__ICS_initial_setup() {
+       use_ics_rest_http
 }
 
 # Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
 # For docker, the namespace shall be excluded
 # This function is called for apps managed by the test script as well as for prestarted apps.
 # args: -
-__ECS_statisics_setup() {
+__ICS_statisics_setup() {
        if [ $RUNMODE == "KUBE" ]; then
-               echo "ECS $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+               echo "ICS $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
        else
-               echo "ECS $ECS_APP_NAME"
+               echo "ICS $ICS_APP_NAME"
        fi
 }
 
 #######################################################
 
 
-# Make curl retries towards ECS for http response codes set in this env var, space separated list of codes
-ECS_RETRY_CODES=""
+# Make curl retries towards ICS for http response codes set in this env var, space separated list of codes
+ICS_RETRY_CODES=""
 
 #Save first worker node the pod is started on
-__ECS_WORKER_NODE=""
+__ICS_WORKER_NODE=""
 
 ###########################
-### ECS functions
+### ICS functions
 ###########################
 
-# All calls to ECS will be directed to the ECS REST interface from now on
+# All calls to ICS will be directed to the ICS REST interface from now on
 # args: -
 # (Function for test scripts)
-use_ecs_rest_http() {
-       __ecs_set_protocoll "http" $ECS_INTERNAL_PORT $ECS_EXTERNAL_PORT
+use_ics_rest_http() {
+       __ics_set_protocoll "http" $ICS_INTERNAL_PORT $ICS_EXTERNAL_PORT
 }
 
-# All calls to ECS will be directed to the ECS REST interface from now on
+# All calls to ICS will be directed to the ICS REST interface from now on
 # args: -
 # (Function for test scripts)
-use_ecs_rest_https() {
-       __ecs_set_protocoll "https" $ECS_INTERNAL_SECURE_PORT $ECS_EXTERNAL_SECURE_PORT
+use_ics_rest_https() {
+       __ics_set_protocoll "https" $ICS_INTERNAL_SECURE_PORT $ICS_EXTERNAL_SECURE_PORT
 }
 
-# All calls to ECS will be directed to the ECS dmaap interface over http from now on
+# All calls to ICS will be directed to the ICS dmaap interface over http from now on
 # args: -
 # (Function for test scripts)
-use_ecs_dmaap_http() {
-       echo -e $BOLD"ECS dmaap protocol setting"$EBOLD
+use_ics_dmaap_http() {
+       echo -e $BOLD"ICS dmaap protocol setting"$EBOLD
        echo -e $RED" - NOT SUPPORTED - "$ERED
-       echo -e " Using $BOLD http $EBOLD and $BOLD DMAAP $EBOLD towards ECS"
-       ECS_ADAPTER_TYPE="MR-HTTP"
+       echo -e " Using $BOLD http $EBOLD and $BOLD DMAAP $EBOLD towards ICS"
+       ICS_ADAPTER_TYPE="MR-HTTP"
        echo ""
 }
 
 # Setup paths to svc/container for internal and external access
 # args: <protocol> <internal-port> <external-port>
-__ecs_set_protocoll() {
-       echo -e $BOLD"$ECS_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $ECS_DISPLAY_NAME"
+__ics_set_protocoll() {
+       echo -e $BOLD"$ICS_DISPLAY_NAME protocol setting"$EBOLD
+       echo -e " Using $BOLD $1 $EBOLD towards $ICS_DISPLAY_NAME"
 
-       ## Access to ECS
+       ## Access to ICS
 
-       ECS_SERVICE_PATH=$1"://"$ECS_APP_NAME":"$2  # docker access, container->container and script->container via proxy
+       ICS_SERVICE_PATH=$1"://"$ICS_APP_NAME":"$2  # docker access, container->container and script->container via proxy
        if [ $RUNMODE == "KUBE" ]; then
-               ECS_SERVICE_PATH=$1"://"$ECS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+               ICS_SERVICE_PATH=$1"://"$ICS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
        fi
 
-       # ECS_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
-       ECS_ADAPTER_TYPE="REST"
-       ECS_ADAPTER=$ECS_SERVICE_PATH
+       # ICS_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
+       ICS_ADAPTER_TYPE="REST"
+       ICS_ADAPTER=$ICS_SERVICE_PATH
 
        echo ""
 }
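
A sketch of the resulting paths (the app name and port are assumed values, not taken from the environment files):

    use_ics_rest_http
    # docker mode, assuming ICS_APP_NAME=informationservice and ICS_INTERNAL_PORT=8083:
    #   ICS_SERVICE_PATH -> http://informationservice:8083
    # kube mode: http://informationservice.<KUBE_NONRTRIC_NAMESPACE>:<ICS_EXTERNAL_PORT>
    # ICS_ADAPTER is set to the same path and ICS_ADAPTER_TYPE to REST
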
 
 # Export env vars for config files, docker compose and kube resources
 # args: PROXY|NOPROXY
-__ecs_export_vars() {
-               export ECS_APP_NAME
-               export ECS_APP_NAME_ALIAS
+__ics_export_vars() {
+               export ICS_APP_NAME
+               export ICS_APP_NAME_ALIAS
                export KUBE_NONRTRIC_NAMESPACE
-               export ECS_IMAGE
-               export ECS_INTERNAL_PORT
-               export ECS_INTERNAL_SECURE_PORT
-               export ECS_EXTERNAL_PORT
-               export ECS_EXTERNAL_SECURE_PORT
-               export ECS_CONFIG_MOUNT_PATH
-               export ECS_CONFIG_CONFIGMAP_NAME=$ECS_APP_NAME"-config"
-               export ECS_DATA_CONFIGMAP_NAME=$ECS_APP_NAME"-data"
-               export ECS_CONTAINER_MNT_DIR
-               export ECS_HOST_MNT_DIR
-               export ECS_CONFIG_FILE
+               export ICS_IMAGE
+               export ICS_INTERNAL_PORT
+               export ICS_INTERNAL_SECURE_PORT
+               export ICS_EXTERNAL_PORT
+               export ICS_EXTERNAL_SECURE_PORT
+               export ICS_CONFIG_MOUNT_PATH
+               export ICS_CONFIG_CONFIGMAP_NAME=$ICS_APP_NAME"-config"
+               export ICS_DATA_CONFIGMAP_NAME=$ICS_APP_NAME"-data"
+               export ICS_CONTAINER_MNT_DIR
+               export ICS_HOST_MNT_DIR
+               export ICS_CONFIG_FILE
                export DOCKER_SIM_NWNAME
-               export ECS_DISPLAY_NAME
+               export ICS_DISPLAY_NAME
+               export ICS_LOGPATH
 
-
-               export ECS_DATA_PV_NAME=$ECS_APP_NAME"-pv"
-               export ECS_DATA_PVC_NAME=$ECS_APP_NAME"-pvc"
+               export ICS_DATA_PV_NAME=$ICS_APP_NAME"-pv"
+               export ICS_DATA_PVC_NAME=$ICS_APP_NAME"-pvc"
                #Create a unique path for the pv each time to prevent a previous volume to be reused
-               export ECS_PV_PATH="ecsdata-"$(date +%s)
+               export ICS_PV_PATH="icsdata-"$(date +%s)
 
                if [ $1 == "PROXY" ]; then
-                       export ECS_HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_CONFIG_PORT  #Set if proxy is started
-                       export ECS_HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_CONFIG_HOST_NAME #Set if proxy is started
-                       if [ $ECS_HTTP_PROXY_CONFIG_PORT -eq 0 ] || [ -z "$ECS_HTTP_PROXY_CONFIG_HOST_NAME" ]; then
+                       export ICS_HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_CONFIG_PORT  #Set if proxy is started
+                       export ICS_HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_CONFIG_HOST_NAME #Set if proxy is started
+                       if [ $ICS_HTTP_PROXY_CONFIG_PORT -eq 0 ] || [ -z "$ICS_HTTP_PROXY_CONFIG_HOST_NAME" ]; then
                                echo -e $YELLOW" Warning: HTTP PROXY will not be configured, proxy app not started"$EYELLOW
                        else
                                echo " Configured with http proxy"
                        fi
                else
-                       export ECS_HTTP_PROXY_CONFIG_PORT=0
-                       export ECS_HTTP_PROXY_CONFIG_HOST_NAME=""
+                       export ICS_HTTP_PROXY_CONFIG_PORT=0
+                       export ICS_HTTP_PROXY_CONFIG_HOST_NAME=""
                        echo " Configured without http proxy"
                fi
 }
 
 
-# Start the ECS
+# Start the ICS
 # args: PROXY|NOPROXY <config-file>
 # (Function for test scripts)
-start_ecs() {
+start_ics() {
 
-       echo -e $BOLD"Starting $ECS_DISPLAY_NAME"$EBOLD
+       echo -e $BOLD"Starting $ICS_DISPLAY_NAME"$EBOLD
 
        if [ $RUNMODE == "KUBE" ]; then
 
                # Check if app shall be fully managed by the test script
-               __check_included_image "ECS"
+               __check_included_image "ICS"
                retcode_i=$?
 
                # Check if app shall only be used by the testscipt
-               __check_prestarted_image "ECS"
+               __check_prestarted_image "ICS"
                retcode_p=$?
 
                if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
-                       echo -e $RED"The $ECS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
-                       echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+                       echo -e $RED"The $ICS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+                       echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
                        exit
                fi
                if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
-                       echo -e $RED"The $ECS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
-                       echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+                       echo -e $RED"The $ICS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+                       echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
                        exit
                fi
 
-
                if [ $retcode_p -eq 0 ]; then
-                       echo -e " Using existing $ECS_APP_NAME deployment and service"
-                       echo " Setting ECS replicas=1"
-                       res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
-                       __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+                       echo -e " Using existing $ICS_APP_NAME deployment and service"
+                       echo " Setting ICS replicas=1"
+                       res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+                       __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
                fi
 
                # Check if app shall be fully managed by the test script
                if [ $retcode_i -eq 0 ]; then
-                       echo -e " Creating $ECS_APP_NAME app and expose service"
+                       echo -e " Creating $ICS_APP_NAME app and expose service"
 
                        #Check if nonrtric namespace exists, if not create it
                        __kube_create_namespace $KUBE_NONRTRIC_NAMESPACE
 
-                       __ecs_export_vars $1
+                       __ics_export_vars $1
 
                        # Create config map for config
-                       datafile=$PWD/tmp/$ECS_CONFIG_FILE
+                       datafile=$PWD/tmp/$ICS_CONFIG_FILE
                        cp $2 $datafile
-                       output_yaml=$PWD/tmp/ecs_cfc.yaml
-                       __kube_create_configmap $ECS_CONFIG_CONFIGMAP_NAME $KUBE_NONRTRIC_NAMESPACE autotest ECS $datafile $output_yaml
+                       output_yaml=$PWD/tmp/ics_cfc.yaml
+                       __kube_create_configmap $ICS_CONFIG_CONFIGMAP_NAME $KUBE_NONRTRIC_NAMESPACE autotest ICS $datafile $output_yaml
 
                        # Create pv
-                       input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"pv.yaml
-                       output_yaml=$PWD/tmp/ecs_pv.yaml
-                       __kube_create_instance pv $ECS_APP_NAME $input_yaml $output_yaml
+                       input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"pv.yaml
+                       output_yaml=$PWD/tmp/ics_pv.yaml
+                       __kube_create_instance pv $ICS_APP_NAME $input_yaml $output_yaml
 
                        # Create pvc
-                       input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"pvc.yaml
-                       output_yaml=$PWD/tmp/ecs_pvc.yaml
-                       __kube_create_instance pvc $ECS_APP_NAME $input_yaml $output_yaml
+                       input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"pvc.yaml
+                       output_yaml=$PWD/tmp/ics_pvc.yaml
+                       __kube_create_instance pvc $ICS_APP_NAME $input_yaml $output_yaml
 
                        # Create service
-                       input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"svc.yaml
-                       output_yaml=$PWD/tmp/ecs_svc.yaml
-                       __kube_create_instance service $ECS_APP_NAME $input_yaml $output_yaml
+                       input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"svc.yaml
+                       output_yaml=$PWD/tmp/ics_svc.yaml
+                       __kube_create_instance service $ICS_APP_NAME $input_yaml $output_yaml
 
                        # Create app
-                       input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"app.yaml
-                       output_yaml=$PWD/tmp/ecs_app.yaml
-                       __kube_create_instance app $ECS_APP_NAME $input_yaml $output_yaml
+                       input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"app.yaml
+                       output_yaml=$PWD/tmp/ics_app.yaml
+                       __kube_create_instance app $ICS_APP_NAME $input_yaml $output_yaml
                fi
 
-               # Tie the ECS to a worker node so that ECS will always be scheduled to the same worker node if the ECS pod is restarted
-               # A PVC of type hostPath is mounted to ECS, for persistent storage, so the ECS must always be on the node which mounted the volume
+               # Tie the ICS to a worker node so that ICS will always be scheduled to the same worker node if the ICS pod is restarted
+               # A PVC of type hostPath is mounted to ICS, for persistent storage, so the ICS must always be on the node which mounted the volume
 
                # Keep the initial worker node in case the pod need to be "restarted" - must be made to the same node due to a volume mounted on the host
                if [ $retcode_i -eq 0 ]; then
-                       __ECS_WORKER_NODE=$(kubectl get pod -l "autotest=ECS" -n $KUBE_NONRTRIC_NAMESPACE -o jsonpath='{.items[*].spec.nodeName}')
-                       if [ -z "$__ECS_WORKER_NODE" ]; then
-                               echo -e $YELLOW" Cannot find worker node for pod for $ECS_APP_NAME, persistency may not work"$EYELLOW
+                       __ICS_WORKER_NODE=$(kubectl get pod -l "autotest=ICS" -n $KUBE_NONRTRIC_NAMESPACE -o jsonpath='{.items[*].spec.nodeName}')
+                       if [ -z "$__ICS_WORKER_NODE" ]; then
+                               echo -e $YELLOW" Cannot find worker node for pod for $ICS_APP_NAME, persistency may not work"$EYELLOW
                        fi
                else
-                       echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+                       echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
                fi
 
 
-               __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+               __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
 
        else
-               __check_included_image 'ECS'
+               __check_included_image 'ICS'
                if [ $? -eq 1 ]; then
-                       echo -e $RED"The ECS app is not included in this test script"$ERED
-                       echo -e $RED"ECS will not be started"$ERED
+                       echo -e $RED"The ICS app is not included in this test script"$ERED
+                       echo -e $RED"ICS will not be started"$ERED
                        exit 1
                fi
 
                curdir=$PWD
                cd $SIM_GROUP
-               cd ecs
-               cd $ECS_HOST_MNT_DIR
+               cd ics
+               cd $ICS_HOST_MNT_DIR
                #cd ..
                if [ -d db ]; then
                        if [ "$(ls -A $DIR)" ]; then
@@ -319,49 +318,49 @@ start_ecs() {
 
                cd $curdir
 
-               __ecs_export_vars $1
+               __ics_export_vars $1
 
-               dest_file=$SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_HOST_MNT_DIR/$ECS_CONFIG_FILE
+               dest_file=$SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_HOST_MNT_DIR/$ICS_CONFIG_FILE
 
                envsubst < $2 > $dest_file
 
-               __start_container $ECS_COMPOSE_DIR "" NODOCKERARGS 1 $ECS_APP_NAME
+               __start_container $ICS_COMPOSE_DIR "" NODOCKERARGS 1 $ICS_APP_NAME
 
-               __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+               __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
        fi
        echo ""
        return 0
 }
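
Usage sketch for the renamed start function; whether PROXY or NOPROXY is passed depends on whether the http proxy has been started, and the exact configuration-file argument depends on the test case (the path below is illustrative):

    start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
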
 
-# Stop the ecs
+# Stop the ics
 # args: -
 # args: -
 # (Function for test scripts)
-stop_ecs() {
-       echo -e $BOLD"Stopping $ECS_DISPLAY_NAME"$EBOLD
+stop_ics() {
+       echo -e $BOLD"Stopping $ICS_DISPLAY_NAME"$EBOLD
 
        if [ $RUNMODE == "KUBE" ]; then
 
-               __check_prestarted_image "ECS"
+               __check_prestarted_image "ICS"
                if [ $? -eq 0 ]; then
-                       echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
-                       res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
-                       __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 0
+                       echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+                       res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+                       __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 0
                        return 0
                fi
 
-               __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+               __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
                echo "  Deleting the replica set - a new will be started when the app is started"
-               tmp=$(kubectl delete rs -n $KUBE_NONRTRIC_NAMESPACE -l "autotest=ECS")
+               tmp=$(kubectl delete rs -n $KUBE_NONRTRIC_NAMESPACE -l "autotest=ICS")
                if [ $? -ne 0 ]; then
                        echo -e $RED" Could not delete replica set "$RED
                        ((RES_CONF_FAIL++))
                        return 1
                fi
        else
-               docker stop $ECS_APP_NAME &> ./tmp/.dockererr
+               docker stop $ICS_APP_NAME &> ./tmp/.dockererr
                if [ $? -ne 0 ]; then
-                       __print_err "Could not stop $ECS_APP_NAME" $@
+                       __print_err "Could not stop $ICS_APP_NAME" $@
                        cat ./tmp/.dockererr
                        ((RES_CONF_FAIL++))
                        return 1
@@ -372,48 +371,48 @@ stop_ecs() {
        return 0
 }
 
-# Start a previously stopped ecs
+# Start a previously stopped ics
 # args: -
 # (Function for test scripts)
-start_stopped_ecs() {
-       echo -e $BOLD"Starting (the previously stopped) $ECS_DISPLAY_NAME"$EBOLD
+start_stopped_ics() {
+       echo -e $BOLD"Starting (the previously stopped) $ICS_DISPLAY_NAME"$EBOLD
 
        if [ $RUNMODE == "KUBE" ]; then
 
-               __check_prestarted_image "ECS"
+               __check_prestarted_image "ICS"
                if [ $? -eq 0 ]; then
-                       echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
-                       res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
-                       __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
-                       __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+                       echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+                       res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+                       __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+                       __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
                        return 0
                fi
 
                # Tie the PMS to the same worker node it was initially started on
                # A PVC of type hostPath is mounted to PMS, for persistent storage, so the PMS must always be on the node which mounted the volume
-               if [ -z "$__ECS_WORKER_NODE" ]; then
+               if [ -z "$__ICS_WORKER_NODE" ]; then
                        echo -e $RED" No initial worker node found for pod "$RED
                        ((RES_CONF_FAIL++))
                        return 1
                else
-                       echo -e $BOLD" Setting nodeSelector kubernetes.io/hostname=$__ECS_WORKER_NODE to deployment for $ECS_APP_NAME. Pod will always run on this worker node: $__PA_WORKER_NODE"$BOLD
+                       echo -e $BOLD" Setting nodeSelector kubernetes.io/hostname=$__ICS_WORKER_NODE to deployment for $ICS_APP_NAME. Pod will always run on this worker node: $__ICS_WORKER_NODE"$BOLD
                        echo -e $BOLD" The mounted volume is mounted as hostPath and only available on that worker node."$BOLD
-                       tmp=$(kubectl patch deployment $ECS_APP_NAME -n $KUBE_NONRTRIC_NAMESPACE --patch '{"spec": {"template": {"spec": {"nodeSelector": {"kubernetes.io/hostname": "'$__ECS_WORKER_NODE'"}}}}}')
+                       tmp=$(kubectl patch deployment $ICS_APP_NAME -n $KUBE_NONRTRIC_NAMESPACE --patch '{"spec": {"template": {"spec": {"nodeSelector": {"kubernetes.io/hostname": "'$__ICS_WORKER_NODE'"}}}}}')
                        if [ $? -ne 0 ]; then
-                               echo -e $YELLOW" Cannot set nodeSelector to deployment for $ECS_APP_NAME, persistency may not work"$EYELLOW
+                               echo -e $YELLOW" Cannot set nodeSelector to deployment for $ICS_APP_NAME, persistency may not work"$EYELLOW
                        fi
-                       __kube_scale deployment $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+                       __kube_scale deployment $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
                fi
        else
-               docker start $ECS_APP_NAME &> ./tmp/.dockererr
+               docker start $ICS_APP_NAME &> ./tmp/.dockererr
                if [ $? -ne 0 ]; then
-                       __print_err "Could not start (the stopped) $ECS_APP_NAME" $@
+                       __print_err "Could not start (the stopped) $ICS_APP_NAME" $@
                        cat ./tmp/.dockererr
                        ((RES_CONF_FAIL++))
                        return 1
                fi
        fi
-       __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+       __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
        if [ $? -ne 0 ]; then
                return 1
        fi
@@ -421,12 +420,12 @@ start_stopped_ecs() {
        return 0
 }
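
A typical stop/restart sequence in a persistency test could look like the sketch below (illustrative only); as the functions themselves note, persistency in kube mode depends on the pod being rescheduled to the original worker node.

    stop_ics
    # ... verify that producers/jobs survive while ICS is down ...
    start_stopped_ics
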
 
-# Turn on debug level tracing in ECS
+# Turn on debug level tracing in ICS
 # args: -
 # (Function for test scripts)
-set_ecs_debug() {
-       echo -e $BOLD"Setting ecs debug logging"$EBOLD
-       curlString="$ECS_SERVICE_PATH$ECS_ACTUATOR -X POST  -H Content-Type:application/json -d {\"configuredLevel\":\"debug\"}"
+set_ics_debug() {
+       echo -e $BOLD"Setting ics debug logging"$EBOLD
+       curlString="$ICS_SERVICE_PATH$ICS_ACTUATOR -X POST  -H Content-Type:application/json -d {\"configuredLevel\":\"debug\"}"
        result=$(__do_curl "$curlString")
        if [ $? -ne 0 ]; then
                __print_err "Could not set debug mode" $@
@@ -437,12 +436,12 @@ set_ecs_debug() {
        return 0
 }
 
-# Turn on trace level tracing in ECS
+# Turn on trace level tracing in ICS
 # args: -
 # (Function for test scripts)
-set_ecs_trace() {
-       echo -e $BOLD"Setting ecs trace logging"$EBOLD
-       curlString="$ECS_SERVICE_PATH/actuator/loggers/org.oransc.enrichment -X POST  -H Content-Type:application/json -d {\"configuredLevel\":\"trace\"}"
+set_ics_trace() {
+       echo -e $BOLD"Setting ics trace logging"$EBOLD
+       curlString="$ICS_SERVICE_PATH/actuator/loggers/org.oransc.information -X POST  -H Content-Type:application/json -d {\"configuredLevel\":\"trace\"}"
        result=$(__do_curl "$curlString")
        if [ $? -ne 0 ]; then
                __print_err "Could not set trace mode" $@
@@ -453,50 +452,50 @@ set_ecs_trace() {
        return 0
 }
 
-# Perform curl retries when making direct call to ECS for the specified http response codes
+# Perform curl retries when making direct call to ICS for the specified http response codes
 # Speace separated list of http response codes
 # args: [<response-code>]*
-use_ecs_retries() {
-       echo -e $BOLD"Do curl retries to the ECS REST inteface for these response codes:$@"$EBOLD
-       ECS_RETRY_CODES=$@
+use_ics_retries() {
+       echo -e $BOLD"Do curl retries to the ICS REST interface for these response codes:$@"$EBOLD
+       ICS_RETRY_CODES=$@
        echo ""
        return 0
 }
 
-# Check the ecs logs for WARNINGs and ERRORs
+# Check the ics logs for WARNINGs and ERRORs
 # args: -
 # (Function for test scripts)
-check_ecs_logs() {
-       __check_container_logs "ECS" $ECS_APP_NAME $ECS_LOGPATH WARN ERR
+check_ics_logs() {
+       __check_container_logs "ICS" $ICS_APP_NAME $ICS_LOGPATH WARN ERR
 }
 
 
-# Tests if a variable value in the ECS is equal to a target value and and optional timeout.
+# Tests if a variable value in the ICS is equal to a target value and an optional timeout.
 # Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is
 # equal to the target or not.
 # Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
 # before setting pass or fail depending on if the variable value becomes equal to the target
 # value or not.
 # (Function for test scripts)
-ecs_equal() {
+ics_equal() {
        if [ $# -eq 2 ] || [ $# -eq 3 ]; then
-               __var_test ECS "$ECS_SERVICE_PATH/" $1 "=" $2 $3
+               __var_test ICS "$ICS_SERVICE_PATH/" $1 "=" $2 $3
        else
-               __print_err "Wrong args to ecs_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+               __print_err "Wrong args to ics_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
        fi
 }
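
Usage sketch for the renamed assertion helper; the variable name below is only a placeholder for whatever counter or json path __var_test accepts in the ICS response.

    # wait up to 120 seconds for the value to become 1
    ics_equal json:data-producer/v1/info-producers 1 120
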
 
 
 ##########################################
-######### A1-E Enrichment  API ##########
+######### A1-E Information API ##########
 ##########################################
-#Function prefix: ecs_api_a1
+#Function prefix: ics_api_a1
 
 # API Test function: GET /A1-EI/v1/eitypes/{eiTypeId}/eijobs
 # args: <response-code> <type-id>  <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
 # args (flat uri structure): <response-code> <type-id>|NOTYPE  <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
 # (Function for test scripts)
-ecs_api_a1_get_job_ids() {
+ics_api_a1_get_job_ids() {
        __log_test_start $@
 
        if [ -z "$FLAT_A1_EI" ]; then
@@ -530,7 +529,7 @@ ecs_api_a1_get_job_ids() {
                fi
                query="/A1-EI/v1/eijobs$search"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -568,7 +567,7 @@ ecs_api_a1_get_job_ids() {
 # API Test function: GET /A1-EI/v1/eitypes/{eiTypeId}
 # args: <response-code> <type-id> [<schema-file>]
 # (Function for test scripts)
-ecs_api_a1_get_type() {
+ics_api_a1_get_type() {
        __log_test_start $@
 
     if [ $# -lt 2 ] || [ $# -gt 3 ]; then
@@ -577,7 +576,7 @@ ecs_api_a1_get_type() {
        fi
 
        query="/A1-EI/v1/eitypes/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -614,7 +613,7 @@ ecs_api_a1_get_type() {
 # API Test function: GET /A1-EI/v1/eitypes
 # args: <response-code> [ (EMPTY | [<type-id>]+) ]
 # (Function for test scripts)
-ecs_api_a1_get_type_ids() {
+ics_api_a1_get_type_ids() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
@@ -623,7 +622,7 @@ ecs_api_a1_get_type_ids() {
        fi
 
        query="/A1-EI/v1/eitypes"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -659,7 +658,7 @@ ecs_api_a1_get_type_ids() {
 # args: <response-code> <type-id> <job-id> [<status>]
 # args (flat uri structure): <response-code> <job-id> [<status> [<timeout>]]
 # (Function for test scripts)
-ecs_api_a1_get_job_status() {
+ics_api_a1_get_job_status() {
        __log_test_start $@
 
        if [ -z "$FLAT_A1_EI" ]; then
@@ -670,7 +669,7 @@ ecs_api_a1_get_job_status() {
 
                query="/A1-EI/v1/eitypes/$2/eijobs/$3/status"
 
-               res="$(__do_curl_to_api ECS GET $query)"
+               res="$(__do_curl_to_api ICS GET $query)"
                status=${res:${#res}-3}
 
                if [ $status -ne $1 ]; then
@@ -699,7 +698,7 @@ ecs_api_a1_get_job_status() {
 
                start=$SECONDS
                for (( ; ; )); do
-                       res="$(__do_curl_to_api ECS GET $query)"
+                       res="$(__do_curl_to_api ICS GET $query)"
                        status=${res:${#res}-3}
 
                        if [ $# -eq 4 ]; then
@@ -754,7 +753,7 @@ ecs_api_a1_get_job_status() {
 # args: <response-code> <type-id> <job-id> [<target-url> <owner-id> <template-job-file>]
 # args (flat uri structure): <response-code> <job-id> [<type-id> <target-url> <owner-id> <template-job-file>]
 # (Function for test scripts)
-ecs_api_a1_get_job() {
+ics_api_a1_get_job() {
        __log_test_start $@
 
        if [  -z "$FLAT_A1_EI" ]; then
@@ -771,7 +770,7 @@ ecs_api_a1_get_job() {
                fi
                query="/A1-EI/v1/eijobs/$2"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -829,7 +828,7 @@ ecs_api_a1_get_job() {
 # args: <response-code> <type-id> <job-id>
 # args (flat uri structure): <response-code> <job-id>
 # (Function for test scripts)
-ecs_api_a1_delete_job() {
+ics_api_a1_delete_job() {
        __log_test_start $@
 
        if [  -z "$FLAT_A1_EI" ]; then
@@ -847,7 +846,7 @@ ecs_api_a1_delete_job() {
                fi
                query="/A1-EI/v1/eijobs/$2"
        fi
-    res="$(__do_curl_to_api ECS DELETE $query)"
+    res="$(__do_curl_to_api ICS DELETE $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -863,7 +862,7 @@ ecs_api_a1_delete_job() {
 # args: <response-code> <type-id> <job-id> <target-url> <owner-id> <template-job-file>
 # args (flat uri structure): <response-code> <job-id> <type-id> <target-url> <owner-id> <notification-url> <template-job-file>
 # (Function for test scripts)
-ecs_api_a1_put_job() {
+ics_api_a1_put_job() {
        __log_test_start $@
 
        if [  -z "$FLAT_A1_EI" ]; then
@@ -905,7 +904,7 @@ ecs_api_a1_put_job() {
                query="/A1-EI/v1/eijobs/$2"
        fi
 
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -919,27 +918,27 @@ ecs_api_a1_put_job() {
 
 
 ##########################################
-####   Enrichment Data Producer API   ####
+####   Information Data Producer API   ####
 ##########################################
-# Function prefix: ecs_api_edp
+# Function prefix: ics_api_edp
 
 # API Test function: GET /ei-producer/v1/eitypes
 # API Test function: GET /data-producer/v1/info-types
 # args: <response-code> [ EMPTY | <type-id>+]
 # (Function for test scripts)
-ecs_api_edp_get_type_ids() {
+ics_api_edp_get_type_ids() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
                __print_err "<response-code> [ EMPTY | <type-id>+]" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-types"
        else
                query="/ei-producer/v1/eitypes"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -976,21 +975,21 @@ ecs_api_edp_get_type_ids() {
 # API Test function: GET /data-producer/v1/info-producers/{infoProducerId}/status
 # args: <response-code> <producer-id> [<status> [<timeout>]]
 # (Function for test scripts)
-ecs_api_edp_get_producer_status() {
+ics_api_edp_get_producer_status() {
        __log_test_start $@
 
     if [ $# -lt 2 ] || [ $# -gt 4 ]; then
                __print_err "<response-code> <producer-id> [<status> [<timeout>]]" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-producers/$2/status"
        else
                query="/ei-producer/v1/eiproducers/$2/status"
        fi
        start=$SECONDS
        for (( ; ; )); do
-               res="$(__do_curl_to_api ECS GET $query)"
+               res="$(__do_curl_to_api ICS GET $query)"
                status=${res:${#res}-3}
 
                if [ $# -eq 4 ]; then
@@ -1041,7 +1040,7 @@ ecs_api_edp_get_producer_status() {
 # API Test function: GET /ei-producer/v1/eiproducers
 # args (v1_1): <response-code> [ EMPTY | <producer-id>+]
 # (Function for test scripts)
-ecs_api_edp_get_producer_ids() {
+ics_api_edp_get_producer_ids() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
@@ -1050,7 +1049,7 @@ ecs_api_edp_get_producer_ids() {
        fi
 
        query="/ei-producer/v1/eiproducers"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1089,14 +1088,14 @@ ecs_api_edp_get_producer_ids() {
 # API Test function: GET /data-producer/v1/info-producers
 # args (v1_2): <response-code> [ ( NOTYPE | <type-id> ) [ EMPTY | <producer-id>+] ]
 # (Function for test scripts)
-ecs_api_edp_get_producer_ids_2() {
+ics_api_edp_get_producer_ids_2() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
                __print_err "<response-code> [ ( NOTYPE | <type-id> ) [ EMPTY | <producer-id>+] ]" $@
                return 1
        fi
-    if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+    if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-producers"
                if [ $# -gt 1 ] && [ $2 != "NOTYPE" ]; then
                        query=$query"?info_type_id=$2"
@@ -1107,7 +1106,7 @@ ecs_api_edp_get_producer_ids_2() {
                        query=$query"?ei_type_id=$2"
                fi
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1145,7 +1144,7 @@ ecs_api_edp_get_producer_ids_2() {
 # API Test function: GET /ei-producer/v1/eitypes/{eiTypeId}
 # args: (v1_1) <response-code> <type-id> [<job-schema-file> (EMPTY | [<producer-id>]+)]
 # (Function for test scripts)
-ecs_api_edp_get_type() {
+ics_api_edp_get_type() {
        __log_test_start $@
 
        paramError=1
@@ -1161,7 +1160,7 @@ ecs_api_edp_get_type() {
        fi
 
        query="/ei-producer/v1/eitypes/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1205,7 +1204,7 @@ ecs_api_edp_get_type() {
 # API Test function: GET /data-producer/v1/info-types/{infoTypeId}
 # args: (v1_2) <response-code> <type-id> [<job-schema-file> [ <info-type-info> ]]
 # (Function for test scripts)
-ecs_api_edp_get_type_2() {
+ics_api_edp_get_type_2() {
        __log_test_start $@
 
        paramError=1
@@ -1215,7 +1214,7 @@ ecs_api_edp_get_type_2() {
        if [ $# -eq 3 ]; then
                paramError=0
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
                if [ $# -eq 4 ]; then
                        paramError=0
                fi
@@ -1224,13 +1223,13 @@ ecs_api_edp_get_type_2() {
                __print_err "<response-code> <type-id> [<job-schema-file> [ <info-type-info> ]]" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-types/$2"
        else
                query="/ei-producer/v1/eitypes/$2"
        fi
 
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1256,7 +1255,7 @@ ecs_api_edp_get_type_2() {
                        fi
                        info_data=",\"info_type_information\":$info_data"
                fi
-               if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+               if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                        targetJson="{\"info_job_data_schema\":$schema $info_data}"
                else
                        targetJson="{\"ei_job_data_schema\":$schema}"
@@ -1278,10 +1277,10 @@ ecs_api_edp_get_type_2() {
 # API Test function: PUT /data-producer/v1/info-types/{infoTypeId}
 # args: (v1_2) <response-code> <type-id> <job-schema-file> [ <info-type-info> ]
 # (Function for test scripts)
-ecs_api_edp_put_type_2() {
+ics_api_edp_put_type_2() {
        __log_test_start $@
 
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
                if [ $# -lt 3 ] || [ $# -gt 4 ]; then
                        __print_err "<response-code> <type-id> <job-schema-file> [ <info-type-info> ]" $@
                        return 1
@@ -1309,7 +1308,7 @@ ecs_api_edp_put_type_2() {
                info_data=",\"info_type_information\":$info_data"
        fi
 
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                schema=$(cat $3)
                input_json="{\"info_job_data_schema\":$schema $info_data}"
                file="./tmp/put_type.json"
@@ -1324,7 +1323,7 @@ ecs_api_edp_put_type_2() {
 
                query="/ei-producer/v1/eitypes/$2"
        fi
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1340,7 +1339,7 @@ ecs_api_edp_put_type_2() {
 # API Test function: DELETE /data-producer/v1/info-types/{infoTypeId}
 # args: (v1_2) <response-code> <type-id>
 # (Function for test scripts)
-ecs_api_edp_delete_type_2() {
+ics_api_edp_delete_type_2() {
        __log_test_start $@
 
     if [ $# -ne 2 ]; then
@@ -1348,12 +1347,12 @@ ecs_api_edp_delete_type_2() {
                return 1
        fi
 
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-types/$2"
        else
                query="/ei-producer/v1/eitypes/$2"
        fi
-    res="$(__do_curl_to_api ECS DELETE $query)"
+    res="$(__do_curl_to_api ICS DELETE $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1368,7 +1367,7 @@ ecs_api_edp_delete_type_2() {
 # API Test function: GET /ei-producer/v1/eiproducers/{eiProducerId}
 # args: (v1_1) <response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | [<type-id> <schema-file>]+) ]
 # (Function for test scripts)
-ecs_api_edp_get_producer() {
+ics_api_edp_get_producer() {
        __log_test_start $@
 
        #Possible arg count: 2, 5 6, 8, 10 etc
@@ -1390,7 +1389,7 @@ ecs_api_edp_get_producer() {
        fi
 
        query="/ei-producer/v1/eiproducers/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1438,7 +1437,7 @@ ecs_api_edp_get_producer() {
 # API Test function: GET /data-producer/v1/info-producers/{infoProducerId}
 # args (v1_2): <response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | <type-id>+) ]
 # (Function for test scripts)
-ecs_api_edp_get_producer_2() {
+ics_api_edp_get_producer_2() {
        __log_test_start $@
 
        #Possible arg count: 2, 5, 6, 7, 8 etc
@@ -1457,12 +1456,12 @@ ecs_api_edp_get_producer_2() {
                __print_err "<response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | <type-id>+) ]" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-producers/$2"
        else
                query="/ei-producer/v1/eiproducers/$2"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1484,7 +1483,7 @@ ecs_api_edp_get_producer_2() {
                fi
                targetJson=$targetJson"]"
                if [ $# -gt 4 ]; then
-                       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+                       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                                targetJson="{\"supported_info_types\":$targetJson,\"info_job_callback_url\": \"$3\",\"info_producer_supervision_callback_url\": \"$4\"}"
                        else
                                targetJson="{\"supported_ei_types\":$targetJson,\"ei_job_callback_url\": \"$3\",\"ei_producer_supervision_callback_url\": \"$4\"}"
@@ -1507,19 +1506,19 @@ ecs_api_edp_get_producer_2() {
 # API Test function: DELETE /data-producer/v1/info-producers/{infoProducerId}
 # args: <response-code> <producer-id>
 # (Function for test scripts)
-ecs_api_edp_delete_producer() {
+ics_api_edp_delete_producer() {
        __log_test_start $@
 
     if [ $# -lt 2 ]; then
                __print_err "<response-code> <producer-id>" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-producers/$2"
        else
                query="/ei-producer/v1/eiproducers/$2"
        fi
-    res="$(__do_curl_to_api ECS DELETE $query)"
+    res="$(__do_curl_to_api ICS DELETE $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1534,7 +1533,7 @@ ecs_api_edp_delete_producer() {
 # API Test function: PUT /ei-producer/v1/eiproducers/{eiProducerId}
 # args: (v1_1) <response-code> <producer-id> <job-callback> <supervision-callback> NOTYPE|[<type-id> <schema-file>]+
 # (Function for test scripts)
-ecs_api_edp_put_producer() {
+ics_api_edp_put_producer() {
        __log_test_start $@
 
        #Valid number of parametrer 5,6,8,10,
@@ -1574,7 +1573,7 @@ ecs_api_edp_put_producer() {
        file="./tmp/.p.json"
        echo "$inputJson" > $file
        query="/ei-producer/v1/eiproducers/$2"
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1590,7 +1589,7 @@ ecs_api_edp_put_producer() {
 # API Test function: PUT /data-producer/v1/info-producers/{infoProducerId}
 # args: (v1_2) <response-code> <producer-id> <job-callback> <supervision-callback> NOTYPE|[<type-id>+]
 # (Function for test scripts)
-ecs_api_edp_put_producer_2() {
+ics_api_edp_put_producer_2() {
        __log_test_start $@
 
        #Valid number of parametrer 5,6,8,10,
@@ -1615,7 +1614,7 @@ ecs_api_edp_put_producer_2() {
                        inputJson=$inputJson"\""${arr[$i]}"\""
                done
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                inputJson="\"supported_info_types\":"$inputJson"]"
 
                inputJson=$inputJson",\"info_job_callback_url\": \"$3\",\"info_producer_supervision_callback_url\": \"$4\""
@@ -1636,7 +1635,7 @@ ecs_api_edp_put_producer_2() {
                echo "$inputJson" > $file
                query="/ei-producer/v1/eiproducers/$2"
        fi
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1651,7 +1650,7 @@ ecs_api_edp_put_producer_2() {
 # API Test function: GET /ei-producer/v1/eiproducers/{eiProducerId}/eijobs
 # args: (V1-1) <response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)
 # (Function for test scripts)
-ecs_api_edp_get_producer_jobs() {
+ics_api_edp_get_producer_jobs() {
        __log_test_start $@
 
        #Valid number of parameter 2,3,7,11
@@ -1672,7 +1671,7 @@ ecs_api_edp_get_producer_jobs() {
        fi
 
        query="/ei-producer/v1/eiproducers/$2/eijobs"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
        if [ $status -ne $1 ]; then
                __log_test_fail_status_code $1 $status
@@ -1716,7 +1715,7 @@ ecs_api_edp_get_producer_jobs() {
 # API Test function: GET /data-producer/v1/info-producers/{infoProducerId}/info-jobs
 # args: (V1-2) <response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)
 # (Function for test scripts)
-ecs_api_edp_get_producer_jobs_2() {
+ics_api_edp_get_producer_jobs_2() {
        __log_test_start $@
 
        #Valid number of parameter 2,3,7,11
@@ -1735,12 +1734,12 @@ ecs_api_edp_get_producer_jobs_2() {
                __print_err "<response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)" $@
                return 1
        fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                query="/data-producer/v1/info-producers/$2/info-jobs"
        else
                query="/ei-producer/v1/eiproducers/$2/eijobs"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
        if [ $status -ne $1 ]; then
                __log_test_fail_status_code $1 $status
@@ -1762,7 +1761,7 @@ ecs_api_edp_get_producer_jobs_2() {
                                        __log_test_fail_general "Job template file "${arr[$i+4]}", does not exist"
                                        return 1
                                fi
-                               if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+                               if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                                        targetJson=$targetJson"{\"info_job_identity\":\"${arr[$i]}\",\"info_type_identity\":\"${arr[$i+1]}\",\"target_uri\":\"${arr[$i+2]}\",\"owner\":\"${arr[$i+3]}\",\"info_job_data\":$jobfile, \"last_updated\":\"????\"}"
                                else
                                        targetJson=$targetJson"{\"ei_job_identity\":\"${arr[$i]}\",\"ei_type_identity\":\"${arr[$i+1]}\",\"target_uri\":\"${arr[$i+2]}\",\"owner\":\"${arr[$i+3]}\",\"ei_job_data\":$jobfile, \"last_updated\":\"????\"}"
@@ -1787,19 +1786,19 @@ ecs_api_edp_get_producer_jobs_2() {
 ##########################################
 ####          Service status          ####
 ##########################################
-# Function prefix: ecs_api_service
+# Function prefix: ics_api_service
 
 # API Test function: GET /status
 # args: <response-code>
 # (Function for test scripts)
-ecs_api_service_status() {
+ics_api_service_status() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
                __print_err "<response-code>" $@
                return 1
        fi
-       res="$(__do_curl_to_api ECS GET /status)"
+       res="$(__do_curl_to_api ICS GET /status)"
     status=${res:${#res}-3}
        if [ $status -ne $1 ]; then
                __log_test_fail_status_code $1 $status
@@ -1812,13 +1811,13 @@ ecs_api_service_status() {
 ###########################################
 ######### Info data consumer API ##########
 ###########################################
-#Function prefix: ecs_api_idc
+#Function prefix: ics_api_idc
 
 
 # API Test function: GET /data-consumer/v1/info-types
 # args: <response-code> [ (EMPTY | [<type-id>]+) ]
 # (Function for test scripts)
-ecs_api_idc_get_type_ids() {
+ics_api_idc_get_type_ids() {
        __log_test_start $@
 
     if [ $# -lt 1 ]; then
@@ -1827,7 +1826,7 @@ ecs_api_idc_get_type_ids() {
        fi
 
        query="/data-consumer/v1/info-types"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1862,7 +1861,7 @@ ecs_api_idc_get_type_ids() {
 # API Test function: GET /data-consumer/v1/info-jobs
 # args: <response-code> <type-id>|NOTYPE <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
 # (Function for test scripts)
-ecs_api_idc_get_job_ids() {
+ics_api_idc_get_job_ids() {
        __log_test_start $@
 
        # Valid number of parameters 4,5,6 etc
@@ -1884,7 +1883,7 @@ ecs_api_idc_get_job_ids() {
        fi
        query="/data-consumer/v1/info-jobs$search"
 
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1922,7 +1921,7 @@ ecs_api_idc_get_job_ids() {
 # API Test function: GET /data-consumer/v1/info-jobs/{infoJobId}
 # args: <response-code> <job-id> [<type-id> <target-url> <owner-id> <template-job-file>]
 # (Function for test scripts)
-ecs_api_idc_get_job() {
+ics_api_idc_get_job() {
        __log_test_start $@
 
        if [ $# -ne 2 ] && [ $# -ne 7 ]; then
@@ -1930,7 +1929,7 @@ ecs_api_idc_get_job() {
                return 1
        fi
        query="/data-consumer/v1/info-jobs/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -1966,7 +1965,7 @@ ecs_api_idc_get_job() {
 # API Test function: PUT /data-consumer/v1/info-jobs/{infoJobId}
 # args: <response-code> <job-id> <type-id> <target-url> <owner-id> <notification-url> <template-job-file> [ VALIDATE ]
 # (Function for test scripts)
-ecs_api_idc_put_job() {
+ics_api_idc_put_job() {
        __log_test_start $@
 
        if [ $# -lt 7 ] || [ $# -gt 8 ]; then
@@ -1993,7 +1992,7 @@ ecs_api_idc_put_job() {
                fi
        fi
 
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2008,7 +2007,7 @@ ecs_api_idc_put_job() {
 # API Test function: DELETE /data-consumer/v1/info-jobs/{infoJobId}
 # args: <response-code> <job-id>
 # (Function for test scripts)
-ecs_api_idc_delete_job() {
+ics_api_idc_delete_job() {
        __log_test_start $@
 
        if [ $# -ne 2 ]; then
@@ -2016,7 +2015,7 @@ ecs_api_idc_delete_job() {
                return 1
        fi
        query="/data-consumer/v1/info-jobs/$2"
-    res="$(__do_curl_to_api ECS DELETE $query)"
+    res="$(__do_curl_to_api ICS DELETE $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2031,7 +2030,7 @@ ecs_api_idc_delete_job() {
 # API Test function: GET /data-consumer/v1/info-types/{infoTypeId}
 # args: <response-code> <type-id> [<schema-file> [<type-status> <producers-count]]
 # (Function for test scripts)
-ecs_api_idc_get_type() {
+ics_api_idc_get_type() {
        __log_test_start $@
 
     if [ $# -lt 2 ] || [ $# -gt 5 ]; then
@@ -2040,7 +2039,7 @@ ecs_api_idc_get_type() {
        fi
 
        query="/data-consumer/v1/info-types/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2078,7 +2077,7 @@ ecs_api_idc_get_type() {
 # This test only status during an optional timeout. No test of the list of producers
 # args: <response-code> <job-id> [<status> [<timeout>]]
 # (Function for test scripts)
-ecs_api_idc_get_job_status() {
+ics_api_idc_get_job_status() {
        __log_test_start $@
 
        if [ $# -lt 2 ] && [ $# -gt 4 ]; then
@@ -2090,7 +2089,7 @@ ecs_api_idc_get_job_status() {
 
        start=$SECONDS
        for (( ; ; )); do
-               res="$(__do_curl_to_api ECS GET $query)"
+               res="$(__do_curl_to_api ICS GET $query)"
                status=${res:${#res}-3}
 
                if [ $# -eq 4 ]; then
@@ -2144,7 +2143,7 @@ ecs_api_idc_get_job_status() {
 # This function test status and the list of producers with and optional timeout
 # args: <response-code> <job-id> [<status> EMPTYPROD|( <prod-count> <producer-id>+ ) [<timeout>]]
 # (Function for test scripts)
-ecs_api_idc_get_job_status2() {
+ics_api_idc_get_job_status2() {
 
        __log_test_start $@
        param_error=0
@@ -2169,9 +2168,9 @@ ecs_api_idc_get_job_status2() {
                                idx=$(($4+4))
                                timeout=${args[$idx]}
                        fi
-                       for ((ecs_i = 0 ; ecs_i < $4 ; ecs_i++)); do
-                               idx=$(($ecs_i+4))
-                               if [ $ecs_i -gt 0 ]; then
+                       for ((ics_i = 0 ; ics_i < $4 ; ics_i++)); do
+                               idx=$(($ics_i+4))
+                               if [ $ics_i -gt 0 ]; then
                                        targetJson=$targetJson","
                                fi
                                targetJson=$targetJson"\""${args[$idx]}"\""
@@ -2189,7 +2188,7 @@ ecs_api_idc_get_job_status2() {
 
        start=$SECONDS
        for (( ; ; )); do
-               res="$(__do_curl_to_api ECS GET $query)"
+               res="$(__do_curl_to_api ICS GET $query)"
                status=${res:${#res}-3}
 
                if [ $# -gt 2 ]; then
@@ -2245,7 +2244,7 @@ ecs_api_idc_get_job_status2() {
 # API Test function: GET /data-consumer/v1/info-type-subscription
 # args: <response-code>  <owner-id>|NOOWNER [ EMPTY | <subscription-id>+]
 # (Function for test scripts)
-ecs_api_idc_get_subscription_ids() {
+ics_api_idc_get_subscription_ids() {
        __log_test_start $@
 
     if [ $# -lt 3 ]; then
@@ -2259,7 +2258,7 @@ ecs_api_idc_get_subscription_ids() {
                search="?owner="$2
        fi
 
-    res="$(__do_curl_to_api ECS GET $query$search)"
+    res="$(__do_curl_to_api ICS GET $query$search)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2295,7 +2294,7 @@ ecs_api_idc_get_subscription_ids() {
 # API Test function: GET /data-consumer/v1/info-type-subscription/{subscriptionId}
 # args: <response-code>  <subscription-id> [ <owner-id> <status-uri> ]
 # (Function for test scripts)
-ecs_api_idc_get_subscription() {
+ics_api_idc_get_subscription() {
        __log_test_start $@
 
     if [ $# -ne 2 ] && [ $# -ne 4 ]; then
@@ -2304,7 +2303,7 @@ ecs_api_idc_get_subscription() {
        fi
 
        query="/data-consumer/v1/info-type-subscription/$2"
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2331,7 +2330,7 @@ ecs_api_idc_get_subscription() {
 # API Test function: PUT /data-consumer/v1/info-type-subscription/{subscriptionId}
 # args: <response-code>  <subscription-id> <owner-id> <status-uri>
 # (Function for test scripts)
-ecs_api_idc_put_subscription() {
+ics_api_idc_put_subscription() {
        __log_test_start $@
 
     if [ $# -ne 4 ]; then
@@ -2344,7 +2343,7 @@ ecs_api_idc_put_subscription() {
        echo "$inputJson" > $file
 
        query="/data-consumer/v1/info-type-subscription/$2"
-    res="$(__do_curl_to_api ECS PUT $query $file)"
+    res="$(__do_curl_to_api ICS PUT $query $file)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2359,7 +2358,7 @@ ecs_api_idc_put_subscription() {
 # API Test function: DELETE /data-consumer/v1/info-type-subscription/{subscriptionId}
 # args: <response-code>  <subscription-id>
 # (Function for test scripts)
-ecs_api_idc_delete_subscription() {
+ics_api_idc_delete_subscription() {
        __log_test_start $@
 
        if [ $# -ne 2 ]; then
@@ -2368,7 +2367,7 @@ ecs_api_idc_delete_subscription() {
        fi
 
        query="/data-consumer/v1/info-type-subscription/$2"
-    res="$(__do_curl_to_api ECS DELETE $query)"
+    res="$(__do_curl_to_api ICS DELETE $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne $1 ]; then
@@ -2383,13 +2382,13 @@ ecs_api_idc_delete_subscription() {
 ##########################################
 ####          Reset jobs              ####
 ##########################################
-# Function prefix: ecs_api_admin
+# Function prefix: ics_api_admin
 
 # Admin to remove all jobs
 # args: <response-code> [ <type> ]
 # (Function for test scripts)
 
-ecs_api_admin_reset() {
+ics_api_admin_reset() {
        __log_test_start $@
 
        if [  -z "$FLAT_A1_EI" ]; then
@@ -2397,7 +2396,7 @@ ecs_api_admin_reset() {
        else
                query="/A1-EI/v1/eijobs"
        fi
-    res="$(__do_curl_to_api ECS GET $query)"
+    res="$(__do_curl_to_api ICS GET $query)"
     status=${res:${#res}-3}
 
        if [ $status -ne 200 ]; then
@@ -2417,7 +2416,7 @@ ecs_api_admin_reset() {
                        echo "Not supported for non-flat EI api"
                else
                        query="/A1-EI/v1/eijobs/$job"
-                       res="$(__do_curl_to_api ECS DELETE $query)"
+                       res="$(__do_curl_to_api ICS DELETE $query)"
                        status=${res:${#res}-3}
                        if [ $status -ne 204 ]; then
                                __log_test_fail_status_code $1 $status
@@ -2436,21 +2435,21 @@ ecs_api_admin_reset() {
 ##########################################
 
 
-# Admin reset to remove all data in ecs; jobs, producers etc
+# Admin reset to remove all data in ics; jobs, producers etc
 # NOTE - only works in kubernetes and the pod should not be running
 # args: -
 # (Function for test scripts)
 
-ecs_kube_pvc_reset() {
+ics_kube_pvc_reset() {
        __log_test_start $@
 
-       pvc_name=$(kubectl get pvc -n $KUBE_NONRTRIC_NAMESPACE  --no-headers -o custom-columns=":metadata.name" | grep enrichment)
+       pvc_name=$(kubectl get pvc -n $KUBE_NONRTRIC_NAMESPACE  --no-headers -o custom-columns=":metadata.name" | grep information)
        if [ -z "$pvc_name" ]; then
-               pvc_name=enrichmentservice-pvc
+               pvc_name=informationservice-pvc
        fi
        echo " Trying to reset pvc: "$pvc_name
 
-       __kube_clean_pvc $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE $pvc_name $ECS_CONTAINER_MNT_DIR
+       __kube_clean_pvc $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE $pvc_name $ICS_CONTAINER_MNT_DIR
 
        __log_test_pass
        return 0
diff --git a/test/common/kafkapc_api_functions.sh b/test/common/kafkapc_api_functions.sh
new file mode 100644 (file)
index 0000000..002657c
--- /dev/null
@@ -0,0 +1,648 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2020 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# This is a script that contains container/service management functions for the Kafka producer/consumer
+
+################ Test engine functions ################
+
+# Create the image var used during the test
+# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
+# <image-tag-suffix> is present only for images with staging, snapshot,release tags
+__KAFKAPC_imagesetup() {
+       __check_and_create_image_var KAFKAPC "KAFKAPC_IMAGE" "KAFKAPC_IMAGE_BASE" "KAFKAPC_IMAGE_TAG" LOCAL "$KAFKAPC_DISPLAY_NAME"
+}
+
+# Pull image from remote repo or use locally built image
+# arg: <pull-policy-override> <pull-policy-original>
+# <pull-policy-override> Shall be used for images allowing overriding. For example, use a local image when the test is started to use released images
+# <pull-policy-original> Shall be used for images that do not allow overriding
+# Both vars may contain: 'remote', 'remote-remove' or 'local'
+__KAFKAPC_imagepull() {
+       __check_and_pull_image $2 "$KAFKAPC_DISPLAY_NAME" $KAFKAPC_APP_NAME KAFKAPC_IMAGE
+}
+
+# Build image (only for simulators or interface stubs owned by the test environment)
+# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
+# <image-tag-suffix> is present only for images with staging, snapshot,release tags
+__KAFKAPC_imagebuild() {
+
+       cd ../$KAFKAPC_BUILD_DIR
+       echo " Building KAFKAPC - $KAFKAPC_DISPLAY_NAME - image: $KAFKAPC_IMAGE"
+       docker build  --build-arg NEXUS_PROXY_REPO=$NEXUS_PROXY_REPO -t $KAFKAPC_IMAGE . &> .dockererr
+       if [ $? -eq 0 ]; then
+               echo -e  $GREEN"  Build Ok"$EGREEN
+               __retag_and_push_image KAFKAPC_IMAGE
+               if [ $? -ne 0 ]; then
+                       exit 1
+               fi
+       else
+               echo -e $RED"  Build Failed"$ERED
+               ((RES_CONF_FAIL++))
+               cat .dockererr
+               echo -e $RED"Exiting...."$ERED
+               exit 1
+       fi
+}
+
+# Generate a string for each included image using the app display name and a docker images format string
+# If a custom image repo is used, the source image from the local repo is also listed
+# arg: <docker-images-format-string> <file-to-append>
+__KAFKAPC_image_data() {
+       echo -e "$KAFKAPC_DISPLAY_NAME\t$(docker images --format $1 $KAFKAPC_IMAGE)" >>   $2
+       if [ ! -z "$KAFKAPC_IMAGE_SOURCE" ]; then
+               echo -e "-- source image --\t$(docker images --format $1 $KAFKAPC_IMAGE_SOURCE)" >>   $2
+       fi
+}
+
+# Scale kubernetes resources to zero
+# All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then take no action.
+# This function is called for apps fully managed by the test script
+__KAFKAPC_kube_scale_zero() {
+       __kube_scale_all_resources $KUBE_SIM_NAMESPACE autotest KAFKAPC
+}
+
+# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then take no action.
+# This function is called for prestarted apps not managed by the test script.
+__KAFKAPC_kube_scale_zero_and_wait() {
+       echo -e $RED" KAFKAPC app is not scaled in this state"$ERED
+}
+
+# Delete all kube resources for the app
+# This function is called for apps managed by the test script.
+__KAFKAPC_kube_delete_all() {
+       __kube_delete_all_resources $KUBE_SIM_NAMESPACE autotest KAFKAPC
+}
+
+# Store docker logs
+# This function is called for apps managed by the test script.
+# args: <log-dir> <file-prefix>
+__KAFKAPC_store_docker_logs() {
+       if [ $RUNMODE == "KUBE" ]; then
+               kubectl  logs -l "autotest=KAFKAPC" -n $KUBE_SIM_NAMESPACE --tail=-1 > $1$2_kafkapc.log 2>&1
+       else
+               docker logs $KAFKAPC_APP_NAME > $1$2_kafkapc.log 2>&1
+       fi
+}
+
+# Initial setup of protocol, host and ports
+# This function is called for apps managed by the test script.
+# args: -
+__KAFKAPC_initial_setup() {
+       use_kafkapc_http
+}
+
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__KAFKAPC_statisics_setup() {
+       if [ $RUNMODE == "KUBE" ]; then
+               echo "KAFKAPC $KAFKAPC_APP_NAME $KUBE_SIM_NAMESPACE"
+       else
+               echo "KAFKAPC $KAFKAPC_APP_NAME"
+       fi
+}
+
+#######################################################
+
+#######################################################
+
+# Set http as the protocol to use for all communication to the Kafka procon
+# args: -
+# (Function for test scripts)
+use_kafkapc_http() {
+       __kafkapc_set_protocoll "http" $KAFKAPC_INTERNAL_PORT $KAFKAPC_EXTERNAL_PORT
+}
+
+# Set httpS as the protocol to use for all communication to the Kafka procon
+# args: -
+# (Function for test scripts)
+use_kafkapc_https() {
+       __kafkapc_set_protocoll "https" $KAFKAPC_INTERNAL_SECURE_PORT $KAFKAPC_EXTERNAL_SECURE_PORT
+}
+
+# Setup paths to svc/container for internal and external access
+# args: <protocol> <internal-port> <external-port>
+__kafkapc_set_protocoll() {
+       echo -e $BOLD"$KAFKAPC_DISPLAY_NAME protocol setting"$EBOLD
+       echo -e " Using $BOLD $1 $EBOLD towards $KAFKAPC_DISPLAY_NAME"
+
+       ## Access to Kafka procon
+
+       KAFKAPC_SERVICE_PATH=$1"://"$KAFKAPC_APP_NAME":"$2  # docker access, container->container and script->container via proxy
+       if [ $RUNMODE == "KUBE" ]; then
+               KAFKAPC_SERVICE_PATH=$1"://"$KAFKAPC_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+       fi
+
+       KAFKAPC_ADAPTER_TYPE="REST"
+       KAFKAPC_ADAPTER=$KAFKAPC_SERVICE_PATH
+
+       echo ""
+}
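+
+# Example of the resulting service path (illustrative sketch; the app name
+# "kafkapc", port 8090 and namespace "nonrtric-ft" are assumed values, not
+# taken from this change):
+#   docker mode: KAFKAPC_SERVICE_PATH=http://kafkapc:8090
+#   kube mode:   KAFKAPC_SERVICE_PATH=http://kafkapc.nonrtric-ft:8090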
+
+### Admin API functions Kafka procon
+
+###########################
+### Kafka Procon functions
+###########################
+
+# Export env vars for config files, docker compose and kube resources
+# args:
+__kafkapc_export_vars() {
+       export KAFKAPC_APP_NAME
+       export KAFKAPC_DISPLAY_NAME
+
+       export DOCKER_SIM_NWNAME
+       export KUBE_SIM_NAMESPACE
+
+       export KAFKAPC_IMAGE
+       export KAFKAPC_INTERNAL_PORT
+       export KAFKAPC_INTERNAL_SECURE_PORT
+       export KAFKAPC_EXTERNAL_PORT
+       export KAFKAPC_EXTERNAL_SECURE_PORT
+
+       export MR_KAFKA_SERVICE_PATH
+}
+
+
+# Start the Kafka procon in the simulator group
+# args: -
+# (Function for test scripts)
+start_kafkapc() {
+
+       echo -e $BOLD"Starting $KAFKAPC_DISPLAY_NAME"$EBOLD
+
+       if [ $RUNMODE == "KUBE" ]; then
+
+               # Check if app shall be fully managed by the test script
+               __check_included_image "KAFKAPC"
+               retcode_i=$?
+
+               # Check if app shall only be used by the test script
+               __check_prestarted_image "KAFKAPC"
+               retcode_p=$?
+
+               if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
+                       echo -e $RED"The $KAFKAPC_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+                       echo -e $RED"The $KAFKAPC_APP_NAME will not be started"$ERED
+                       exit
+               fi
+               if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
+                       echo -e $RED"The $KAFKAPC_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+                       echo -e $RED"The $KAFKAPC_APP_NAME will not be started"$ERED
+                       exit
+               fi
+
+               if [ $retcode_p -eq 0 ]; then
+                       echo -e " Using existing $KAFKAPC_APP_NAME deployment and service"
+                       echo " Setting RC replicas=1"
+                       __kube_scale deployment $KAFKAPC_APP_NAME $KUBE_SIM_NAMESPACE 1
+               fi
+
+               if [ $retcode_i -eq 0 ]; then
+                       echo -e " Creating $KAFKAPC_APP_NAME deployment and service"
+
+            __kube_create_namespace $KUBE_SIM_NAMESPACE
+
+                       __kafkapc_export_vars
+
+                       # Create service
+                       input_yaml=$SIM_GROUP"/"$KAFKAPC_COMPOSE_DIR"/"svc.yaml
+                       output_yaml=$PWD/tmp/kafkapc_svc.yaml
+                       __kube_create_instance service $KAFKAPC_APP_NAME $input_yaml $output_yaml
+
+                       # Create app
+                       input_yaml=$SIM_GROUP"/"$KAFKAPC_COMPOSE_DIR"/"app.yaml
+                       output_yaml=$PWD/tmp/kafkapc_app.yaml
+                       __kube_create_instance app $KAFKAPC_APP_NAME $input_yaml $output_yaml
+               fi
+
+               __check_service_start $KAFKAPC_APP_NAME $KAFKAPC_SERVICE_PATH$KAFKAPC_ALIVE_URL
+
+       else
+
+               # Check if docker app shall be fully managed by the test script
+               __check_included_image 'KAFKAPC'
+               if [ $? -eq 1 ]; then
+                       echo -e $RED"The Kafka procon app is not included as managed in this test script"$ERED
+                       echo -e $RED"The Kafka procon will not be started"$ERED
+                       exit
+               fi
+
+               __kafkapc_export_vars
+
+               __start_container $KAFKAPC_COMPOSE_DIR "" NODOCKERARGS 1 $KAFKAPC_APP_NAME
+
+        __check_service_start $KAFKAPC_APP_NAME $KAFKAPC_SERVICE_PATH$KAFKAPC_ALIVE_URL
+       fi
+    echo ""
+    return 0
+}
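+
+# Typical usage in a test script (illustrative sketch; assumes the app is
+# included as managed in the test case):
+#   use_kafkapc_http
+#   start_kafkapc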
+
+# Tests if a variable value in the KAFKAPC is equal to a target value, with an optional timeout.
+# Arg: <variable-name> <target-value> - This test sets pass or fail depending on whether the variable is
+# equal to the target or not.
+# Arg: <variable-name> <target-value> <timeout-in-sec>  - This test waits up to the timeout seconds
+# before setting pass or fail depending on whether the variable value becomes equal to the target
+# value or not.
+# (Function for test scripts)
+kafkapc_equal() {
+       if [ $# -eq 2 ] || [ $# -eq 3 ]; then
+               __var_test KAFKAPC "$KAFKAPC_SERVICE_PATH/" $1 "=" $2 $3
+       else
+               __print_err "Wrong args to kafkapc_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+       fi
+}
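+
+# Example (illustrative sketch; the counter name "topics" and the values are
+# assumptions, not defined by this change):
+#   kafkapc_equal topics 1 30     # wait up to 30 seconds for the counter to reach 1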
+
+# KAFKA PC API: Reset all, POST /reset
+# Arg: <response-code>
+# (Function for test scripts)
+kafkapc_api_reset() {
+       __log_conf_start $@
+
+       if [ $# -ne 1 ]; then
+               __print_err "<response-code>" $@
+               return 1
+       fi
+
+       res="$(__do_curl_to_api KAFKAPC POST /reset)"
+       status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_conf_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
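+
+# Example (illustrative sketch; the expected response code 200 is an assumption):
+#   kafkapc_api_reset 200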
+
+# KAFKA PC API: Create a topic of a data-type, PUT /topics/<topic>
+# Arg: <response-code> <topic-name>  <mime-type>
+# (Function for test scripts)
+kafkapc_api_create_topic() {
+       __log_conf_start $@
+
+    if [ $# -ne 3 ]; then
+        __print_err "<response-code> <topic-name>  <mime-type>" $@
+        return 1
+       fi
+
+       res="$(__do_curl_to_api KAFKAPC PUT /topics/$2?type=$3)"
+       status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_conf_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_conf_ok
+       return 0
+}
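+
+# Example (illustrative sketch; topic name, mime type and the expected 201 are
+# assumptions):
+#   kafkapc_api_create_topic 201 "unauthenticated.kafkatopic1" "text/plain"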
+
+# KAFKA PC API: Get topics, GET /topics
+# args: <response-code> [ EMPTY | [<topic>]+ ]
+# (Function for test scripts)
+kafkapc_api_get_topics() {
+       __log_test_start $@
+
+    if [ $# -lt 1 ]; then
+               __print_err "<response-code> [ EMPTY | [<topic>]+ ]" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC GET /topics)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+       if [ $# -gt 1 ]; then
+               body=${res:0:${#res}-3}
+               targetJson="["
+
+               for pid in ${@:2} ; do
+                       if [ "$targetJson" != "[" ]; then
+                               targetJson=$targetJson","
+                       fi
+                       if [ $pid != "EMPTY" ]; then
+                               targetJson=$targetJson"\"$pid\""
+                       fi
+               done
+               targetJson=$targetJson"]"
+               echo " TARGET JSON: $targetJson" >> $HTTPLOG
+               res=$(python3 ../common/compare_json.py "$targetJson" "$body")
+
+               if [ $res -ne 0 ]; then
+                       __log_test_fail_body
+                       return 1
+               fi
+       fi
+       __log_test_pass
+       return 0
+}
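+
+# Examples (illustrative sketch; topic names and response code are assumptions):
+#   kafkapc_api_get_topics 200 EMPTY
+#   kafkapc_api_get_topics 200 "kafkatopic1" "kafkatopic2"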
+
+# KAFKA PC API: Get a topic, GET /topic/<topic>
+# args: <response-code> <topic> <mime-type>
+# (Function for test scripts)
+kafkapc_api_get_topic() {
+       __log_test_start $@
+
+    if [ $# -ne 3 ]; then
+               __print_err "<response-code> <topic> <mime-type>" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC GET /topics/$2)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       body=${res:0:${#res}-3}
+       if [ "$body" != $3 ]; then
+               __log_test_fail_body
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
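+
+# Example (illustrative sketch; values are assumptions):
+#   kafkapc_api_get_topic 200 "kafkatopic1" "text/plain"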
+
+# KAFKA PC API: Start sending on a topic, POST /topic/<topic>/startsend
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_start_sending() {
+       __log_test_start $@
+
+    if [ $# -ne 2 ]; then
+               __print_err "<response-code> <topic>" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/startsend)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+# KAFKA PC API: Start receiving on a topic, POST /topic/<topic>/startreceive
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_start_receiving() {
+       __log_test_start $@
+
+    if [ $# -ne 2 ]; then
+               __print_err "<response-code> <topic>" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/startreceive)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
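+
+# Example (illustrative sketch; topic name and response codes are assumptions) -
+# enable both directions before exchanging messages:
+#   kafkapc_api_start_sending 200 "kafkatopic1"
+#   kafkapc_api_start_receiving 200 "kafkatopic1"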
+
+# KAFKA PC API: Stop sending on a topic, POST /topic/<topic>/stopsend
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_stop_sending() {
+       __log_test_start $@
+
+    if [ $# -ne 2 ]; then
+               __print_err "<response-code> <topic>" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/stopsend)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+# KAFKA PC API: Stop receiving on a topic, POST /topic/<topic>/stopreceive
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_stop_receiving() {
+       __log_test_start $@
+
+    if [ $# -ne 2 ]; then
+               __print_err "<response-code> <topic>" $@
+               return 1
+       fi
+
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/stopreceive)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+# KAFKA PC API: Send a message on a topic, POST /topic/<topic>/msg
+# args: <response-code> <topic> <mime-type> <msg>
+# (Function for test scripts)
+kafkapc_api_post_msg() {
+       __log_test_start $@
+
+    if [ $# -ne 4 ]; then
+               __print_err "<response-code> <topic> <mime-type> <msg>" $@
+               return 1
+       fi
+       payload="tmp/.kafkapayload"
+       echo -n $4 > $payload     # -n prevents a newline from being added...
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/msg $payload $3)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
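+
+# Example (illustrative sketch; topic, mime type and payload are assumptions):
+#   kafkapc_api_post_msg 200 "kafkatopic1" "text/plain" "Message------1"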
+
+
+# KAFKA PC API: Get a msg on a topic, GET /topic/<topic>/msg
+# args: <response-code> <topic>  ([ <mime-type>  <msg> ] | NOMSG )
+# (Function for test scripts)
+kafkapc_api_get_msg() {
+       __log_test_start $@
+
+    if [ $# -lt 3 ]; then
+               __print_err "<response-code> <topic>  ([ <mime-type>  <msg> ] | NOMSG )" $@
+               return 1
+       fi
+       mime_type="text/plain"
+       if [ ! -z "$3" ]; then
+               mime_type=$3
+       fi
+    res="$(__do_curl_to_api KAFKAPC GET /topics/$2/msg)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+       if [ $# -eq 4 ]; then
+               body=${res:0:${#res}-3}
+               if [ "$body" != "$4" ]; then
+                       __log_test_fail_body
+                       return 1
+               fi
+       fi
+
+       __log_test_pass
+       return 0
+}
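+
+# Examples (illustrative sketch; values, including the 204 for an empty topic,
+# are assumptions):
+#   kafkapc_api_get_msg 200 "kafkatopic1" "text/plain" "Message------1"
+#   kafkapc_api_get_msg 204 "kafkatopic1" NOMSG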
+
+# KAFKA PC API: Send a message from a file on a topic, POST /topic/<topic>/msg
+# args: <response-code> <topic> <mime-type> <file>
+# (Function for test scripts)
+kafkapc_api_post_msg_from_file() {
+       __log_test_start $@
+
+    if [ $# -ne 4 ]; then
+               __print_err "<response-code> <topic> <mime-type> <file>" $@
+               return 1
+       fi
+    res="$(__do_curl_to_api KAFKAPC POST /topics/$2/msg $4 $3)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+# KAFKA PC API: Get a msg on a topic and compare with file, GET /topic/<topic>/msg
+# args: <response-code> <topic>  <mime-type>  <file>
+# (Function for test scripts)
+kafkapc_api_get_msg_from_file() {
+       __log_test_start $@
+
+    if [ $# -ne 4 ]; then
+               __print_err "<response-code> <topic>  <mime-type>  <file> " $@
+               return 1
+       fi
+
+       if [ -f $4 ]; then
+               msgfile=$(cat $4)
+       else
+               __log_test_fail_general "Message file "$4", does not exist"
+               return 1
+       fi
+
+       mime_type="text/plain"
+
+    res="$(__do_curl_to_api KAFKAPC GET /topics/$2/msg)"
+    status=${res:${#res}-3}
+
+       if [ $status -ne $1 ]; then
+               __log_test_fail_status_code $1 $status
+               return 1
+       fi
+
+       body=${res:0:${#res}-3}
+       if [ "$body" != "$msgfile" ]; then
+               __log_test_fail_body
+               return 1
+       fi
+
+       __log_test_pass
+       return 0
+}
+
+
+# Create json file for payload
+# arg: <size-in-kb> <filename>
+kafkapc_api_generate_json_payload_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<size-in-kb> <json-file>" $@
+        return 1
+    fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+               __log_conf_fail_general "Only size between 1k and 10000k supported"
+               return 1
+       fi
+       echo -n "{\"abcdefghijklmno\":[" > $2
+       LEN=$(($1*100-2))
+       echo -n "\""ABCDEFG"\"" >> $2
+       for ((idx=1; idx<$LEN; idx++))
+       do
+               echo -n ",\"ABCDEFG\"" >> $2
+       done
+       echo -n "]}" >> $2
+
+       __log_conf_ok
+       return 0
+}
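+
+# Example (illustrative sketch; file path, size and response code are
+# assumptions) - generate a 1 kB json payload and send it on a topic:
+#   kafkapc_api_generate_json_payload_file 1 ./tmp/kafka_payload.json
+#   kafkapc_api_post_msg_from_file 200 "kafkatopic1" "application/json" ./tmp/kafka_payload.json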
+
+# Create text file for payload
+# arg: <size-in-kb> <filename>
+kafkapc_api_generate_text_payload_file() {
+       __log_conf_start $@
+    if [ $# -ne 2 ]; then
+        __print_err "<size-in-kb> <text-file>" $@
+        return 1
+    fi
+       if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+               __log_conf_fail_general "Only size between 1k and 10000k supported"
+               return 1
+       fi
+       echo -n "" > $2
+       LEN=$(($1*100))
+       for ((idx=0; idx<$LEN; idx++))
+       do
+               echo -n "ABCDEFGHIJ" >> $2
+       done
+
+       __log_conf_ok
+       return 0
+}
\ No newline at end of file
index da3e34d..3e00ec3 100755 (executable)
@@ -173,7 +173,7 @@ __DMAAPMR_store_docker_logs() {
                        kubectl logs -n $KUBE_ONAP_NAMESPACE $podname --tail=-1 > $1$2_$podname.log 2>&1
                done
        else
-               docker logs $MR_DMAAP_APP_NAME > $1$2mr.log 2>&1
+               docker logs $MR_DMAAP_APP_NAME > $1$2_mr.log 2>&1
                docker logs $MR_KAFKA_APP_NAME > $1$2_mr_kafka.log 2>&1
                docker logs $MR_ZOOKEEPER_APP_NAME > $1$2_mr_zookeeper.log 2>&1
        fi
@@ -199,9 +199,9 @@ __DMAAPMR_initial_setup() {
 # args: -
 __MR_statisics_setup() {
        if [ $RUNMODE == "KUBE" ]; then
-               echo "MR $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE"
+               echo "MR-STUB $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE"
        else
-               echo "MR $MR_STUB_APP_NAME"
+               echo "MR-STUB $MR_STUB_APP_NAME"
        fi
 }
 
@@ -211,9 +211,9 @@ __MR_statisics_setup() {
 # args: -
 __DMAAPMR_statisics_setup() {
        if [ $RUNMODE == "KUBE" ]; then
-               echo ""
+               echo "KAFKA $MR_KAFKA_APP_NAME $KUBE_ONAP_NAMESPACE MESSAGE-ROUTER $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE ZOOKEEPER $MR_ZOOKEEPER_APP_NAME $KUBE_ONAP_NAMESPACE"
        else
-               echo ""
+               echo "KAFKA $MR_KAFKA_APP_NAME MESSAGE-ROUTER $MR_DMAAP_APP_NAME ZOOKEEPER $MR_ZOOKEEPER_APP_NAME"
        fi
 }
 
@@ -273,7 +273,7 @@ use_mr_https() {
 # args: <protocol> <internal-port> <external-port> <internal-secure-port> <external-secure-port>
 __mr_set_protocoll() {
        echo -e $BOLD"$MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
 
        ## Access to Dmaap mediator
 
@@ -294,12 +294,14 @@ __mr_set_protocoll() {
 
        MR_SERVICE_PATH=$MR_STUB_PATH # access container->container, docker -  access pod->svc, kube
        MR_KAFKA_SERVICE_PATH=""
+       MR_ZOOKEEPER_SERVICE_PATH=""
        __check_included_image "DMAAPMR"
        if [ $? -eq 0 ]; then
                MR_SERVICE_PATH=$MR_DMAAP_PATH # access container->container, docker -  access pod->svc, kube
                MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
 
                MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME":"$MR_KAFKA_PORT
+               MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME":"$MR_ZOOKEEPER_PORT
        fi
 
        # For directing calls from script to e.g.PMS via message rounter
@@ -321,12 +323,14 @@ __mr_set_protocoll() {
                        MR_SERVICE_PATH=$MR_DMAAP_PATH
                        MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
                        MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+                       MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
                fi
                __check_prestarted_image "DMAAPMR"
                if [ $? -eq 0 ]; then
                        MR_SERVICE_PATH=$MR_DMAAP_PATH
                        MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
                        MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+                       MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
                fi
 
                # For directing calls from script to e.g.PMS, via message rounter
@@ -343,74 +347,6 @@ __mr_set_protocoll() {
 
 }
 
-
-# use_mr_http() {                2                3                  4                5                  6                       7
-#      __mr_set_protocoll "http" $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_SECURE_PORT $MR_EXT_SECURE_PORT
-# }
-
-# use_mr_https() {
-#      __mr_set_protocoll "https" $MR_INTERNAL_SECURE_PORT $MR_EXTERNAL_SECURE_PORT
-# }
-
-# # Setup paths to svc/container for internal and external access
-# # args: <protocol> <internal-port> <external-port> <mr-stub-internal-port> <mr-stub-external-port> <mr-stub-internal-secure-port> <mr-stub-external-secure-port>
-# __mr_set_protocoll() {
-#      echo -e $BOLD"$MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME protocol setting"$EBOLD
-#      echo -e " Using $BOLD http $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
-
-#      ## Access to Dmaap mediator
-
-#      MR_HTTPX=$1
-
-#      # Access via test script
-#      MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME":"$2  # access from script via proxy, docker
-#      MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME":"$2 # access from script via proxy, docker
-#      MR_DMAAP_ADAPTER_HTTP="" # Access to dmaap mr via proyx - set only if app is included
-
-#      MR_SERVICE_PATH=$MR_STUB_PATH # access container->container, docker -  access pod->svc, kube
-#      __check_included_image "DMAAPMR"
-#      if [ $? -eq 0 ]; then
-#              MR_SERVICE_PATH=$MR_DMAAP_PATH # access container->container, docker -  access pod->svc, kube
-#              MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-#      fi
-
-#      # For directing calls from script to e.g.PMS via message rounter
-#      # These cases shall always go though the  mr-stub
-#      MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$4
-#      MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$6
-
-#      MR_DMAAP_ADAPTER_TYPE="REST"
-
-#      if [ $RUNMODE == "KUBE" ]; then
-#              MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
-#              MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
-
-#              MR_SERVICE_PATH=$MR_STUB_PATH
-#              __check_included_image "DMAAPMR"
-#              if [ $? -eq 0 ]; then
-#                      MR_SERVICE_PATH=$MR_DMAAP_PATH
-#                      MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-#              fi
-#              __check_prestarted_image "DMAAPMR"
-#              if [ $? -eq 0 ]; then
-#                      MR_SERVICE_PATH=$MR_DMAAP_PATH
-#                      MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-#              fi
-
-#              # For directing calls from script to e.g.PMS, via message rounter
-#              # These calls shall always go though the  mr-stub
-#              MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$5
-#              MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$7
-#      fi
-
-#      # For calls from script to the mr-stub
-#      MR_STUB_ADAPTER=$MR_STUB_PATH
-#      MR_STUB_ADAPTER_TYPE="REST"
-
-#      echo ""
-
-# }
-
 # Export env vars for config files, docker compose and kube resources
 # args: -
 __dmaapmr_export_vars() {
@@ -435,6 +371,10 @@ __dmaapmr_export_vars() {
        export MR_ZOOKEEPER_PORT
 
        export MR_KAFKA_SERVICE_PATH
+       export MR_ZOOKEEPER_SERVICE_PATH
+
+       export MR_KAFKA_KUBE_NODE_PORT
+       export MR_KAFKA_DOCKER_LOCALHOST_PORT
 }
 
 # Export env vars for config files, docker compose and kube resources
@@ -457,6 +397,7 @@ __mr_export_vars() {
        export MR_EXTERNAL_PORT
 
        export MR_KAFKA_SERVICE_PATH
+       export MR_ZOOKEEPER_SERVICE_PATH
 }
 
 
@@ -569,36 +510,10 @@ start_mr() {
                        __kube_create_instance app $MR_DMAAP_APP_NAME $input_yaml $output_yaml
 
 
-                       # echo " Retrieving host and ports for service..."
-                       # MR_DMAAP_HOST_NAME=$(__kube_get_service_host $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE)
-
-                       # MR_EXT_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "http")
-                       # MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "https")
-
-                       # echo " Host IP, http port, https port: $MR_DMAAP_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
-                       # MR_SERVICE_PATH=""
-                       # if [ $MR_HTTPX == "http" ]; then
-                       #       MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_PORT
-                       #       MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
-                       # else
-                       #       MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_SECURE_PORT
-                       #       MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
-                       # fi
-
                        __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
 
-                       # Cannot create topics, returns 400 forever.....topics will be created during pipeclean below
-                       #__create_topic $MR_READ_TOPIC "Topic for reading policy messages"
-
-                       #__create_topic $MR_WRITE_TOPIC "Topic for writing policy messages"
+                       echo " Kafka TCP node port $MR_KAFKA_KUBE_NODE_PORT"
 
-#                      __dmaap_pipeclean $MR_READ_TOPIC "/events/$MR_READ_TOPIC" "/events/$MR_READ_TOPIC/users/policy-agent?timeout=1000&limit=100"
-#
-#                      __dmaap_pipeclean $MR_WRITE_TOPIC "/events/$MR_WRITE_TOPIC" "/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=1000&limit=100"
-
-
-                       #__dmaap_pipeclean "unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=1000&limit=100"
-                       #__dmaap_pipeclean "unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs?timeout=1000&limit=100"
 
                        if [ $# -gt 0 ]; then
                                if [ $(($#%3)) -eq 0 ]; then
@@ -650,41 +565,8 @@ start_mr() {
 
                fi
 
-               # echo " Retrieving host and ports for service..."
-               # MR_STUB_HOST_NAME=$(__kube_get_service_host $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE)
-
-               # MR_EXT_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "http")
-               # MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "https")
-
-               # echo " Host IP, http port, https port: $MR_STUB_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
-               # if [ $MR_HTTPX == "http" ]; then
-               #       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
-               #       if [ -z "$MR_SERVICE_PATH" ]; then
-               #               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
-               #       fi
-               # else
-               #       MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
-               #       if [ -z "$MR_SERVICE_PATH" ]; then
-               #               MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
-               #       fi
-               # fi
-               # MR_ADAPTER_HTTP="http://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
-               # MR_ADAPTER_HTTPS="https://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
-
-               # MR_STUB_ADAPTER=$MR_STUB_PATH
-               # MR_STUB_ADAPTER_TYPE="REST"
-
                __check_service_start $MR_STUB_APP_NAME $MR_STUB_PATH$MR_STUB_ALIVE_URL
 
-               echo -ne " Service $MR_STUB_APP_NAME - reset  "$SAMELINE
-               result=$(__do_curl $MR_STUB_PATH/reset)
-               if [ $? -ne 0 ]; then
-                       echo -e " Service $MR_STUB_APP_NAME - reset  $RED Failed $ERED - will continue"
-               else
-                       echo -e " Service $MR_STUB_APP_NAME - reset  $GREEN OK $EGREEN"
-               fi
-
-
        else
 
                __check_included_image 'DMAAPMR'
@@ -732,15 +614,7 @@ start_mr() {
 
                        __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
 
-
-                       # Cannot create topics, returns 400 forever.....topics will be created during pipeclean below
-                       #__create_topic $MR_READ_TOPIC "Topic for reading policy messages"
-
-                       #__create_topic $MR_WRITE_TOPIC "Topic for writing policy messages"
-
-                       #__dmaap_pipeclean $MR_READ_TOPIC "/events/$MR_READ_TOPIC" "/events/$MR_READ_TOPIC/users/policy-agent?timeout=1000&limit=100"
-
-                       #__dmaap_pipeclean $MR_WRITE_TOPIC "/events/$MR_WRITE_TOPIC" "/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=1000&limit=100"
+                       echo " Kafka TCP node port $MR_KAFKA_DOCKER_LOCALHOST_PORT"
 
                        if [ $# -gt 0 ]; then
                                if [ $(($#%3)) -eq 0 ]; then
@@ -755,13 +629,7 @@ start_mr() {
                                fi
                        fi
 
-                       #__dmaap_pipeclean "unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=1000&limit=100"
-                       #__dmaap_pipeclean "unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs?timeout=1000&limit=100"
-
-                       echo " Current topics:"
-                       curlString="$MR_DMAAP_PATH/topics"
-                       result=$(__do_curl "$curlString")
-                       echo $result | indent2
+                       dmaap_api_print_topics
                fi
 
                __mr_export_vars
@@ -848,6 +716,15 @@ __dmaap_pipeclean() {
        return 1
 }
 
+# Helper function to list the current topics in DMAAP MR
+# args: -
+dmaap_api_print_topics() {
+       echo " Current topics:"
+       curlString="$MR_DMAAP_PATH/topics"
+       result=$(__do_curl "$curlString")
+       echo $result | indent2
+}
+
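
A minimal usage sketch for the helper above, assuming the DMAAP MR service has already been started so that MR_DMAAP_PATH points at it (both lines already appear in start_mr):

    __check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
    dmaap_api_print_topics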
 
 ### Generic test cases for variable checking
 
@@ -1017,12 +894,12 @@ mr_api_generate_json_payload_file() {
                __log_conf_fail_general "Only size between 1k and 10000k supported"
                return 1
        fi
-       echo -n "{\"a\":[" > $2
-       LEN=$(($1*150))
-       echo -n "\"a0\"" >> $2
+       echo -n "{\"abcdefghijklmno\":[" > $2
+       LEN=$(($1*100-2))
+       echo -n "\""ABCDEFG"\"" >> $2
        for ((idx=1; idx<$LEN; idx++))
        do
-               echo -n ",\"a$idx\"" >> $2
+               echo -n ",\"ABCDEFG\"" >> $2
        done
        echo -n "]}" >> $2
 
@@ -1030,7 +907,7 @@ mr_api_generate_json_payload_file() {
        return 0
 }
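
The reworked payload layout above makes the generated json file size track the requested kb count closely: the header {"abcdefghijklmno":[ is 20 bytes, the first "ABCDEFG" element is 9 bytes, each following ,"ABCDEFG" element adds 10 bytes and the closing ]} adds 2 bytes, so with LEN=<size-in-kb>*100-2 the file ends up at roughly <size-in-kb>*1000+1 bytes.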
 
-# Create tet file for payload
+# Create text file for payload
 # arg: <size-in-kb> <filename>
 mr_api_generate_text_payload_file() {
        __log_conf_start $@
similarity index 97%
rename from test/common/gateway_api_functions.sh
rename to test/common/ngw_api_functions.sh
index d8f1707..9e29278 100644 (file)
@@ -128,7 +128,7 @@ use_gateway_https() {
 # args: <protocol> <internal-port> <external-port>
 __gateway_set_protocoll() {
        echo -e $BOLD"$NRT_GATEWAY_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $NRT_GATEWAY_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $NRT_GATEWAY_DISPLAY_NAME"
 
        ## Access to nonrtric gateway
 
@@ -199,12 +199,12 @@ __gateway_export_vars() {
 
        if [ $RUNMODE == "KUBE" ]; then
                export POLICY_AGENT_EXTERNAL_SECURE_PORT
-               export ECS_EXTERNAL_SECURE_PORT
+               export ICS_EXTERNAL_SECURE_PORT
                export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
-               export ECS_DOMAIN_NAME=$ECS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
+               export ICS_DOMAIN_NAME=$ICS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
        else
                export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME
-               export ECS_DOMAIN_NAME=$ECS_APP_NAME
+               export ICS_DOMAIN_NAME=$ICS_APP_NAME
        fi
 }
 
@@ -319,11 +319,11 @@ gateway_pms_get_status() {
        return 0
 }
 
-# API Test function: GET /ei-producer/v1/eitypes towards ECS
+# API Test function: GET /ei-producer/v1/eitypes towards ICS
 # Note: This is just to test service response
 # args: <response-code>
 # (Function for test scripts)
-gateway_ecs_get_types() {
+gateway_ics_get_types() {
        __log_test_start $@
     if [ $# -ne 1 ]; then
                __print_err "<response-code>" $@
similarity index 92%
rename from test/common/agent_api_functions.sh
rename to test/common/pa_api_functions.sh
index 4cedad1..7c91705 100644 (file)
@@ -149,7 +149,7 @@ use_agent_dmaap_https() {
 # args: <protocol> <internal-port> <external-port>
 __agent_set_protocoll() {
        echo -e $BOLD"$POLICY_AGENT_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $POLICY_AGENT_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $POLICY_AGENT_DISPLAY_NAME"
 
        ## Access to Dmaap adapter
 
@@ -444,6 +444,126 @@ start_stopped_policy_agent() {
 }
 
 
+# Function to prepare the consul configuration according to the current simulator configuration
+# args: SDNC|NOSDNC <output-file>
+# (Function for test scripts)
+prepare_consul_config() {
+       echo -e $BOLD"Prepare Consul config"$EBOLD
+
+       echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
+
+       if [ $# != 2 ];  then
+               ((RES_CONF_FAIL++))
+       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
+               exit 1
+       fi
+
+       if [ $1 == "SDNC" ]; then
+               echo -e " Config$BOLD including SDNC$EBOLD configuration"
+       elif [ $1 == "NOSDNC" ];  then
+               echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
+       else
+               ((RES_CONF_FAIL++))
+       __print_err "need two args,  SDNC|NOSDNC <output-file>" $@
+               exit 1
+       fi
+
+       config_json="\n            {"
+       if [ $1 == "SDNC" ]; then
+               config_json=$config_json"\n   \"controller\": ["
+               config_json=$config_json"\n                     {"
+               config_json=$config_json"\n                       \"name\": \"$SDNC_APP_NAME\","
+               config_json=$config_json"\n                       \"baseUrl\": \"$SDNC_SERVICE_PATH\","
+               config_json=$config_json"\n                       \"userName\": \"$SDNC_USER\","
+               config_json=$config_json"\n                       \"password\": \"$SDNC_PWD\""
+               config_json=$config_json"\n                     }"
+               config_json=$config_json"\n   ],"
+       fi
+
+       config_json=$config_json"\n   \"streams_publishes\": {"
+       config_json=$config_json"\n                            \"dmaap_publisher\": {"
+       config_json=$config_json"\n                              \"type\": \"message-router\","
+       config_json=$config_json"\n                              \"dmaap_info\": {"
+       config_json=$config_json"\n                                \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
+       config_json=$config_json"\n                              }"
+       config_json=$config_json"\n                            }"
+       config_json=$config_json"\n   },"
+       config_json=$config_json"\n   \"streams_subscribes\": {"
+       config_json=$config_json"\n                             \"dmaap_subscriber\": {"
+       config_json=$config_json"\n                               \"type\": \"message-router\","
+       config_json=$config_json"\n                               \"dmaap_info\": {"
+       config_json=$config_json"\n                                   \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
+       config_json=$config_json"\n                                 }"
+       config_json=$config_json"\n                               }"
+       config_json=$config_json"\n   },"
+
+       config_json=$config_json"\n   \"ric\": ["
+
+       if [ $RUNMODE == "KUBE" ]; then
+               result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
+               rics=""
+               ric_cntr=0
+               if [ $? -eq 0 ] && [ ! -z "$result" ]; then
+                       for im in $result; do
+                               if [[ $im != *"-0" ]]; then
+                                       ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
+                                       rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
+                                       let ric_cntr=ric_cntr+1
+                               fi
+                       done
+               fi
+               if [ $ric_cntr -eq 0 ]; then
+                       echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
+               fi
+       else
+               rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+               if [ $? -ne 0 ] || [ -z "$rics" ]; then
+                       echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
+                       ((RES_CONF_FAIL++))
+                       return 1
+               fi
+       fi
+       cntr=0
+       for ric in $rics; do
+               if [ $cntr -gt 0 ]; then
+                       config_json=$config_json"\n          ,"
+               fi
+               config_json=$config_json"\n          {"
+               if [ $RUNMODE == "KUBE" ]; then
+                       ric_id=${ric%.*.*} #extract pod id from full hostname
+                       ric_id=$(echo "$ric_id" | tr '-' '_')
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               ric_id=$ric
+                       else
+                               ric_id=$(echo "$ric" | tr '-' '_')  #ric id still needs underscore as it is different from the container name
+                       fi
+               fi
+               echo " Found a1 sim: "$ric_id
+               config_json=$config_json"\n            \"name\": \"$ric_id\","
+               config_json=$config_json"\n            \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
+               if [ $1 == "SDNC" ]; then
+                       config_json=$config_json"\n            \"controller\": \"$SDNC_APP_NAME\","
+               fi
+               config_json=$config_json"\n            \"managedElementIds\": ["
+               config_json=$config_json"\n              \"me1_$ric_id\","
+               config_json=$config_json"\n              \"me2_$ric_id\""
+               config_json=$config_json"\n            ]"
+               config_json=$config_json"\n          }"
+               let cntr=cntr+1
+       done
+
+       config_json=$config_json"\n           ]"
+       config_json=$config_json"\n}"
+
+       if [ $RUNMODE == "KUBE" ]; then
+               config_json="{\"config\":"$config_json"}"
+       fi
+
+       printf "$config_json">$2
+
+       echo ""
+}
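
A usage sketch for prepare_consul_config as added above (the output file names are illustrative; a test script passes whatever path it later loads into consul or the agent):

    prepare_consul_config NOSDNC ".consul_config_no_sdnc.json"     # config without the SDNC controller section
    prepare_consul_config SDNC   ".consul_config_with_sdnc.json"   # config including the SDNC controller section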
 
 # Load the the appl config for the agent into a config map
 agent_load_config() {
index 6c3ce23..f792d69 100644 (file)
@@ -139,7 +139,7 @@ use_prod_stub_https() {
 # args: <protocol> <internal-port> <external-port>
 __prod_stub_set_protocoll() {
        echo -e $BOLD"$PROD_STUB_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $PROD_STUB_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $PROD_STUB_DISPLAY_NAME"
 
        ## Access to Prod stub sim
 
@@ -161,7 +161,6 @@ __prod_stub_set_protocoll() {
 # args:
 __prodstub_export_vars() {
        export PROD_STUB_APP_NAME
-       export PROD_STUB_APP_NAME_ALIAS
        export PROD_STUB_DISPLAY_NAME
 
        export DOCKER_SIM_NWNAME
@@ -193,13 +192,13 @@ start_prod_stub() {
                retcode_p=$?
 
                if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
-                       echo -e $RED"The $ECS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
-                       echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+                       echo -e $RED"The $ICS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+                       echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
                        exit
                fi
                if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
-                       echo -e $RED"The $ECS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
-                       echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+                       echo -e $RED"The $ICS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+                       echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
                        exit
                fi
 
@@ -439,7 +438,7 @@ prodstub_check_jobdata_2() {
        __log_test_fail_general "Template file "$7" for jobdata, does not exist"
         return 1
     fi
-       if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+       if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
                targetJson="{\"info_job_identity\":\"$3\",\"info_type_identity\":\"$4\",\"target_uri\":\"$5\",\"owner\":\"$6\", \"info_job_data\":$jobfile,\"last_updated\":\"????\"}"
        else
                targetJson="{\"ei_job_identity\":\"$3\",\"ei_type_identity\":\"$4\",\"target_uri\":\"$5\",\"owner\":\"$6\", \"ei_job_data\":$jobfile,\"last_updated\":\"????\"}"
similarity index 98%
rename from test/common/rapp_catalogue_api_functions.sh
rename to test/common/rc_api_functions.sh
index 537bc0c..3766d19 100644 (file)
@@ -116,7 +116,7 @@ use_rapp_catalogue_https() {
 # args: <protocol> <internal-port> <external-port>
 __rapp_catalogue_set_protocoll() {
        echo -e $BOLD"$RAPP_CAT_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $RAPP_CAT_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $RAPP_CAT_DISPLAY_NAME"
 
        ## Access to Rapp catalogue
 
@@ -234,7 +234,7 @@ rc_equal() {
                #__var_test RC "$LOCALHOST_HTTP:$RC_EXTERNAL_PORT/" $1 "=" $2 $3
                __var_test RC "$RC_SERVICE_PATH/" $1 "=" $2 $3
        else
-               __print_err "Wrong args to ecs_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+               __print_err "Wrong args to rc_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
        fi
 }
 
similarity index 88%
rename from test/common/ricsimulator_api_functions.sh
rename to test/common/ricsim_api_functions.sh
index 695b535..435c208 100644 (file)
@@ -96,27 +96,34 @@ __RICSIM_initial_setup() {
 # This function is called for apps managed by the test script as well as for prestarted apps.
 # args: -
 __RICSIM_statisics_setup() {
-       if [ $RUNMODE == "KUBE" ]; then
-               echo ""
-       else
-               echo ""
-       fi
+       for ((RICSIM_INSTANCE=10; RICSIM_INSTANCE>0; RICSIM_INSTANCE-- )); do
+               if [ $RUNMODE == "KUBE" ]; then
+                       RICSIM_INSTANCE_KUBE=$(($RICSIM_INSTANCE-1))
+                       echo -n " RICSIM_G1_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+                       echo -n " RICSIM_G2_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+                       echo -n " RICSIM_G3_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+               else
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g1_$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g2_$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g3_$RICSIM_INSTANCE "
+                       else
+                               echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE "
+                               echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE "
+                       fi
+               fi
+       done
 }
 
 #######################################################
 
 
 RIC_SIM_HTTPX="http"
-RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
 RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
 
 
-#Vars for A1 interface version and container count
-G1_A1_VERSION=""
-G2_A1_VERSION=""
-G3_A1_VERSION=""
-G4_A1_VERSION=""
-G5_A1_VERSION=""
+#Vars for container count
 G1_COUNT=0
 G2_COUNT=0
 G3_COUNT=0
@@ -132,7 +139,6 @@ use_simulator_http() {
        echo -e $BOLD"RICSIM protocol setting"$EBOLD
        echo -e " Using $BOLD http $EBOLD towards the simulators"
        RIC_SIM_HTTPX="http"
-       RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
        RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
        echo ""
 }
@@ -141,7 +147,6 @@ use_simulator_https() {
        echo -e $BOLD"RICSIM protocol setting"$EBOLD
        echo -e " Using $BOLD https $EBOLD towards the simulators"
        RIC_SIM_HTTPX="https"
-       RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
        RIC_SIM_PORT=$RIC_SIM_INTERNAL_SECURE_PORT
        echo ""
 }
@@ -201,19 +206,14 @@ start_ric_simulators() {
        #Set env var for simulator count and A1 interface version for the given group
        if [ $1 == "$RIC1" ]; then
                G1_COUNT=$2
-               G1_A1_VERSION=$3
        elif [ $1 == "$RIC2" ]; then
                G2_COUNT=$2
-               G2_A1_VERSION=$3
        elif [ $1 == "$RIC3" ]; then
                G3_COUNT=$2
-               G3_A1_VERSION=$3
        elif [ $1 == "$RIC4" ]; then
                G4_COUNT=$2
-               G4_A1_VERSION=$3
        elif [ $1 == "$RIC5" ]; then
                G5_COUNT=$2
-               G5_A1_VERSION=$3
        else
                ((RES_CONF_FAIL++))
                __print_err "need three args, $RIC1|$RIC2|$RIC3|$RIC4|$RIC5 <count> <interface-id>" $@
@@ -268,22 +268,34 @@ start_ric_simulators() {
                # Create .env file to compose project, all ric container will get this prefix
                echo "COMPOSE_PROJECT_NAME="$RIC_SIM_PREFIX > $SIM_GROUP/$RIC_SIM_COMPOSE_DIR/.env
 
-               export G1_A1_VERSION
-               export G2_A1_VERSION
-               export G3_A1_VERSION
-               export G4_A1_VERSION
-               export G5_A1_VERSION
+               #extract service name (group), g1, g2, g3, g4 or g5 from var $1
+               #E.g. ricsim_g1 -> g1 is the service name
+               TMP_GRP=$1
+               RICSIM_COMPOSE_SERVICE_NAME=$(echo "${TMP_GRP##*_}")
+
+               export RICSIM_COMPOSE_A1_VERSION=$3
+               export RICSIM_COMPOSE_SERVICE_NAME
                export RIC_SIM_INTERNAL_PORT
                export RIC_SIM_INTERNAL_SECURE_PORT
                export RIC_SIM_CERT_MOUNT_DIR
                export DOCKER_SIM_NWNAME
                export RIC_SIM_DISPLAY_NAME
 
-               docker_args="--scale g1=$G1_COUNT --scale g2=$G2_COUNT --scale g3=$G3_COUNT --scale g4=$G4_COUNT --scale g5=$G5_COUNT"
+               docker_args="--no-recreate --scale $RICSIM_COMPOSE_SERVICE_NAME=$2"
+
+               #Create a list of container names
+               #Will be <ricsim-prefix>_<service-name>_<index>
+               # or
+               # <ricsim-prefix>-<service-name>-<index>
                app_data=""
                cntr=1
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       app_name_prefix=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"
+               else
+                       app_name_prefix=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"
+               fi
                while [ $cntr -le $2 ]; do
-                       app=$1"_"$cntr
+                       app=$app_name_prefix$cntr
                        app_data="$app_data $app"
                        let cntr=cntr+1
                done
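
As the naming comment above indicates, the generated container names differ only in the separator. For example, with RIC_SIM_PREFIX=ricsim and service name g1 (values illustrative), instance 1 becomes:

    ricsim_g1_1   # docker-compose V1
    ricsim-g1-1   # docker-compose V2 and later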
@@ -292,7 +304,11 @@ start_ric_simulators() {
 
                cntr=1
                while [ $cntr -le $2 ]; do
-                       app=$1"_"$cntr
+                       if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                               app=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"$cntr
+                       else
+                               app=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"$cntr
+                       fi
                        __check_service_start $app $RIC_SIM_HTTPX"://"$app:$RIC_SIM_PORT$RIC_SIM_ALIVE_URL
                        let cntr=cntr+1
                done
@@ -322,7 +338,12 @@ __find_sim_host() {
                ric_setname="${ricname%-*}"  #Extract the stateful set name
                echo $RIC_SIM_HTTPX"://"$ricname.$ric_setname.$KUBE_A1SIM_NAMESPACE":"$RIC_SIM_PORT
        else
-               echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+               if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+                       echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+               else
+                       ricname=$(echo "$1" | tr '_' '-')
+                       echo $RIC_SIM_HTTPX"://"$ricname":"$RIC_SIM_PORT
+               fi
 
        fi
 }
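
A sketch of the docker branch of __find_sim_host above, assuming docker-compose V2 naming, http towards the simulators and an illustrative RIC_SIM_PORT of 8085:

    __find_sim_host ricsim_g1_1
    # -> http://ricsim-g1-1:8085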
similarity index 93%
rename from test/common/controller_api_functions.sh
rename to test/common/sdnc_api_functions.sh
index b3ef07b..3ac0a6c 100644 (file)
@@ -140,15 +140,15 @@ use_sdnc_https() {
 # args: <protocol> <internal-port> <external-port>
 __sdnc_set_protocoll() {
        echo -e $BOLD"$SDNC_DISPLAY_NAME protocol setting"$EBOLD
-       echo -e " Using $BOLD http $EBOLD towards $SDNC_DISPLAY_NAME"
+       echo -e " Using $BOLD $1 $EBOLD towards $SDNC_DISPLAY_NAME"
 
        ## Access to SDNC
 
        SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME":"$2  # docker access, container->container and script->container via proxy
-       SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME":"$1$SDNC_API_URL
+       SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME":"$2$SDNC_API_URL
        if [ $RUNMODE == "KUBE" ]; then
                SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME.$KUBE_SDNC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
-               SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.KUBE_SDNC_NAMESPACE":"$1$SDNC_API_URL
+               SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.$KUBE_SDNC_NAMESPACE":"$3$SDNC_API_URL
        fi
        echo ""
 
@@ -380,16 +380,13 @@ __do_curl_to_controller() {
 controller_api_get_A1_policy_ids() {
        __log_test_start $@
 
-       ric_id=$3
-       if [ $RUNMODE == "KUBE" ]; then
-               ric_id=$(get_kube_sim_host $3)
-       fi
+       ric_id=$(__find_sim_host $3)
     paramError=1
     if [ $# -gt 3 ] && [ $2 == "OSC" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies"
+        url="$ric_id/a1-p/policytypes/$4/policies"
                paramError=0
     elif [ $# -gt 2 ] && [ $2 == "STD" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies"
+        url="$ric_id/A1-P/v1/policies"
         paramError=0
        fi
 
@@ -446,13 +443,10 @@ controller_api_get_A1_policy_ids() {
 controller_api_get_A1_policy_type() {
        __log_test_start $@
 
-       ric_id=$3
-       if [ $RUNMODE == "KUBE" ]; then
-               ric_id=$(get_kube_sim_host $3)
-       fi
+       ric_id=$(__find_sim_host $3)
     paramError=1
     if [ $# -gt 3 ] && [ $2 == "OSC" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4"
+        url="$ric_id/a1-p/policytypes/$4"
                paramError=0
        fi
 
@@ -500,16 +494,13 @@ controller_api_get_A1_policy_type() {
 controller_api_delete_A1_policy() {
        __log_test_start $@
 
-       ric_id=$3
-       if [ $RUNMODE == "KUBE" ]; then
-               ric_id=$(get_kube_sim_host $3)
-       fi
+       ric_id=$(__find_sim_host $3)
     paramError=1
     if [ $# -eq 5 ] && [ $2 == "OSC" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5"
+        url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5"
                paramError=0
     elif [ $# -eq 4 ] && [ $2 == "STD" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4"
+        url="$ric_id/A1-P/v1/policies/$UUID$4"
         paramError=0
        fi
 
@@ -542,18 +533,15 @@ controller_api_delete_A1_policy() {
 controller_api_put_A1_policy() {
        __log_test_start $@
 
-       ric_id=$3
-       if [ $RUNMODE == "KUBE" ]; then
-               ric_id=$(get_kube_sim_host $3)
-       fi
+       ric_id=$(__find_sim_host $3)
     paramError=1
     if [ $# -eq 6 ] && [ $2 == "OSC" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5"
+        url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5"
         body=$(sed 's/XXX/'${5}'/g' $6)
 
                paramError=0
     elif [ $# -eq 5 ] && [ $2 == "STD" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4"
+        url="$ric_id/A1-P/v1/policies/$UUID$4"
         body=$(sed 's/XXX/'${4}'/g' $5)
         paramError=0
        fi
@@ -588,14 +576,11 @@ controller_api_put_A1_policy() {
 controller_api_get_A1_policy_status() {
        __log_test_start $@
 
-       ric_id=$3
-       if [ $RUNMODE == "KUBE" ]; then
-               ric_id=$(get_kube_sim_host $3)
-       fi
+       ric_id=$(__find_sim_host $3)
     targetJson=""
     paramError=1
     if [ $# -ge 5 ] && [ $2 == "OSC" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5/status"
+        url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5/status"
         if [ $# -gt 5 ]; then
             targetJson="{\"instance_status\":\"$6\""
             targetJson=$targetJson",\"has_been_deleted\":\"$7\""
@@ -603,7 +588,7 @@ controller_api_get_A1_policy_status() {
         fi
                paramError=0
     elif [ $# -ge 4 ] && [ $2 == "STD" ]; then
-        url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4/status"
+        url="$ric_id/A1-P/v1/policies/$UUID$4/status"
         if [ $# -gt 4 ]; then
             targetJson="{\"enforceStatus\":\"$5\""
             if [ $# -eq 6 ]; then
index 6cb18f5..3254e06 100755 (executable)
@@ -212,10 +212,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -226,7 +228,7 @@ CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 CONSUL_HOST="consul-server"                              # Host name of consul
index c293420..b312518 100755 (executable)
@@ -86,10 +86,10 @@ SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.1.6"
 SDNC_DB_IMAGE_BASE="mariadb"
 SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
 
-# ECS image and tag - uses cherry release
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tag - uses cherry release
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
 
 # Control Panel image and tag - uses cherry release
 CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
@@ -170,7 +170,7 @@ PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
 PROJECT_IMAGES_APP_NAMES="PA SDNC"
 
 # List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ECS RICSIM RC"
+ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC"
 
 # List of app short names which images pulled from ONAP
 ONAP_IMAGES_APP_NAMES=""   # Not used
@@ -213,25 +213,25 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-ECS_APP_NAME="enrichmentservice"                         # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ECS container
-ECS_EXTERNAL_PORT=8083                                   # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083                                   # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434                            # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434                            # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container"        # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status"                                  # Base path for alive check
-ECS_COMPOSE_DIR="ecs"                                    # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml                         # Config file name
-ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL=""                                     # Space separated list of features
+ICS_APP_NAME="informationservice"                        # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ICS container
+ICS_EXTERNAL_PORT=8083                                   # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083                                   # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434                            # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434                            # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container"       # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information"  # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status"                                  # Base path for alive check
+ICS_COMPOSE_DIR="ics"                                    # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml                         # Config file name
+ICS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL=""                                     # Space separated list of features
 
 MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
@@ -256,10 +256,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -271,7 +273,7 @@ CR_APP_NAME="callback-receiver"                          # Name for the Callback
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
index 5b11137..a8d1b0a 100644 (file)
@@ -86,10 +86,10 @@ SDNC_A1_CONTROLLER_IMAGE_TAG_REMOTE_RELEASE="2.2.1"
 SDNC_DB_IMAGE_BASE="mariadb"
 SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
 
-# ECS image and tag - uses d release
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.1.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tag - uses d release
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.1.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
 
 # Control Panel image and tag - uses d release
 CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
@@ -173,7 +173,7 @@ PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
 PROJECT_IMAGES_APP_NAMES="PA SDNC"
 
 # List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ECS RICSIM RC NGW"
+ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC NGW"
 
 # List of app short names which images pulled from ONAP
 ONAP_IMAGES_APP_NAMES=""   # Not used
@@ -216,25 +216,25 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-ECS_APP_NAME="enrichmentservice"                         # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ECS container
-ECS_EXTERNAL_PORT=8083                                   # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083                                   # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434                            # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434                            # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container"        # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status"                                  # Base path for alive check
-ECS_COMPOSE_DIR="ecs"                                    # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml                         # Config file name
-ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
+ICS_APP_NAME="informationservice"                        # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ICS container
+ICS_EXTERNAL_PORT=8083                                   # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083                                   # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434                            # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434                            # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container"       # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information"  # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status"                                  # Base path for alive check
+ICS_COMPOSE_DIR="ics"                                    # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml                         # Config file name
+ICS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
 
 MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
@@ -259,10 +259,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs1"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -274,7 +276,7 @@ CR_APP_NAME="callback-receiver"                          # Name for the Callback
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
index 641aabe..6abb133 100755 (executable)
@@ -65,12 +65,12 @@ POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.1.1-SNAPSHOT"
 POLICY_AGENT_IMAGE_TAG_REMOTE="2.1.1"
 POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.1.1"
 
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.0.1"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.0.1"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
 
 
 # Control Panel image and tags
@@ -171,7 +171,7 @@ PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
 #No local image for pvc cleaner, remote image always used
 
 # List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP SDNC RC RICSIM"
+PROJECT_IMAGES_APP_NAMES="PA ICS CP SDNC RC RICSIM"
 
 # List of app short names which images pulled from ORAN
 ORAN_IMAGES_APP_NAMES=""  # Not used
@@ -216,25 +216,25 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-ECS_APP_NAME="enrichmentservice"                         # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ECS container
-ECS_EXTERNAL_PORT=8083                                   # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083                                   # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434                            # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434                            # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container"        # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status"                                  # Base path for alive check
-ECS_COMPOSE_DIR="ecs"                                    # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml                         # Config file name
-ECS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL=""                                     # Space separated list of features
+ICS_APP_NAME="informationservice"                        # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ICS container
+ICS_EXTERNAL_PORT=8083                                   # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083                                   # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434                            # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434                            # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container"        # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt"                                 # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information"   # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status"                                  # Base path for alive check
+ICS_COMPOSE_DIR="ics"                                    # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml                         # Config file name
+ICS_VERSION="V1-2"                                       # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL=""                                     # Space separated list of features
 
 MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
@@ -259,10 +259,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback Reciever"
@@ -273,7 +275,7 @@ CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
index 18f7e17..ed7670e 100755 (executable)
@@ -65,13 +65,13 @@ POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.2.1-SNAPSHOT"
 POLICY_AGENT_IMAGE_TAG_REMOTE="2.2.1"
 POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.2.1"
 
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.1.0"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.1.0"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
 
 #Control Panel image and tags
 CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
@@ -190,7 +190,7 @@ PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
 #No local image for pvc cleaner, remote image always used
 
 # List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP RC RICSIM NGW"  # Add SDNC here if oran image is used
+PROJECT_IMAGES_APP_NAMES="PA ICS CP RC RICSIM NGW"  # Add SDNC here if oran image is used
 
 # List of app short names which images pulled from ORAN
 ORAN_IMAGES_APP_NAMES=""  # Not used
@@ -235,25 +235,25 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-ECS_APP_NAME="enrichmentservice"                         # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ECS container
-ECS_EXTERNAL_PORT=8083                                   # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083                                   # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434                            # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434                            # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container"        # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt"                                 # Mounted db dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status"                                  # Base path for alive check
-ECS_COMPOSE_DIR="ecs"                                    # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml                         # Config file name
-ECS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
+ICS_APP_NAME="informationservice"                        # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ICS container
+ICS_EXTERNAL_PORT=8083                                   # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083                                   # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434                            # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434                            # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container"        # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt"                                 # Mounted db dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information"   # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status"                                  # Base path for alive check
+ICS_COMPOSE_DIR="ics"                                    # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml                         # Config file name
+ICS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES"                           # Space separated list of features
 
 MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
@@ -278,10 +278,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback receiver"
@@ -292,7 +294,7 @@ CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -401,8 +403,8 @@ CONTROL_PANEL_CONFIG_MOUNT_PATH=/etc/nginx               # Container internal pa
 CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s"  #nginx resolver for kube
 CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11"         # nginx resolver for docker
 CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/"           # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX="/data-producer/"          # Path prefix for forwarding ecs calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX2="/data-consumer/"         # Path prefix for forwarding ecs calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/"          # Path prefix for forwarding ics calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/"         # Path prefix for forwarding ics calls to NGW
 
 NRT_GATEWAY_APP_NAME="nonrtricgateway"                   # Name of the Gateway container
 NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
index 546e94c..044ad24 100755 (executable)
@@ -65,13 +65,13 @@ POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.3.0-SNAPSHOT"
 POLICY_AGENT_IMAGE_TAG_REMOTE="2.3.0"
 POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.3.0"
 
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.2.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.2.0"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.2.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-information-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.2.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.2.0"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.2.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
 
 #Control Panel image and tags
 CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
@@ -197,13 +197,18 @@ KUBE_PROXY_IMAGE_BASE="nodejs-kube-proxy"
 KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
 #No remote image for kube proxy, local image always used
 
-#Kube proxy remote image and tag
+#PVC Cleaner remote image and tag
 PVC_CLEANER_IMAGE_BASE="ubuntu"
 PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
 #No local image for pvc cleaner, remote image always used
 
+#Kafka Procon image and tag
+KAFKAPC_IMAGE_BASE="kafka-procon"
+KAFKAPC_IMAGE_TAG_LOCAL="latest"
+#No remote image for kafka procon, local image always used
+
 # List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP RC RICSIM NGW DMAAPADP DMAAPMED"  # Add SDNC here if oran image is used
+PROJECT_IMAGES_APP_NAMES="PA ICS CP RC RICSIM NGW DMAAPADP DMAAPMED"  # Add SDNC here if oran image is used
 
 # List of app short names which images pulled from ORAN
 ORAN_IMAGES_APP_NAMES=""  # Not used
@@ -261,25 +266,25 @@ POLICY_AGENT_CONFIG_FILE="application.yaml"              # Container config file
 POLICY_AGENT_DATA_FILE="application_configuration.json"  # Container data file name
 POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
 
-ECS_APP_NAME="enrichmentservice"                         # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service"        # Display name for ECS container
-ECS_EXTERNAL_PORT=8083                                   # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083                                   # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434                            # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434                            # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container"        # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt"                                 # Mounted db dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status"                                  # Base path for alive check
-ECS_COMPOSE_DIR="ecs"                                    # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml                         # Config file name
-ECS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO"  # Space separated list of features
+ICS_APP_NAME="informationservice"                        # Name for ICS container
+ICS_DISPLAY_NAME="Information Coordinator Service"       # Display name for ICS container
+ICS_EXTERNAL_PORT=8083                                   # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083                                   # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434                            # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434                            # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path to the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container"       # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt"                                 # Mounted db dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information"  # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status"                                  # Base path for alive check
+ICS_COMPOSE_DIR="ics"                                    # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml                         # Config file name
+ICS_VERSION="V1-2"                                       # Version where the types are decoupled from the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO RESP_CODE_CHANGE_1"  # Space separated list of features
 
 MR_DMAAP_APP_NAME="message-router"                       # Name for the Dmaap MR
 MR_STUB_APP_NAME="mr-stub"                               # Name of the MR stub
@@ -304,10 +309,12 @@ MR_DMAAP_COMPOSE_DIR="dmaapmr"                           # Dir in simulator_grou
 MR_STUB_COMPOSE_DIR="mrstub"                             # Dir in simulator_group for mr stub for - docker-compose
 MR_KAFKA_APP_NAME="message-router-kafka"                 # Kafka app name, if just named "kafka" the image will not start...
 MR_KAFKA_PORT=9092                                       # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098                     # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099                            # Kafka node port number for kube
 MR_ZOOKEEPER_APP_NAME="zookeeper"                        # Zookeeper app name
 MR_ZOOKEEPER_PORT="2181"                                 # Zookeeper port number
 MR_DMAAP_HOST_MNT_DIR="/mnt"                             # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs"                      # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs1"                      # Config files dir on localhost
 
 CR_APP_NAME="callback-receiver"                          # Name for the Callback receiver
 CR_DISPLAY_NAME="Callback receiver"
@@ -318,7 +325,7 @@ CR_INTERNAL_SECURE_PORT=8091                             # Callback receiver con
 CR_APP_CALLBACK="/callbacks"                             # Url for callbacks
 CR_APP_CALLBACK_MR="/callbacks-mr"                       # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
 CR_APP_CALLBACK_TEXT="/callbacks-text"                   # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/"                                         # Base path for alive check
+CR_ALIVE_URL="/reset"                                    # Base path for alive check
 CR_COMPOSE_DIR="cr"                                      # Dir in simulator_group for docker-compose
 
 PROD_STUB_APP_NAME="producer-stub"                       # Name for the Producer stub
@@ -427,8 +434,8 @@ CONTROL_PANEL_CONFIG_MOUNT_PATH=/etc/nginx               # Container internal pa
 CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s"  #nginx resolver for kube
 CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11"         # nginx resolver for docker
 CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/"           # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX="/data-producer/"          # Path prefix for forwarding ecs calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX2="/data-consumer/"         # Path prefix for forwarding ecs calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/"          # Path prefix for forwarding ics calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/"         # Path prefix for forwarding ics calls to NGW
 
 NRT_GATEWAY_APP_NAME="nonrtricgateway"                   # Name of the Gateway container
 NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
@@ -514,7 +521,7 @@ DMAAP_MED_INTERNAL_SECURE_PORT=8185                      # Dmaap Mediator contai
 DMAAP_MED_LOGPATH="/var/log/dmaap-adaptor-service/application.log" # Path to the application log in the Dmaap Mediator container
 DMAAP_MED_HOST_MNT_DIR="./mnt"                          # Mounted db dir, relative to compose file, on the host
 #MAAP_ADP_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container
-#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.enrichment"   # Url for trace/debug
+#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.information"   # Url for trace/debug
 #DMAAP_MED_CERT_MOUNT_DIR="./cert"
 DMAAP_MED_ALIVE_URL="/status"                            # Base path for alive check
 DMAAP_MED_COMPOSE_DIR="dmaapmed"                         # Dir in simulator_group for docker-compose
@@ -522,6 +529,15 @@ DMAAP_MED_COMPOSE_DIR="dmaapmed"                         # Dir in simulator_grou
 DMAAP_MED_DATA_MOUNT_PATH="/configs"                     # Path in container for data file
 DMAAP_MED_DATA_FILE="type_config.json"                   # Container data file name
 
+KAFKAPC_APP_NAME="kafka-procon"                          # Name for the Kafka procon
+KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer"
+KAFKAPC_EXTERNAL_PORT=8096                               # Kafka procon container external port (host -> container)
+KAFKAPC_INTERNAL_PORT=8090                               # Kafka procon container internal port (container -> container)
+KAFKAPC_EXTERNAL_SECURE_PORT=8097                        # Kafka procon container external secure port (host -> container)
+KAFKAPC_INTERNAL_SECURE_PORT=8091                        # Kafka procon container internal secure port (container -> container)
+KAFKAPC_ALIVE_URL="/"                               # Base path for alive check
+KAFKAPC_COMPOSE_DIR="kafka-procon"                       # Dir in simulator_group for docker-compose
+KAFKAPC_BUILD_DIR="kafka-procon"                         # Build dir
 ########################################
 # Setting for common curl-base function
 ########################################
index 78eeb54..d8b359b 100755 (executable)
@@ -128,6 +128,11 @@ STOP_AT_ERROR=0
 # Applies only to images defined in the test-env files with image names and tags defined as XXXX_RELEASE
 IMAGE_CATEGORY="DEV"
 
+#Var to indicate docker-compose version, V1 or V2
+#V1 names replicated containers <proj-name>_<service-name>_<index>
+#V2 names replicated containers <proj-name>-<service-name>-<index>
+DOCKER_COMPOSE_VERION="V1"
+
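A sketch of how this flag can be consulted when a replicated container must be addressed by name; __compose_container_name is a hypothetical helper, not an existing function, and "nonrtric" is only an example project name:

    __compose_container_name() {   # args: <project> <service> <index>
        if [ "$DOCKER_COMPOSE_VERION" == "V1" ]; then
            echo "${1}_${2}_${3}"
        else
            echo "${1}-${2}-${3}"
        fi
    }
    # e.g. __compose_container_name nonrtric ics 1  ->  nonrtric_ics_1 (V1) / nonrtric-ics-1 (V2)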
 # Function to indent cmd output with one space
 indent1() { sed 's/^/ /'; }
 
@@ -345,9 +350,16 @@ __log_conf_ok() {
 #Var for measuring execution time
 TCTEST_START=$SECONDS
 
+#Vars to hold the start time and timer text for a custom timer
+TC_TIMER_STARTTIME=""
+TC_TIMER_TIMER_TEXT=""
+TC_TIMER_CURRENT_FAILS="" # The number of failed tests when the timer starts.
+                          # Compared with the current number of fails at timer stop
+                          # to judge the measurement reliability
+
 #File to save timer measurement results
 TIMER_MEASUREMENTS=".timer_measurement.txt"
-echo -e "Activity \t Duration" > $TIMER_MEASUREMENTS
+echo -e "Activity \t Duration \t Info" > $TIMER_MEASUREMENTS
 
 # If this is set, some images (controlled by the parameter repo-policy) will be re-tagged and pushed to this repo before any
 IMAGE_REPO_ADR=""
@@ -654,7 +666,7 @@ while [ $paramerror -eq 0 ] && [ $foundparm -eq 0 ]; do
        if [ $paramerror -eq 0 ]; then
                if [ "$1" == "--print-stats" ]; then
                        PRINT_CURRENT_STATS=1
-                       echo "Option set - Print stats"
+                       echo "Option set - Print stats after every test-case and config"
                        shift;
                        foundparm=0
                fi
@@ -762,35 +774,62 @@ if [ ! -z "$TMP_APPS" ]; then
 else
        echo " None"
 fi
+
+echo -e $BOLD"Auto adding included apps"$EBOLD
+       for iapp in $INCLUDED_IMAGES; do
+               file_pointer=$(echo $iapp | tr '[:upper:]' '[:lower:]')
+               file_pointer="../common/"$file_pointer"_api_functions.sh"
+               padded_iapp=$iapp
+               while [ ${#padded_iapp} -lt 16 ]; do
+                       padded_iapp=$padded_iapp" "
+               done
+		echo " Auto-adding included app $padded_iapp  Sourcing $file_pointer"
+		if [ ! -f "$file_pointer" ]; then
+			echo " Include file $file_pointer for app $iapp does not exist"
+			exit 1
+		fi
+		. $file_pointer
+       done
 echo ""
 
+echo -e $BOLD"Test environment info"$EBOLD
+
 # Check needed installed sw
+
+tmp=$(which bash)
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
+       echo -e $RED"bash is required to run the test environment, pls install"$ERED
+       exit 1
+fi
+echo " bash is installed and using version:"
+echo "$(bash --version)" | indent2
+
 tmp=$(which python3)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        echo -e $RED"python3 is required to run the test environment, pls install"$ERED
        exit 1
 fi
+echo " python3 is installed and using version: $(python3 --version)"
+
 tmp=$(which docker)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        echo -e $RED"docker is required to run the test environment, pls install"$ERED
        exit 1
 fi
+echo " docker is installed and using versions:"
+echo  "  $(docker version --format 'Client version {{.Client.Version}} Server version {{.Server.Version}}')"
 
 tmp=$(which docker-compose)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
        if [ $RUNMODE == "DOCKER" ]; then
                echo -e $RED"docker-compose is required to run the test environment, pls install"$ERED
                exit 1
        fi
 fi
-if [ $RUNMODE == "DOCKER" ]; then
-       tmp=$(docker-compose version | grep -i 'docker' | grep -i 'compose' | grep -i 'version')
-       if [[ "$tmp" == *'v2'* ]]; then
-               echo -e $RED"docker-compose is using docker-compose version 2"$ERED
-               echo -e $RED"The test environment only support version 1"$ERED
-               echo -e $RED"Disable version 2 by cmd 'docker-compose disable-v2' and re-run the script "$ERED
-               exit 1
-       fi
+tmp=$(docker-compose version --short)
+echo " docker-compose installed and using version $tmp"
+if [[ "$tmp" == *'v2'* ]]; then
+       DOCKER_COMPOSE_VERION="V2"
 fi
 
 tmp=$(which kubectl)
@@ -801,6 +840,8 @@ if [ $? -ne 0 ] || [ -z tmp ]; then
        fi
 else
        if [ $RUNMODE == "KUBE" ]; then
+               echo " kubectl is installed and using versions:"
+               echo $(kubectl version --short=true) | indent2
                res=$(kubectl cluster-info 2>&1)
                if [ $? -ne 0 ]; then
                        echo -e "$BOLD$RED############################################# $ERED$EBOLD"
@@ -832,6 +873,8 @@ else
        fi
 fi
 
+echo ""
+
 echo -e $BOLD"Checking configured image setting for this test case"$EBOLD
 
 #Temp var to check for image variable name errors
@@ -1231,10 +1274,10 @@ setup_testenvironment() {
                                IMAGE_SUFFIX="none"
                        fi
                        # A function name is created from the app short name
-                       # for example app short name 'ECS' -> produce the function
-                       # name __ECS_imagesetup
+                       # for example app short name 'ICS' -> produce the function
+                       # name __ICS_imagesetup
                        # This function is called and is expected to exist in the imported
-                       # file for the ecs test functions
+                       # file for the ics test functions
                        # The resulting function impl will call '__check_and_create_image_var' function
                        # with appropriate parameters
                        # If the image suffix is none, then the component decides the suffix
@@ -1524,7 +1567,7 @@ print_result() {
        echo "===================================="
        column -t -s $'\t' $TIMER_MEASUREMENTS
        if [ $RES_PASS != $RES_TEST ]; then
-               echo -e $RED"Measurement may not be reliable when there are failed test - script timeouts may cause long measurement values"$ERED
+		echo -e $RED"Measurement may not be reliable when there are failed tests - failures may cause long measurement values due to timeouts etc."$ERED
        fi
        echo ""
 
@@ -1607,57 +1650,44 @@ print_result() {
 #####################################################################
 
 # Start timer for time measurement
-# args - (any args will be printed though)
+# args:  <timer message to print>  -  timer value and message will be printed both on screen
+#                                     and in the timer measurement report - if "print_timer" is called at least once
 start_timer() {
        echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       TC_TIMER=$SECONDS
+       TC_TIMER_STARTTIME=$SECONDS
+       TC_TIMER_TIMER_TEXT="${@:1}"
+       if [ $# -ne 1 ]; then
+               __print_err "need 1 arg,  <timer message to print>" $@
+               TC_TIMER_TIMER_TEXT=${FUNCNAME[0]}":"${BASH_LINENO[0]}
+               echo " Assigning timer name: "$TC_TIMER_TIMER_TEXT
+       fi
+       TC_TIMER_CURRENT_FAILS=$(($RES_FAIL+$RES_CONF_FAIL))
        echo " Timer started: $(date)"
 }
 
-# Print the value of the time (in seconds)
-# args - <timer message to print>  -  timer value and message will be printed both on screen
-#                                     and in the timer measurement report
+# Print the value (in seconds) of the currently running timer
+# Timer value and message will be printed both on screen and in the timer measurement report
 print_timer() {
-       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       if [ $# -lt 1 ]; then
-               ((RES_CONF_FAIL++))
-       __print_err "need 1 or more args,  <timer message to print>" $@
-               exit 1
+       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $TC_TIMER_TIMER_TEXT $EBOLD
+       if [ -z  "$TC_TIMER_STARTTIME" ]; then
+               __print_err "timer not started" $@
+               return 1
        fi
-       duration=$(($SECONDS-$TC_TIMER))
+       duration=$(($SECONDS-$TC_TIMER_STARTTIME))
        if [ $duration -eq 0 ]; then
                duration="<1 second"
        else
                duration=$duration" seconds"
        fi
        echo " Timer duration :" $duration
-
-       echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
-}
-
-# Print the value of the time (in seconds) and reset the timer
-# args - <timer message to print>  -  timer value and message will be printed both on screen
-#                                     and in the timer measurement report
-print_and_reset_timer() {
-       echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
-       if [ $# -lt 1 ]; then
-               ((RES_CONF_FAIL++))
-       __print_err "need 1 or more args,  <timer message to print>" $@
-               exit 1
-       fi
-       duration=$(($SECONDS-$TC_TIMER))" seconds"
-       if [ $duration -eq 0 ]; then
-               duration="<1 second"
-       else
-               duration=$duration" seconds"
+       res="-"
+       if [ $(($RES_FAIL+$RES_CONF_FAIL)) -ne $TC_TIMER_CURRENT_FAILS ]; then
+		res="Failures occurred during test - timer not reliable"
        fi
-       echo " Timer duration :" $duration
-       TC_TIMER=$SECONDS
-       echo " Timer reset"
-
-       echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
 
+       echo -e "$TC_TIMER_TIMER_TEXT \t $duration \t $res" >> $TIMER_MEASUREMENTS
 }
+
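A usage sketch of the reworked timer pair (the timer text below is an example, not taken from a test case):

    start_timer "Create and verify policies"
    # ... test steps being measured ...
    print_timer    # prints the duration and appends "Create and verify policies <duration> <info>" to $TIMER_MEASUREMENTS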
 # Print info about a deviations from intended tests
 # Each deviation counted is also printed in the testreport
 # args <deviation message to print>
@@ -1700,6 +1730,10 @@ __clean_containers() {
        for imagename in $APP_SHORT_NAMES; do
                docker ps -a --filter "label=nrttest_app=$imagename"  --filter "network=$DOCKER_SIM_NWNAME" --format ' {{.Label "nrttest_dp"}}\n{{.Label "nrttest_app"}}\n{{.Names}}' >> $running_contr_file
        done
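	# The format string above writes three lines per matching container - the 'nrttest_dp'
	# label, the 'nrttest_app' label and the container name - which is why the table loop
	# further down consumes the file in groups of three ($cntr % 3).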
+       running_contr_file_empty="No docker containers running, started by previous test execution"
+       if [ -s $running_contr_file ]; then
+               running_contr_file_empty=""
+       fi
 
        # Kill all containers started by the test env - to speed up shut down
     docker kill $(docker ps -a  --filter "label=nrttest_app" --format '{{.Names}}') &> /dev/null
@@ -1747,37 +1781,41 @@ __clean_containers() {
                tab_heading3="$tab_heading3"" "
        done
 
-       echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
-       cntr=0
-       while read p; do
-               if (( $cntr % 3 == 0 ));then
-                       row=""
-                       heading=$p
-                       heading_len=$tab_heading1_len
-               fi
-               if (( $cntr % 3 == 1));then
-                       heading=$p
-                       heading_len=$tab_heading2_len
-               fi
-               if (( $cntr % 3 == 2));then
-                       contr=$p
-                       heading=$p
-                       heading_len=$tab_heading3_len
-               fi
-               while (( ${#heading} < $heading_len)); do
-                       heading="$heading"" "
-               done
-               row=$row$heading
-               if (( $cntr % 3 == 2));then
-                       echo -ne $row$SAMELINE
-                       echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
-                       docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
-                       echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
-                       docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
-                       echo -e  " $row ${GREEN}stopped removed     ${EGREEN}"
-               fi
-               let cntr=cntr+1
-       done <$running_contr_file
+       if [ ! -z "$running_contr_file_empty" ]; then
+               echo $running_contr_file_empty | indent1
+       else
+               echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
+               cntr=0
+               while read p; do
+                       if (( $cntr % 3 == 0 ));then
+                               row=""
+                               heading=$p
+                               heading_len=$tab_heading1_len
+                       fi
+                       if (( $cntr % 3 == 1));then
+                               heading=$p
+                               heading_len=$tab_heading2_len
+                       fi
+                       if (( $cntr % 3 == 2));then
+                               contr=$p
+                               heading=$p
+                               heading_len=$tab_heading3_len
+                       fi
+                       while (( ${#heading} < $heading_len)); do
+                               heading="$heading"" "
+                       done
+                       row=$row$heading
+                       if (( $cntr % 3 == 2));then
+                               echo -ne $row$SAMELINE
+                               echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
+                               docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+                               echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
+                               docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+                               echo -e  " $row ${GREEN}stopped removed     ${EGREEN}"
+                       fi
+                       let cntr=cntr+1
+               done <$running_contr_file
+       fi
 
        echo ""
 
@@ -1989,7 +2027,7 @@ __kube_delete_all_resources() {
                                        echo -e "  Scaled $restype $resid $ns_text with label $labelname=$labelid to 0, current count=$count $GREEN OK $EGREEN"
                                fi
                                echo -ne "  Deleting $restype $resid $ns_text with label $labelname=$labelid "$SAMELINE
-                               kubectl delete $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
+                               kubectl delete --grace-period=1 $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
                                if [ $? -eq 0 ]; then
                                        echo -e "  Deleted $restype $resid $ns_text with label $labelname=$labelid $GREEN OK $EGREEN"
                                else
@@ -2290,7 +2328,7 @@ clean_environment() {
        if [ $RUNMODE == "KUBE" ]; then
                __clean_kube
                if [ $PRE_CLEAN -eq 1 ]; then
-                       echo " Clean docker resouces to free up resources, may take time..."
+			echo " Cleaning docker resources, this may take time..."
                        ../common/clean_docker.sh 2&>1 /dev/null
                        echo ""
                fi
@@ -2399,6 +2437,9 @@ __start_container() {
        appcount=$1
        shift
 
+       envsubst < $compose_file > "gen_"$compose_file
+       compose_file="gen_"$compose_file
+
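	# envsubst expands ${VAR} references in the compose file from the exported environment
	# before docker-compose is invoked, e.g. (illustration only, assuming the variables are exported):
	#   echo 'image: ${ICS_IMAGE_BASE}:${ICS_IMAGE_TAG_LOCAL}' | envsubst
	#   -> image: o-ran-sc/nonrtric-information-coordinator-service:1.2.0-SNAPSHOT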
        if [ "$compose_args" == "NODOCKERARGS" ]; then
                docker-compose -f $compose_file up -d &> .dockererr
                if [ $? -ne 0 ]; then
index 3513464..1048d76 100644 (file)
 #
 
 # List of short names for all supported apps, including simulators etc
-APP_SHORT_NAMES="PA ECS SDNC CP NGW RC RICSIM HTTPPROXY CBS CONSUL DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER"
+APP_SHORT_NAMES="PA ICS SDNC CP NGW RC RICSIM HTTPPROXY CBS CONSUL DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER KAFKAPC"
 
 # List of available apps that built and released of the project
-PROJECT_IMAGES="PA ECS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
+PROJECT_IMAGES="PA ICS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
 
 # List of available apps to override with local or remote staging/snapshot/release image
-AVAILABLE_IMAGES_OVERRIDE="PA ECS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
+AVAILABLE_IMAGES_OVERRIDE="PA ICS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
 
 # List of available apps where the image is built by the test environment
-LOCAL_IMAGE_BUILD="MR CR PRODSTUB KUBEPROXY HTTPPROXY"
+LOCAL_IMAGE_BUILD="MR CR PRODSTUB KUBEPROXY HTTPPROXY KAFKAPC"
 
 # List of system app used only by the test env - kubernetes
 TESTENV_KUBE_SYSTEM_APPS="PVCCLEANER"
index 4aa3a7a..6409fa2 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+.env
\ No newline at end of file
index e66d30f..92efcb0 100644 (file)
@@ -31,4 +31,17 @@ RUN pip install -r requirements.txt
 
 RUN chmod +x start.sh
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+
+## add permissions for appuser user
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+        chown -R appuser:appuser /var/log/nginx && \
+        chown -R appuser:appuser /var/lib/nginx && \
+        chown -R appuser:appuser /etc/nginx/conf.d
+RUN touch /var/run/nginx.pid && \
+        chown -R appuser:appuser /var/run/nginx.pid
+
+USER appuser
+
 CMD [ "./start.sh" ]
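A quick check that the image now runs unprivileged once a container has been started (the container name is a placeholder):

    docker exec <container> id
    # expected output similar to: uid=999(appuser) gid=999(appuser) groups=999(appuser)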
index 32beca1..31e3845 100644 (file)
@@ -1,4 +1,4 @@
-user www-data;
+user www-data;
 worker_processes auto;
 pid /run/nginx.pid;
 include /etc/nginx/modules-enabled/*.conf;
index d7a78ad..0043eeb 100644 (file)
@@ -13,4 +13,6 @@ COPY cert/pass .
 WORKDIR /usr/src/app
 COPY http_proxy.js .
 
+USER node
+
 CMD [ "node", "http_proxy.js" ]
\ No newline at end of file
diff --git a/test/kafka-procon/.gitignore b/test/kafka-procon/.gitignore
new file mode 100644 (file)
index 0000000..6703e3c
--- /dev/null
@@ -0,0 +1,4 @@
+.tmp.json
+.dockererr
+.env
+.payload
diff --git a/test/kafka-procon/Dockerfile b/test/kafka-procon/Dockerfile
new file mode 100644 (file)
index 0000000..97a09cb
--- /dev/null
@@ -0,0 +1,43 @@
+#==================================================================================
+#   Copyright (C) 2021: Nordix Foundation
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+#   This source code is part of the near-RT RIC (RAN Intelligent Controller)
+#   platform project (RICP).
+#==================================================================================
+
+ARG NEXUS_PROXY_REPO
+
+##
+## Build
+##
+
+FROM ${NEXUS_PROXY_REPO}golang:1.17-bullseye AS build
+WORKDIR /app
+COPY go.mod .
+COPY go.sum .
+RUN go mod download
+COPY main.go .
+RUN go build -o /kafkaprocon
+
+##
+## Deploy
+##
+
+FROM gcr.io/distroless/base-debian11
+WORKDIR /
+## Copy from "build" stage
+COPY --from=build /kafkaprocon .
+USER nonroot:nonroot
+ENTRYPOINT ["/kafkaprocon"]
\ No newline at end of file
diff --git a/test/kafka-procon/basic_test.sh b/test/kafka-procon/basic_test.sh
new file mode 100755 (executable)
index 0000000..f3a602e
--- /dev/null
@@ -0,0 +1,632 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2020 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+# Automated test script for Kafka procon container
+
+# NOTE: Need a running instance of kafka
+
+
+export PORT=8096
+export HTTPX="http"
+export REQ_CONTENT=""
+export RESP_CONTENT="text/plain"
+
+# source function to do curl and check result
+. ../common/do_curl_function.sh
+
+echo "Requires a running kafka"
+
+payload=".payload"
+
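# Calling convention, as read from the calls below (do_curl is provided by do_curl_function.sh):
#   do_curl <method> <url-path> <expected-http-status> [payload-file]
# REQ_CONTENT/RESP_CONTENT hold the expected request/response content types and RESULT the
# expected response body ("*" accepts any body, a "json:" prefix compares the body as JSON).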
+echo "=== hello world ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="OK"
+do_curl GET / 200
+
+echo "=== reset ==="
+REQ_CONTENT=""
+RESP_CONTENT=""
+RESULT="*"
+do_curl POST /reset 200
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[]"
+do_curl GET /topics 200
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/received 200
+
+echo "=== get topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic 404
+
+echo "=== get topic counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/counters/sent 404
+
+echo "=== get topic counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/counters/received 404
+
+echo "=== create a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT=""
+RESULT="*"
+do_curl PUT /topics/test-topic 405
+
+echo "=== start to send on a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/startsend 404
+
+echo "=== start to receive from a  topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/startreceive 404
+
+echo "=== send a msg on a  topic ==="
+echo "TEST1" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/msg 404 $payload
+
+echo "=== receive a msg  from a  topic ==="
+echo "TEST1" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/msg 404 $payload
+
+echo "=== stop to send on a  topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/stopsend 404
+
+echo "=== stop to receive from a  topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/stopreceive 404
+
+# Create 4 topics
+
+echo "=== create topic1 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic1?type=text/plain 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\"]"
+do_curl GET /topics 200
+
+echo "=== create topic2 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic2?type=text/plain 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\"]"
+do_curl GET /topics 200
+
+echo "=== create topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic3?type=application/json 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\",\"topic3\"]"
+do_curl GET /topics 200
+
+echo "=== create topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic4?type=application/json 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\",\"topic3\",\"topic4\"]"
+do_curl GET /topics 200
+
+echo "=== get topic1 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="text/plain"
+do_curl GET /topics/topic1 200
+
+echo "=== get topic2 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="text/plain"
+do_curl GET /topics/topic2 200
+
+echo "=== get topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="application/json"
+do_curl GET /topics/topic3 200
+
+echo "=== get topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="application/json"
+do_curl GET /topics/topic4 200
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 400  $payload
+
+echo "=== receive a msg  from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic1/msg 400
+
+echo "=== send a msg on topic2 ==="
+echo "TEST22" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/msg 400 $payload
+
+echo "=== receive a msg  from topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic2/msg 400
+
+
+
+echo "=== send a msg on topic3 ==="
+echo "{\"test\":\"33\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/msg 400 $payload
+
+echo "=== receive a msg  from topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic3/msg 400
+
+echo "=== send a msg on topic4 ==="
+echo "{\"test\":\"44\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/msg 400 $payload
+
+echo "=== receive a msg  from topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic2/msg 400
+
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/received 200
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== get topic2 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic2/counters/sent 200
+
+echo "=== get topic2 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic2/counters/received 200
+
+echo "=== get topic3 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic3/counters/sent 200
+
+echo "=== get topic3 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic3/counters/received 200
+
+echo "=== get topic4 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic4/counters/sent 200
+
+echo "=== get topic4 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic4/counters/received 200
+
+# Begins send and receive
+
+echo "=== set topic1 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startsend 200
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 400  $payload
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 200  $payload
+
+echo "sleep 2  to allow sending the msg to kafka"
+sleep 2
+
+echo "=== receive a msg  from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic1/msg 400
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== set topic1 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startreceive 200
+
+echo "sleep 60 to allow kafka to process the msg, unclear why first message takes a long time..."
+sleep 60
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /counters/received 200
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST11"
+do_curl GET /topics/topic1/msg 200
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic1/msg 204
+
+
+echo "=== set topic1 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startsend 200
+
+echo "=== set topic2 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/startsend 200
+
+echo "=== set topic3 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/startsend 200
+
+echo "=== set topic4 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/startsend 200
+
+echo "=== set topic1 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startreceive 200
+
+echo "=== set topic2 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/startreceive 200
+
+echo "=== set topic3 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/startreceive 200
+
+echo "=== set topic4 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/startreceive 200
+
+
+# Send and receive on all topics
+
+echo "=== send a msg on topic1 ==="
+echo "TEST101" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 200  $payload
+
+echo "=== send two msg on topic2 ==="
+echo "TEST201" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/msg 200  $payload
+echo "TEST202" > $payload
+do_curl POST /topics/topic2/msg 200  $payload
+
+echo "=== send three msg on topic3 ==="
+echo "{\"a\":\"msg301\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/msg 200  $payload
+echo "{\"a\":\"msg302\"}" > $payload
+do_curl POST /topics/topic3/msg 200  $payload
+echo "{\"a\":\"msg303\"}" > $payload
+do_curl POST /topics/topic3/msg 200  $payload
+
+
+echo "=== send four msg on topic4 ==="
+echo "{\"a\":\"msg401\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/msg 200  $payload
+echo "{\"a\":\"msg402\"}" > $payload
+do_curl POST /topics/topic4/msg 200  $payload
+echo "{\"a\":\"msg403\"}" > $payload
+do_curl POST /topics/topic4/msg 200  $payload
+echo "{\"a\":\"msg404\"}" > $payload
+do_curl POST /topics/topic4/msg 200  $payload
+
+echo "sleep 10 to allow kafka to process msg"
+sleep 10
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="11"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="11"
+do_curl GET /counters/received 200
+
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic1/counters/received 200
+
+
+echo "=== get topic2 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic2/counters/sent 200
+
+echo "=== get topic2 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic2/counters/received 200
+
+
+echo "=== get topic3 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="3"
+do_curl GET /topics/topic3/counters/sent 200
+
+echo "=== get topic3 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="3"
+do_curl GET /topics/topic3/counters/received 200
+
+
+echo "=== get topic4 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="4"
+do_curl GET /topics/topic4/counters/sent 200
+
+echo "=== get topic4 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="4"
+do_curl GET /topics/topic4/counters/received 200
+
+
+echo "=== get a msg on topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST101"
+do_curl GET /topics/topic1/msg 200
+
+
+echo "=== attempt to receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic1/msg 204
+
+echo "=== get a two msg on topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST201"
+do_curl GET /topics/topic2/msg 200
+RESULT="TEST202"
+do_curl GET /topics/topic2/msg 200
+
+
+echo "=== attempt to receive a msg from topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic2/msg 204
+
+echo "=== get three msg on topic3 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="application/json"
+RESULT="json:{\"a\":\"msg301\"}"
+do_curl GET /topics/topic3/msg 200
+RESULT="json:{\"a\":\"msg302\"}"
+do_curl GET /topics/topic3/msg 200
+RESULT="json:{\"a\":\"msg303\"}"
+do_curl GET /topics/topic3/msg 200
+
+echo "=== attempt to receive a msg from topic3 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic3/msg 204
+
+echo "=== send four msg on topic4 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="application/json"
+RESULT="json:{\"a\":\"msg401\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg402\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg403\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg404\"}"
+do_curl GET /topics/topic4/msg 200
+
+echo "=== attempt to receive a msg from topic4 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic4/msg 204
+
+echo "********************"
+echo "*** All tests ok ***"
+echo "********************"
+
diff --git a/test/kafka-procon/build-and-start.sh b/test/kafka-procon/build-and-start.sh
new file mode 100755 (executable)
index 0000000..4e4a550
--- /dev/null
@@ -0,0 +1,40 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+echo "This script requires running kafka instance in a docker private network"
+
+# Script to build and start the container
+if [ $# -ne 2 ]; then
+    echo "usage: ./build-and-start.sh <docker-network> <kafka-boostrapserver-host>:<kafka-boostrapserver-port>"
+    echo "example: ./build-and-start.sh nonrtric-docker-net message-router-kafka:9092"
+    exit 1
+fi
+IMAGE="kafka-procon:latest"
+#Build the image
+docker build -t $IMAGE .
+
+if [ $? -ne 0 ]; then
+    echo "Build failed, exiting..."
+    exit 1
+fi
+
+echo "Starting kafka-procon"
+#Run the container in interactive mode on port 8090.
+docker run --rm -it -p "8090:8090" --network $1 -e KAFKA_BOOTSTRAP_SERVER=$2 --name kafka-procon $IMAGE
+
diff --git a/test/kafka-procon/go.mod b/test/kafka-procon/go.mod
new file mode 100644 (file)
index 0000000..31ccc7c
--- /dev/null
@@ -0,0 +1,9 @@
+module kafkaprocon
+
+go 1.17
+
+require (
+       github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
+       github.com/enriquebris/goconcurrentqueue v0.6.0 // indirect
+       github.com/gorilla/mux v1.8.0 // indirect
+)
diff --git a/test/kafka-procon/go.sum b/test/kafka-procon/go.sum
new file mode 100644 (file)
index 0000000..34a6358
--- /dev/null
@@ -0,0 +1,6 @@
+github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
+github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
+github.com/enriquebris/goconcurrentqueue v0.6.0 h1:DJ97cgoPVoqlC4tTGBokn/omaB3o16yIs5QdAm6YEjc=
+github.com/enriquebris/goconcurrentqueue v0.6.0/go.mod h1:wGJhQNFI4wLNHleZLo5ehk1puj8M6OIl0tOjs3kwJus=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
diff --git a/test/kafka-procon/main.go b/test/kafka-procon/main.go
new file mode 100644 (file)
index 0000000..6f8bad2
--- /dev/null
@@ -0,0 +1,595 @@
+// kafka-procon: kafka producer/consumer test stub exposing a REST API (net/http + gorilla/mux)
+// for creating kafka topics and for sending/receiving messages on them.
+package main
+
+import (
+       "context"
+       "encoding/json"
+       "fmt"
+       "io/ioutil"
+       "net/http"
+       "os"
+       "strconv"
+       "sync/atomic"
+       "time"
+
+       "github.com/confluentinc/confluent-kafka-go/kafka"
+       "github.com/enriquebris/goconcurrentqueue"
+       "github.com/gorilla/mux"
+)
+
+// Note: consumer 'group' and consumer 'user' both use hardcoded values specific to this interface
+//    globalCounters      var holding the "global counters"
+//      received          number of received messages from all topics                             (int)
+//      sent              number of sent messages to all topics                                   (int)
+//    topics              var holding all topic related info
+//      <topic-name>      name of a topic (present after topic is created)
+//        content-type    data type of the topic                                                  (string)
+//        counters
+//          received      number of received messages from the topic                              (int)
+//          sent          number of sent messages to the topic                                    (int)
+//        messages
+//          send          messages waiting to be sent (set when sending is started)               (fifo)
+//          received      received messages waiting to be fetched (set when reception is started) (fifo)
+
+type counter struct {
+       c uint64
+}
+
+func (c *counter) step() {
+       atomic.AddUint64(&c.c, 1)
+}
+
+func (c *counter) get() uint64 {
+       return atomic.LoadUint64(&c.c)
+}
+
+type counters struct {
+       received counter
+       sent     counter
+}
+
+func newCounters() counters {
+       return counters{
+               received: counter{},
+               sent:     counter{},
+       }
+}
+
+type messages struct {
+       send     *goconcurrentqueue.FIFO
+       received *goconcurrentqueue.FIFO
+}
+
+func (m *messages) startSend() bool {
+       if m.send == nil {
+               m.send = goconcurrentqueue.NewFIFO()
+               return true
+       }
+       return false
+}
+
+func (m *messages) stopSend() {
+       m.send = nil
+}
+
+func (m *messages) addToSend(msg string) error {
+       if m.send == nil {
+               return fmt.Errorf("sending not started")
+       }
+       m.send.Lock()
+       defer m.send.Unlock()
+       return m.send.Enqueue(msg)
+}
+
+func (m *messages) getToSend() (interface{}, error) {
+       if m.send == nil {
+               return "", fmt.Errorf("sending not started")
+       }
+       m.send.Lock()
+       defer m.send.Unlock()
+       return m.send.Dequeue()
+}
+
+func (m *messages) startReceive() bool {
+       if m.received == nil {
+               m.received = goconcurrentqueue.NewFIFO()
+               return true
+       }
+       return false
+}
+
+func (m *messages) stopReceive() {
+       m.received = nil
+}
+
+type topic struct {
+       contentType string
+       counters    counters
+       messages    messages
+}
+
+func newTopic(ct string) *topic {
+       return &topic{
+               contentType: ct,
+               counters:    counters{},
+               messages:    messages{},
+       }
+}
+
+var globalCounters counters
+var topics map[string]*topic = make(map[string]*topic)
+
+var bootstrapserver = ""
+
+func initApp() {
+       bootstrapserver = os.Getenv("KAFKA_BOOTSTRAP_SERVER")
+       if len(bootstrapserver) == 0 {
+               fmt.Println("Fatal error: env var KAFKA_BOOTSTRAP_SERVER not set")
+               fmt.Println("Exiting... ")
+               os.Exit(1)
+       }
+       fmt.Println("Using KAFKA_BOOTSTRAP_SERVER=" + bootstrapserver)
+}
+
+//Helper function to get a created topic, if it exists
+func getTopicFromRequest(w http.ResponseWriter, req *http.Request) (*topic, string, bool) {
+       topicId := mux.Vars(req)["topic"]
+       t, exist := topics[topicId]
+       if !exist {
+               w.WriteHeader(http.StatusNotFound)
+               fmt.Fprintf(w, "Topic %v does not exist", topicId)
+               return nil, "", false
+       }
+       return t, topicId, true
+}
+
+// Alive check
+// GET on /
+func healthCheck(w http.ResponseWriter, req *http.Request) {
+       fmt.Fprintf(w, "OK")
+}
+
+// Deep reset of this interface stub - no removal of msgs or topics in kafka
+// POST on /reset
+func allreset(w http.ResponseWriter, req *http.Request) {
+       for _, v := range topics {
+               v.messages.stopSend()
+               v.messages.stopReceive()
+       }
+       time.Sleep(5 * time.Second) //Allow producers/consumers to shut down
+       globalCounters = newCounters()
+       topics = make(map[string]*topic)
+       fmt.Fprintf(w, "OK")
+}
+
+// Get topics, return json array of strings of topics created by this interface stub
+// Returns json array of strings, array is empty if no topics exist
+// GET on /topics
+
+func getTopics(w http.ResponseWriter, req *http.Request) {
+       topicKeys := make([]string, 0, len(topics))
+       fmt.Printf("len topics: %v\n", len(topics))
+       for k := range topics {
+               topicKeys = append(topicKeys, k)
+       }
+       var j, err = json.Marshal(topicKeys)
+       if err != nil {
+               w.WriteHeader(http.StatusInternalServerError)
+               fmt.Fprintf(w, "Cannot convert list of topics to json, error details: %v", err)
+               return
+       } else {
+               w.Header().Set("Content-Type", "application/json")
+               w.WriteHeader(http.StatusOK)
+               w.Write(j)
+       }
+}
+
+func writeOkResponse(w http.ResponseWriter, httpStatus int, msg string) {
+       w.Header().Set("Content-Type", "text/plain")
+       w.WriteHeader(httpStatus)
+       fmt.Fprint(w, msg)
+}
+
+// Get a counter value
+// GET /counters/{counter}
+func getCounter(w http.ResponseWriter, req *http.Request) {
+       ctr := mux.Vars(req)["counter"]
+       var ctrvalue = -1
+       if ctr == "received" {
+               ctrvalue = int(globalCounters.received.get())
+       } else if ctr == "sent" {
+               ctrvalue = int(globalCounters.sent.get())
+       }
+
+       if ctrvalue == -1 {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Counter %v does not exist", ctr)
+               return
+       }
+       writeOkResponse(w, http.StatusOK, strconv.Itoa(ctrvalue))
+       return
+
+}
+
+// Create a topic
+// PUT on /topics/<topic>?type=<type>    type shall be 'application/json' or 'text/plain'
+func createTopic(w http.ResponseWriter, req *http.Request) {
+       topicId := mux.Vars(req)["topic"]
+       topicType := mux.Vars(req)["type"]
+
+       fmt.Printf("Creating topic: %v, content type: %v\n", topicId, topicType)
+
+       if len(topicType) == 0 {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Type not specified")
+               return
+       }
+
+       tid, exist := topics[topicId]
+       if exist {
+               if tid.contentType != topicType {
+                       w.WriteHeader(http.StatusBadRequest)
+                       fmt.Fprintf(w, "Topic exists but with a different content type, queue content type: %v, requested content type: %v", tid.contentType, topicType)
+                       return
+               }
+               writeOkResponse(w, http.StatusOK, "Topic exists")
+               return
+       }
+
+       t := newTopic(topicType)
+
+       a, err := kafka.NewAdminClient(&kafka.ConfigMap{"bootstrap.servers": bootstrapserver})
+       if err != nil {
+               w.WriteHeader(http.StatusInternalServerError)
+               fmt.Fprintf(w, "Cannot create client to bootstrap server: "+bootstrapserver+", error details: %v", err)
+               return
+       }
+       defer func() { a.Close() }()
+
+       ctx, cancel := context.WithCancel(context.Background())
+       defer cancel()
+
+       maxDur := 10 * time.Second
+
+       _, err = a.CreateTopics(
+               ctx,
+               []kafka.TopicSpecification{{
+                       Topic:             topicId,
+                       NumPartitions:     1,
+                       ReplicationFactor: 1}},
+               kafka.SetAdminOperationTimeout(maxDur))
+
+       if err != nil {
+               w.WriteHeader(http.StatusInternalServerError)
+               fmt.Fprintf(w, "Failed to create topic: %v, error details: %v", topicId, err)
+               return
+       }
+       topics[topicId] = t
+       w.WriteHeader(http.StatusCreated)
+       fmt.Fprintf(w, "Topic created")
+}
+
+// Get topic type
+// GET on /topics/<topic>
+func getTopic(w http.ResponseWriter, req *http.Request) {
+       t, _, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, t.contentType)
+}
+
+// Get a topics counter value
+// GET /topics/{topic}/counters/{counter}
+func getTopicCounter(w http.ResponseWriter, req *http.Request) {
+       t, _, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       ctr := mux.Vars(req)["counter"]
+
+       var ctrvalue = -1
+       if ctr == "received" {
+               ctrvalue = int(t.counters.received.get())
+       } else if ctr == "sent" {
+               ctrvalue = int(t.counters.sent.get())
+       }
+
+       if ctrvalue == -1 {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Counter %v does not exist", ctr)
+               return
+       }
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, strconv.Itoa(ctrvalue))
+       return
+}
+
+func startToSend(w http.ResponseWriter, req *http.Request) {
+       t, topicId, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+
+       if !t.messages.startSend() {
+               w.WriteHeader(http.StatusOK)
+               fmt.Fprintf(w, "Already started sending")
+               return
+       }
+       go func() {
+               p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": bootstrapserver})
+               if err != nil {
+                       fmt.Printf("Cannot create producer for topic: %v, error details: %v\n", topicId, err)
+                       return
+               }
+               defer func() { p.Close() }()
+               for {
+                       q := t.messages.send
+                       if q == nil {
+                               return
+                       }
+                       m, err := q.Get(0)
+                       if err == nil {
+                               err = p.Produce(&kafka.Message{
+                                       TopicPartition: kafka.TopicPartition{Topic: &topicId, Partition: kafka.PartitionAny},
+                                       Value:          []byte(fmt.Sprintf("%v", m)),
+                               }, nil)
+                               if err == nil {
+                                       q.Remove(0)
+                                       t.counters.sent.step()
+                                       globalCounters.sent.step()
+                                       msg := fmt.Sprintf("%v", m)
+                                       if len(msg) < 500 {
+                                               fmt.Printf("Message sent on topic: %v, len: %v, msg: %v", topicId, len(msg), msg)
+                                       } else {
+                                               fmt.Printf("Message sent on topic: %v, len: %v, is larger than 500...not printed", topicId, len(msg))
+                                       }
+                               } else {
+                                       fmt.Printf("Failed to send message on topic: %v. Discarded. Error details: %v", topicId, err)
+                                       q.Remove(0)
+                               }
+                       } else {
+                               time.Sleep(10 * time.Millisecond)
+                       }
+               }
+       }()
+
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, "Sending started")
+}
+
+func startToReceive(w http.ResponseWriter, req *http.Request) {
+       t, topicId, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+
+       if !t.messages.startReceive() {
+               w.WriteHeader(http.StatusOK)
+               fmt.Fprintf(w, "Already started receiving")
+               return
+       }
+
+       go func() {
+
+               defer func() { t.messages.stopReceive() }()
+
+               groupId := "kafkaprocon"
+
+               c, err := kafka.NewConsumer(&kafka.ConfigMap{
+                       "bootstrap.servers":       bootstrapserver,
+                       "group.id":                groupId,
+                       "auto.offset.reset":       "earliest",
+                       "enable.auto.commit":      true,
+                       "auto.commit.interval.ms": 5000,
+               })
+               if err != nil {
+                       fmt.Printf("Cannot create consumer for topic: %v, error details: %v\n", topicId, err)
+                       t.messages.stopReceive()
+                       return
+               }
+               c.Commit()
+               defer func() { c.Close() }()
+               for {
+                       que := t.messages.received
+                       if que == nil {
+                               fmt.Printf("Cannot start receiving on topic: %v, queue does not exist\n", topicId)
+                               return
+                       }
+                       fmt.Printf("Start subscribing on topic: %v\n", topicId)
+                       err = c.SubscribeTopics([]string{topicId}, nil)
+                       if err != nil {
+                               fmt.Printf("Cannot start subscribing on topic: %v, error details: %v\n", topicId, err)
+                               return
+                       }
+                       maxDur := 1 * time.Second
+                       for {
+                               msg, err := c.ReadMessage(maxDur)
+                               if err == nil {
+                                       if len(msg.Value) < 500 {
+                                               fmt.Printf("Message received on topic: %v, partition: %v, len: %v, msg: %v\n", topicId, msg.TopicPartition, len(msg.Value), string(msg.Value))
+                                       } else {
+                                               fmt.Printf("Message received on topic: %v, partition: %v, len: %v is larger than 500...not printed\n", topicId, msg.TopicPartition, len(msg.Value))
+                                       }
+                                       err = t.messages.received.Enqueue(string(msg.Value))
+                                       if err != nil {
+                                               // Note: the HTTP response writer is not valid in this goroutine; log the error and stop receiving
+                                               fmt.Printf("Received message on topic: %v cannot be put in queue, %v\n", topicId, err)
+                                               return
+                                       }
+                                       t.counters.received.step()
+                                       globalCounters.received.step()
+                               } else {
+                                       fmt.Printf("Nothing to consume on topic: %v, reason: %v\n", topicId, err)
+                               }
+                       }
+               }
+       }()
+
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, "Receiving started")
+}
+
+// Post a message to a topic
+// POST /topics/<topic>/msg    the payload content type is given in the Content-Type header and must match the topic's type
+func sendToTopic(w http.ResponseWriter, req *http.Request) {
+       t, topicId, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       q := t.messages.send
+       if q == nil {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Sending not initiated on topic: %v", topicId)
+               return
+       }
+       ct := req.Header.Get("Content-Type")
+       if ct != t.contentType {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Message to send content type: %v on topic: %v does not match queue content type: %v", ct, topicId, t.contentType)
+               return
+       }
+
+       if ct == "application/json" {
+               // decoder := json.NewDecoder(req.Body)
+               // var j :=
+               // err := decoder.Decode(&j)
+               // if err != nil {
+               //      w.WriteHeader(http.StatusBadRequest)
+               //      w.Header().Set("Content-Type", "text/plain")
+               //      fmt.Fprintf(w, "Json payload cannot be decoded, error details %v\n", err)
+               //      return
+               // }
+               //m = mux.Vars(req)[""]
+               if err := req.ParseForm(); err != nil {
+                       w.WriteHeader(http.StatusBadRequest)
+                       fmt.Fprintf(w, "Json payload cannot be decoded on topic: %v, error details %v", topicId, err)
+                       return
+               }
+               b, err := ioutil.ReadAll(req.Body)
+               if err == nil {
+                       if len(b) < 500 {
+                               fmt.Printf("Json payload to send on topic: %v, msg: %v", topicId, string(b))
+                       } else {
+                               fmt.Printf("Json payload to send on topic: %v larger than 500 bytes, not printed...", topicId)
+                       }
+               }
+               err = q.Enqueue(string(b))
+               if err != nil {
+                       w.WriteHeader(http.StatusInternalServerError)
+                       fmt.Fprintf(w, "Json message to send cannot be put in queue")
+                       return
+               }
+       } else if ct == "text/plain" {
+               if err := req.ParseForm(); err != nil {
+                       w.WriteHeader(http.StatusBadRequest)
+                       fmt.Fprintf(w, "Text payload to send on topic: %v cannot be decoded, error details %v\n", topicId, err)
+                       return
+               }
+               b, err := ioutil.ReadAll(req.Body)
+               if err == nil {
+                       if len(b) < 500 {
+                               fmt.Printf("Text payload to send on topic: %v, msg: %v", topicId, string(b))
+                       } else {
+                               fmt.Printf("Text payload to send on topic: %v larger than 500 bytes, not printed...", topicId)
+                       }
+               }
+               err = q.Enqueue(string(b))
+               if err != nil {
+                       w.WriteHeader(http.StatusInternalServerError)
+                       fmt.Fprintf(w, "Text message to send cannot be put in queue")
+                       return
+               }
+       } else {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Message to send, unknown content type %v", ct)
+               return
+       }
+
+       w.Header().Set("Content-Type", "text/plain")
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, "Message to send put in queue")
+}
+
+// Get zero or one message from a topic
+// GET /topics/<topic>/msg
+func receiveFromTopic(w http.ResponseWriter, req *http.Request) {
+       t, topicId, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       if t.messages.received == nil {
+               w.WriteHeader(http.StatusBadRequest)
+               fmt.Fprintf(w, "Receiving not initiated on topic %v", topicId)
+               return
+       }
+
+       m, err := t.messages.received.Dequeue()
+       if err != nil {
+               w.WriteHeader(http.StatusNoContent)
+               return
+       }
+
+       w.Header().Set("Content-Type", t.contentType)
+       w.WriteHeader(http.StatusOK)
+       fmt.Fprintf(w, "%v", m)
+}
+
+// Remove the send queue to stop sending
+func stopToSend(w http.ResponseWriter, req *http.Request) {
+       fmt.Printf("Stop sending\n")
+       t, _, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       t.messages.stopSend()
+       w.WriteHeader(http.StatusNoContent)
+}
+
+// Remove the receive queue to stop receiving
+func stopToReceive(w http.ResponseWriter, req *http.Request) {
+       fmt.Printf("Stop receiving\n")
+       t, _, exist := getTopicFromRequest(w, req)
+       if !exist {
+               return
+       }
+       t.messages.stopReceive()
+       w.WriteHeader(http.StatusNoContent)
+}
+
+func HelloServer(w http.ResponseWriter, r *http.Request) {
+       fmt.Fprintf(w, "Hello, %s!", r.URL.Path[1:])
+}
+
+func main() {
+
+       initApp()
+
+       r := mux.NewRouter()
+
+       r.HandleFunc("/", healthCheck).Methods("GET")
+       r.HandleFunc("/reset", allreset).Methods("POST")
+       r.HandleFunc("/counters/{counter}", getCounter).Methods("GET")
+       r.HandleFunc("/topics", getTopics).Methods("GET")
+       r.HandleFunc("/topics/{topic}/counters/{counter}", getTopicCounter).Methods("GET")
+       r.HandleFunc("/topics/{topic}", createTopic).Methods("PUT").Queries("type", "{type}")
+       r.HandleFunc("/topics/{topic}", getTopic).Methods("GET")
+       r.HandleFunc("/topics/{topic}/startsend", startToSend).Methods("POST")
+       r.HandleFunc("/topics/{topic}/startreceive", startToReceive).Methods("POST")
+       r.HandleFunc("/topics/{topic}/stopsend", stopToSend).Methods("POST")
+       r.HandleFunc("/topics/{topic}/stopreceive", stopToReceive).Methods("POST")
+       r.HandleFunc("/topics/{topic}/msg", sendToTopic).Methods("POST")
+       r.HandleFunc("/topics/{topic}/msg", receiveFromTopic).Methods("GET")
+
+       port := "8090"
+       srv := &http.Server{
+               Handler:      r,
+               Addr:         ":" + port,
+               WriteTimeout: 15 * time.Second,
+               ReadTimeout:  15 * time.Second,
+       }
+       fmt.Println("Running on port: " + port)
+       fmt.Println(srv.ListenAndServe().Error())
+}
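
Editor's note: a minimal manual session against the routes registered in main() above, assuming the container is published on host port 8090 (as in build-and-start.sh) and using an example topic name and payload:

    curl -X PUT  "localhost:8090/topics/mytopic?type=text/plain"   # create the topic in kafka (201, or 200 if it already exists)
    curl -X POST localhost:8090/topics/mytopic/startsend           # start the producer loop
    curl -X POST localhost:8090/topics/mytopic/startreceive        # start the consumer loop
    curl -X POST -H "Content-Type: text/plain" -d "hello" localhost:8090/topics/mytopic/msg
    curl localhost:8090/topics/mytopic/msg                         # 200 + one message, or 204 if none has arrived yet
    curl localhost:8090/topics/mytopic/counters/sent               # per-topic counter
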
diff --git a/test/kafka-procon/start_local.sh b/test/kafka-procon/start_local.sh
new file mode 100755 (executable)
index 0000000..bfc1a1b
--- /dev/null
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2021 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+echo "This script requires golang to be installed and a running kafka instance on (or availble to) localhost"
+
+# Script to build and start app locally
+if [ $# -ne 1 ]; then
+    echo "usage: ./start_local.sh <kafka-bootstrapserver-port>"
+    echo "example: ./start_local.sh 30098"
+    exit 1
+fi
+
+export KAFKA_BOOTSTRAP_SERVER=localhost:$1
+
+echo "Starting kafka-procon on local machine"
+go run main.go
index 676c77c..9b58a99 100644 (file)
@@ -34,4 +34,16 @@ RUN pip install -r requirements.txt
 
 RUN chmod +x start.sh
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+## add permissions for appuser user
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+        chown -R appuser:appuser /var/log/nginx && \
+        chown -R appuser:appuser /var/lib/nginx && \
+        chown -R appuser:appuser /etc/nginx/conf.d
+RUN touch /var/run/nginx.pid && \
+        chown -R appuser:appuser /var/run/nginx.pid
+
+USER appuser
+
 CMD [ "./start.sh" ]
\ No newline at end of file
index 35b5ba0..be342b1 100644 (file)
@@ -1,4 +1,4 @@
-user www-data;
+user www-data;
 worker_processes auto;
 pid /run/nginx.pid;
 include /etc/nginx/modules-enabled/*.conf;
index 4768bf9..813cfbd 100644 (file)
@@ -32,4 +32,17 @@ RUN chmod +x start.sh
 RUN apt-get update
 RUN apt-get install -y nginx=1.14.*
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+
+## add permissions for appuser user
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+        chown -R appuser:appuser /var/log/nginx && \
+        chown -R appuser:appuser /var/lib/nginx && \
+        chown -R appuser:appuser /etc/nginx/conf.d
+RUN touch /var/run/nginx.pid && \
+        chown -R appuser:appuser /var/run/nginx.pid
+
+USER appuser
+
 CMD [ "./start.sh" ]
index 8119b0d..5ff404b 100644 (file)
@@ -1,4 +1,4 @@
-user www-data;
+user www-data;
 worker_processes auto;
 pid /run/nginx.pid;
 include /etc/nginx/modules-enabled/*.conf;
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 105c875..003dbfa 100644 (file)
@@ -19,9 +19,8 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
 
   consul-server:
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 2fc7093..1af026e 100644 (file)
@@ -30,8 +30,8 @@ portalapi.appname = Non-RT RIC Control Panel
 portalapi.username = Default
 portalapi.password = password
 
-# URL for enrichment coordinator service
-enrichmentcontroller.url.prefix = https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}/ei-producer/v1
+# URL for information coordinator service
+informationcontroller.url.prefix = https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}/ei-producer/v1
 
 # Mimic slow endpoints by defining sleep period, in milliseconds
 mock.config.delay = 0
index 94628fa..b860a89 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   control-panel:
     image: ${CONTROL_PANEL_IMAGE}
index 1672ad0..5f8ae57 100644 (file)
@@ -30,11 +30,11 @@ http {
             set $upstream ${NGW_DOMAIN_NAME};
             proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
         }
-        location ${CONTROL_PANEL_PATH_ECS_PREFIX} {
+        location ${CONTROL_PANEL_PATH_ICS_PREFIX} {
             set $upstream ${NGW_DOMAIN_NAME};
             proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
         }
-        location ${CONTROL_PANEL_PATH_ECS_PREFIX2} {
+        location ${CONTROL_PANEL_PATH_ICS_PREFIX2} {
             set $upstream ${NGW_DOMAIN_NAME};
             proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
         }
diff --git a/test/simulator-group/cr/.env b/test/simulator-group/cr/.env
new file mode 100644 (file)
index 0000000..a64de54
--- /dev/null
@@ -0,0 +1 @@
+COMPOSE_PROJECT_NAME=callback-receiver
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 0cb36d2..7953662 100644 (file)
@@ -1,5 +1,5 @@
 apiVersion: apps/v1
-kind: Deployment
+kind: StatefulSet
 metadata:
   name: $CR_APP_NAME
   namespace: $KUBE_SIM_NAMESPACE
@@ -7,7 +7,8 @@ metadata:
     run: $CR_APP_NAME
     autotest: CR
 spec:
-  replicas: 1
+  replicas: $CR_APP_COUNT
+  serviceName: $CR_APP_NAME
   selector:
     matchLabels:
       run: $CR_APP_NAME
index 634a464..047ec45 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
-  callback-receiver:
+  cr:
+    scale: $CR_APP_COUNT
     networks:
       - default
-    container_name: ${CR_APP_NAME}
     image: ${CR_IMAGE}
     ports:
-      - ${CR_EXTERNAL_PORT}:${CR_INTERNAL_PORT}
-      - ${CR_EXTERNAL_SECURE_PORT}:${CR_INTERNAL_SECURE_PORT}
+      - ${CR_INTERNAL_PORT}/tcp
+      - ${CR_INTERNAL_SECURE_PORT}/tcp
     labels:
       - "nrttest_app=CR"
       - "nrttest_dp=${CR_DISPLAY_NAME}"
index 43e532b..0367643 100644 (file)
@@ -7,15 +7,9 @@ metadata:
     run: $CR_APP_NAME
     autotest: CR
 spec:
-  type: ClusterIP
   ports:
-  - port: $CR_EXTERNAL_PORT
-    targetPort: $CR_INTERNAL_PORT
-    protocol: TCP
+  - port: 80
     name: http
-  - port: $CR_EXTERNAL_SECURE_PORT
-    targetPort: $CR_INTERNAL_SECURE_PORT
-    protocol: TCP
-    name: https
+  clusterIP: None
   selector:
-    run: $CR_APP_NAME
\ No newline at end of file
+    run: $CR_APP_NAME
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index f96db09..b35fe32 100644 (file)
@@ -62,7 +62,7 @@ app:
     # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
     http.proxy-host: $DMAAP_ADP_HTTP_PROXY_CONFIG_HOST_NAME
     http.proxy-port: $DMAAP_ADP_HTTP_PROXY_CONFIG_PORT
-  ecs-base-url: $ECS_SERVICE_PATH
+  ics-base-url: $ICS_SERVICE_PATH
   # Location of the component configuration file. The file will only be used if the Consul database is not used;
   # configuration from the Consul will override the file.
   configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
index f9dee41..cea605a 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   dmaap-adapter-service:
     image: ${DMAAP_ADP_IMAGE}
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index aa8a0f1..7c39bea 100644 (file)
@@ -36,7 +36,7 @@ spec:
         - name: INFO_PRODUCER_PORT
           value: "$DMAAP_MED_CONF_SELF_PORT"
         - name: INFO_COORD_ADDR
-          value: "$ECS_SERVICE_PATH"
+          value: "$ICS_SERVICE_PATH"
         - name: DMAAP_MR_ADDR
           value: "$MR_SERVICE_PATH"
         - name: LOG_LEVEL
index d0672df..9cb929c 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   dmaap-mediator-service:
     image: ${DMAAP_MED_IMAGE}
@@ -30,7 +30,7 @@ services:
     environment:
       - INFO_PRODUCER_HOST=${DMAAP_MED_CONF_SELF_HOST}
       - INFO_PRODUCER_PORT=${DMAAP_MED_CONF_SELF_PORT}
-      - INFO_COORD_ADDR=${ECS_SERVICE_PATH}
+      - INFO_COORD_ADDR=${ICS_SERVICE_PATH}
       - DMAAP_MR_ADDR=${MR_SERVICE_PATH}
       - LOG_LEVEL=Debug
     volumes:
diff --git a/test/simulator-group/dmaapmr/.gitignore b/test/simulator-group/dmaapmr/.gitignore
new file mode 100644 (file)
index 0000000..7dc00c5
--- /dev/null
@@ -0,0 +1,3 @@
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index a4ecc91..1a9d40a 100644 (file)
@@ -1,57 +1,58 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: $MR_DMAAP_APP_NAME
+  name: $MR_ZOOKEEPER_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_DMAAP_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
     autotest: DMAAPMR
 spec:
-  replicas: 1
   selector:
     matchLabels:
-      run: $MR_DMAAP_APP_NAME
+      run: $MR_ZOOKEEPER_APP_NAME
   template:
     metadata:
       labels:
-        run: $MR_DMAAP_APP_NAME
+        run: $MR_ZOOKEEPER_APP_NAME
         autotest: DMAAPMR
     spec:
       containers:
-      - name: $MR_DMAAP_APP_NAME
-        image: $ONAP_DMAAPMR_IMAGE
+      - name: $MR_ZOOKEEPER_APP_NAME
+        image: $ONAP_ZOOKEEPER_IMAGE
         imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
         ports:
         - name: http
-          containerPort: $MR_INTERNAL_PORT
-        - name: https
-          containerPort: $MR_INTERNAL_SECURE_PORT
+          containerPort: $MR_ZOOKEEPER_PORT
         env:
-        - name: enableCadi
-          value: 'false'
+        - name: ZOOKEEPER_REPLICAS
+          value: '1'
+        - name: ZOOKEEPER_TICK_TIME
+          value: '2000'
+        - name: ZOOKEEPER_SYNC_LIMIT
+          value: '5'
+        - name: ZOOKEEPER_INIT_LIMIT
+          value: '10'
+        - name: ZOOKEEPER_MAX_CLIENT_CNXNS
+          value: '200'
+        - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
+          value: '3'
+        - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
+          value: '24'
+        - name: ZOOKEEPER_CLIENT_PORT
+          value: '$MR_ZOOKEEPER_PORT'
+        - name: KAFKA_OPTS
+          value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
+        - name: ZOOKEEPER_SERVER_ID
+          value: '1'
         volumeMounts:
-        - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
-          subPath: MsgRtrApi.properties
-          name: dmaapmr-msg-rtr-api
-        - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
-          subPath: logback.xml
-          name: dmaapmr-log-back
-        - mountPath: /appl/dmaapMR1/etc/cadi.properties
-          subPath: cadi.properties
-          name: dmaapmr-cadi
+        - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
+          subPath: zk_server_jaas.conf
+          name: dmaapmr-zk-server-jaas
       volumes:
       - configMap:
           defaultMode: 420
-          name: dmaapmr-msgrtrapi.properties
-        name: dmaapmr-msg-rtr-api
-      - configMap:
-          defaultMode: 420
-          name: dmaapmr-logback.xml
-        name: dmaapmr-log-back
-      - configMap:
-          defaultMode: 420
-          name: dmaapmr-cadi.properties
-        name: dmaapmr-cadi
+          name: dmaapmr-zk-server-jaas.conf
+        name: dmaapmr-zk-server-jaas
 ---
 apiVersion: apps/v1
 kind: Deployment
@@ -79,6 +80,8 @@ spec:
         ports:
         - name: http
           containerPort: $MR_KAFKA_PORT
+        - name: http-external
+          containerPort: $MR_KAFKA_KUBE_NODE_PORT
         env:
         - name: enableCadi
           value: 'false'
@@ -89,13 +92,11 @@ spec:
         - name: KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS
           value: '40000'
         - name: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP
-          value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT'
+          value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
         - name: KAFKA_ADVERTISED_LISTENERS
-          value: 'INTERNAL_PLAINTEXT://$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT'
+          value: 'INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_KUBE_NODE_PORT'
         - name: KAFKA_LISTENERS
-          value: 'INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT'
-        # - name: KAFKA_LISTENERS
-        #   value: 'EXTERNAL_PLAINTEXT://0.0.0.0:9091,INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT'
+          value: 'INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_KUBE_NODE_PORT'
         - name: KAFKA_INTER_BROKER_LISTENER_NAME
           value: INTERNAL_PLAINTEXT
         - name: KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE
@@ -103,7 +104,7 @@ spec:
         - name: KAFKA_OPTS
           value: '-Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf'
         - name: KAFKA_ZOOKEEPER_SET_ACL
-          value: 'false'
+          value: 'true'
         - name: KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR
           value: '1'
         - name: KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS
@@ -121,58 +122,55 @@ spec:
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: $MR_ZOOKEEPER_APP_NAME
+  name: $MR_DMAAP_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
     autotest: DMAAPMR
 spec:
   replicas: 1
   selector:
     matchLabels:
-      run: $MR_ZOOKEEPER_APP_NAME
+      run: $MR_DMAAP_APP_NAME
   template:
     metadata:
       labels:
-        run: $MR_ZOOKEEPER_APP_NAME
+        run: $MR_DMAAP_APP_NAME
         autotest: DMAAPMR
     spec:
       containers:
-      - name: $MR_ZOOKEEPER_APP_NAME
-        image: $ONAP_ZOOKEEPER_IMAGE
+      - name: $MR_DMAAP_APP_NAME
+        image: $ONAP_DMAAPMR_IMAGE
         imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
         ports:
         - name: http
-          containerPort: $MR_ZOOKEEPER_PORT
+          containerPort: $MR_INTERNAL_PORT
+        - name: https
+          containerPort: $MR_INTERNAL_SECURE_PORT
         env:
-        - name: ZOOKEEPER_REPLICAS
-          value: '1'
-        - name: ZOOKEEPER_TICK_TIME
-          value: '2000'
-        - name: ZOOKEEPER_SYNC_LIMIT
-          value: '5'
-        - name: ZOOKEEPER_INIT_LIMIT
-          value: '10'
-        - name: ZOOKEEPER_MAX_CLIENT_CNXNS
-          value: '200'
-        - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
-          value: '3'
-        - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
-          value: '24'
-        - name: ZOOKEEPER_CLIENT_PORT
-          value: '$MR_ZOOKEEPER_PORT'
-        - name: KAFKA_OPTS
-          value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
-        - name: ZOOKEEPER_SERVER_ID
-          value: '1'
-        - name: ZOOKEEPER_SASL_ENABLED
+        - name: enableCadi
           value: 'false'
         volumeMounts:
-        - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
-          subPath: zk_server_jaas.conf
-          name: dmaapmr-zk-server-jaas
+        - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+          subPath: MsgRtrApi.properties
+          name: dmaapmr-msg-rtr-api
+        - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
+          subPath: logback.xml
+          name: dmaapmr-log-back
+        - mountPath: /appl/dmaapMR1/etc/cadi.properties
+          subPath: cadi.properties
+          name: dmaapmr-cadi
       volumes:
       - configMap:
           defaultMode: 420
-          name: dmaapmr-zk-server-jaas.conf
-        name: dmaapmr-zk-server-jaas
\ No newline at end of file
+          name: dmaapmr-msgrtrapi.properties
+        name: dmaapmr-msg-rtr-api
+      - configMap:
+          defaultMode: 420
+          name: dmaapmr-logback.xml
+        name: dmaapmr-log-back
+      - configMap:
+          defaultMode: 420
+          name: dmaapmr-cadi.properties
+        name: dmaapmr-cadi
+
diff --git a/test/simulator-group/dmaapmr/configs0/kafka/zk_client_jaas.conf b/test/simulator-group/dmaapmr/configs0/kafka/zk_client_jaas.conf
new file mode 100644 (file)
index 0000000..dca46d5
--- /dev/null
@@ -0,0 +1,6 @@
+Client {
+   org.apache.zookeeper.server.auth.DigestLoginModule required
+   username="kafka"
+   password="kafka_secret";
+ };
+
@@ -1,7 +1,8 @@
 # LICENSE_START=======================================================
 #  org.onap.dmaap
 #  ================================================================================
-#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+#  Copyright © 2020 Nordix Foundation. All rights reserved.
+#  Copyright © 2017 AT&T Intellectual Property. All rights reserved.
 #  ================================================================================
 #  Licensed under the Apache License, Version 2.0 (the "License");
 #  you may not use this file except in compliance with the License.
 ##
 ## Both Cambria and Kafka make use of Zookeeper.
 ##
-config.zk.servers=zookeeper:2181
+#config.zk.servers=172.18.1.1
+#config.zk.servers={{.Values.zookeeper.name}}:{{.Values.zookeeper.port}}
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
+
+#config.zk.root=/fe3c/cambria/config
+
 
 ###############################################################################
 ##
@@ -45,7 +51,7 @@ config.zk.servers=zookeeper:2181
 ##        if you want to change request.required.acks it can take this one value
 #kafka.metadata.broker.list=localhost:9092,localhost:9093
 #kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=message-router-kafka:9092
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
 ##kafka.request.required.acks=-1
 #kafka.client.zookeeper=${config.zk.servers}
 consumer.timeout.ms=100
@@ -81,6 +87,8 @@ kafka.rebalance.max.retries=6
 cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
 cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
 authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
 
 
 ###############################################################################
@@ -128,14 +136,13 @@ cambria.consumer.cache.touchFreqMs=120000
 ##        This server can report its metrics periodically on a topic.
 ##
 #metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics
-#msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.topic=cambria.apinode.metrics                                  #msgrtr.apinode.metrics.dmaap
 #metrics.send.cambria.sendEverySeconds=60
 
 cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
 consumer.timeout=17
 default.partitions=3
-default.replicas=1
+default.replicas=3
 ##############################################################################
 #100mb
 maxcontentlength=10000
@@ -163,4 +170,5 @@ msgRtr.mirrormaker.consumerid=1
 kafka.max.poll.interval.ms=300000
 kafka.heartbeat.interval.ms=60000
 kafka.session.timeout.ms=240000
-kafka.max.poll.records=1000
\ No newline at end of file
+kafka.max.poll.records=1000
+
@@ -1,21 +1,20 @@
-#Removed to be disable aaf in test env
-#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095\
+aaf_locate_url=https://aaf-locate.{{ include "common.namespace" . }}:8095
 aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
 aaf_env=DEV
 aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
 
-#Removed to be disable aaf in test env
-# cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
-# cadi_truststore_password=8FyfX+ar;0$uZQ0h9*oXchNX
+cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
+cadi_truststore_password=enc:mN6GiIzFQxKGDzAXDOs7b4j8DdIX02QrZ9QOWNRpxV3rD6whPCfizSMZkJwxi_FJ
 
 cadi_keyfile=/appl/dmaapMR1/etc/org.onap.dmaap.mr.keyfile
 
 cadi_alias=dmaapmr@mr.dmaap.onap.org
 cadi_keystore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.p12
-cadi_keystore_password=GDQttV7)BlOvWMf6F7tz&cjy
+cadi_keystore_password=enc:_JJT2gAEkRzXla5xfDIHal8pIoIB5iIos3USvZQT6sL-l14LpI5fRFR_QIGUCh5W
 cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
 
 cadi_loglevel=INFO
 cadi_protocols=TLSv1.1,TLSv1.2
 cadi_latitude=37.78187
-cadi_longitude=-122.26147
\ No newline at end of file
+cadi_longitude=-122.26147
+
@@ -1,6 +1,7 @@
 <!--
      ============LICENSE_START=======================================================
-     Copyright © 2019 AT&T Intellectual Property. All rights reserved.
+     Copyright © 2020 Nordix Foundation. All rights reserved.
+     Copyright © 2019 AT&T Intellectual Property. All rights reserved.
      ================================================================================
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
   </root>
 
 </configuration>
+
@@ -1,4 +1,5 @@
 Server {
        org.apache.zookeeper.server.auth.DigestLoginModule required
-       user_kafka="kafka_secret";
-};
\ No newline at end of file
+       user_kafka=kafka_secret;
+};
+
@@ -35,7 +35,8 @@
 ##
 ## Both Cambria and Kafka make use of Zookeeper.
 ##
-config.zk.servers=$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
+#$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
 
 ###############################################################################
 ##
@@ -46,7 +47,8 @@ config.zk.servers=$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
 ##        if you want to change request.required.acks it can take this one value
 #kafka.metadata.broker.list=localhost:9092,localhost:9093
 #kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
+#$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
 ##kafka.request.required.acks=-1
 #kafka.client.zookeeper=${config.zk.servers}
 consumer.timeout.ms=100
@@ -61,7 +63,6 @@ auto.commit.enable=false
 kafka.rebalance.backoff.ms=10000
 kafka.rebalance.max.retries=6
 
-
 ###############################################################################
 ##
 ##        Secured Config
index f9a5f21..492fab1 100644 (file)
@@ -18,9 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   zookeeper:
     image: $ONAP_ZOOKEEPER_IMAGE
@@ -51,14 +50,15 @@ services:
    container_name: $MR_KAFKA_APP_NAME
    ports:
     - "$MR_KAFKA_PORT:$MR_KAFKA_PORT"
+    - "$MR_KAFKA_DOCKER_LOCALHOST_PORT:$MR_KAFKA_DOCKER_LOCALHOST_PORT"
    environment:
     enableCadi: 'false'
     KAFKA_ZOOKEEPER_CONNECT: $MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
     KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 40000
     KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS: 40000
-    KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT
-    KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
-    KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT
+    KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+    KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_DOCKER_LOCALHOST_PORT
+    KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_DOCKER_LOCALHOST_PORT
     KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_PLAINTEXT
     KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE: 'false'
     KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf
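
Editor's note: the extra PLAINTEXT_HOST listener above is advertised as localhost:$MR_KAFKA_DOCKER_LOCALHOST_PORT so that a kafka client running on the docker host can reach the broker directly. A hypothetical sanity check, assuming the standard Kafka CLI tools are installed locally and the variable is substituted with the actual port:

    kafka-topics.sh --bootstrap-server localhost:$MR_KAFKA_DOCKER_LOCALHOST_PORT --list
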
diff --git a/test/simulator-group/dmaapmr/mnt/kafka/zk_client_jaas.conf b/test/simulator-group/dmaapmr/mnt/kafka/zk_client_jaas.conf
deleted file mode 100644 (file)
index 79a7601..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-Client {
-  org.apache.zookeeper.server.auth.DigestLoginModule required
-  username="kafka"
-  password="kafka_secret";
- };
\ No newline at end of file
index 7fb0962..913b13e 100644 (file)
@@ -1,24 +1,20 @@
 apiVersion: v1
 kind: Service
 metadata:
-  name: $MR_DMAAP_APP_NAME
+  name: $MR_ZOOKEEPER_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_DMAAP_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
     autotest: DMAAPMR
 spec:
   type: ClusterIP
   ports:
-  - port: $MR_EXTERNAL_PORT
-    targetPort: $MR_INTERNAL_PORT
+  - port: $MR_ZOOKEEPER_PORT
+    targetPort: $MR_ZOOKEEPER_PORT
     protocol: TCP
     name: http
-  - port: $MR_EXTERNAL_SECURE_PORT
-    targetPort: $MR_INTERNAL_SECURE_PORT
-    protocol: TCP
-    name: https
   selector:
-    run: $MR_DMAAP_APP_NAME
+    run: $MR_ZOOKEEPER_APP_NAME
 ---
 apiVersion: v1
 kind: Service
@@ -29,29 +25,40 @@ metadata:
     run: $MR_KAFKA_APP_NAME
     autotest: DMAAPMR
 spec:
-  type: ClusterIP
+  type: NodePort
   ports:
   - port: $MR_KAFKA_PORT
     targetPort: $MR_KAFKA_PORT
     protocol: TCP
     name: http
+  - port: $MR_KAFKA_KUBE_NODE_PORT
+    targetPort: $MR_KAFKA_KUBE_NODE_PORT
+    protocol: TCP
+    name: http-external
+    nodePort: $MR_KAFKA_KUBE_NODE_PORT
   selector:
     run: $MR_KAFKA_APP_NAME
 ---
 apiVersion: v1
 kind: Service
 metadata:
-  name: $MR_ZOOKEEPER_APP_NAME
+  name: $MR_DMAAP_APP_NAME
   namespace: $KUBE_ONAP_NAMESPACE
   labels:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
     autotest: DMAAPMR
 spec:
   type: ClusterIP
   ports:
-  - port: $MR_ZOOKEEPER_PORT
-    targetPort: $MR_ZOOKEEPER_PORT
+  - port: $MR_EXTERNAL_PORT
+    targetPort: $MR_INTERNAL_PORT
     protocol: TCP
     name: http
+  - port: $MR_EXTERNAL_SECURE_PORT
+    targetPort: $MR_INTERNAL_SECURE_PORT
+    protocol: TCP
+    name: https
   selector:
-    run: $MR_ZOOKEEPER_APP_NAME
+    run: $MR_DMAAP_APP_NAME
+
+
diff --git a/test/simulator-group/ecs/.gitignore b/test/simulator-group/ecs/.gitignore
deleted file mode 100644 (file)
index 4aa3a7a..0000000
+++ /dev/null
@@ -1,2 +0,0 @@
-.tmp.json
-.dockererr
\ No newline at end of file
diff --git a/test/simulator-group/ecs/app.yaml b/test/simulator-group/ecs/app.yaml
deleted file mode 100644 (file)
index f1090ce..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: $ECS_APP_NAME
-  namespace: $KUBE_NONRTRIC_NAMESPACE
-  labels:
-    run: $ECS_APP_NAME
-    autotest: ECS
-spec:
-  replicas: 1
-  selector:
-    matchLabels:
-      run: $ECS_APP_NAME
-  template:
-    metadata:
-      labels:
-        run: $ECS_APP_NAME
-        autotest: ECS
-    spec:
-      containers:
-      - name: $ECS_APP_NAME
-        image: $ECS_IMAGE
-        imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
-        ports:
-        - name: http
-          containerPort: $ECS_INTERNAL_PORT
-        - name: https
-          containerPort: $ECS_INTERNAL_SECURE_PORT
-        volumeMounts:
-        - mountPath: $ECS_CONFIG_MOUNT_PATH
-          name: ecs-conf-name
-        volumeMounts:
-        - mountPath: $ECS_CONTAINER_MNT_DIR
-          name: ecs-data-name
-      volumes:
-      - configMap:
-          defaultMode: 420
-          name: $ECS_CONFIG_CONFIGMAP_NAME
-        name: ecs-conf-name
-      - persistentVolumeClaim:
-          claimName: $ECS_DATA_PVC_NAME
-        name: ecs-data-name
-# Selector will be set when pod is started first time
-      nodeSelector:
-
diff --git a/test/simulator-group/ecs/mnt/.gitignore b/test/simulator-group/ecs/mnt/.gitignore
deleted file mode 100644 (file)
index 72e8ffc..0000000
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/test/simulator-group/ecs/svc.yaml b/test/simulator-group/ecs/svc.yaml
deleted file mode 100644 (file)
index e311c0e..0000000
+++ /dev/null
@@ -1,21 +0,0 @@
-apiVersion: v1
-kind: Service
-metadata:
-  name: $ECS_APP_NAME
-  namespace: $KUBE_NONRTRIC_NAMESPACE
-  labels:
-    run: $ECS_APP_NAME
-    autotest: ECS
-spec:
-  type: ClusterIP
-  ports:
-  - port: $ECS_EXTERNAL_PORT
-    targetPort: $ECS_INTERNAL_PORT
-    protocol: TCP
-    name: http
-  - port: $ECS_EXTERNAL_SECURE_PORT
-    targetPort: $ECS_INTERNAL_SECURE_PORT
-    protocol: TCP
-    name: https
-  selector:
-    run: $ECS_APP_NAME
\ No newline at end of file
index 3384196..edfe9f8 100644 (file)
@@ -1 +1,2 @@
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index c27ba5e..2c8c61b 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   httpproxy:
     networks:
diff --git a/test/simulator-group/ics/.gitignore b/test/simulator-group/ics/.gitignore
new file mode 100644 (file)
index 0000000..7dc00c5
--- /dev/null
@@ -0,0 +1,3 @@
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
diff --git a/test/simulator-group/ics/app.yaml b/test/simulator-group/ics/app.yaml
new file mode 100644 (file)
index 0000000..ceb74c7
--- /dev/null
@@ -0,0 +1,45 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: $ICS_APP_NAME
+  namespace: $KUBE_NONRTRIC_NAMESPACE
+  labels:
+    run: $ICS_APP_NAME
+    autotest: ICS
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      run: $ICS_APP_NAME
+  template:
+    metadata:
+      labels:
+        run: $ICS_APP_NAME
+        autotest: ICS
+    spec:
+      containers:
+      - name: $ICS_APP_NAME
+        image: $ICS_IMAGE
+        imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
+        ports:
+        - name: http
+          containerPort: $ICS_INTERNAL_PORT
+        - name: https
+          containerPort: $ICS_INTERNAL_SECURE_PORT
+        volumeMounts:
+        - mountPath: $ICS_CONFIG_MOUNT_PATH/$ICS_CONFIG_FILE
+          subPath: $ICS_CONFIG_FILE
+          name: ics-conf-name
+        - mountPath: $ICS_CONTAINER_MNT_DIR
+          name: ics-data-name
+      volumes:
+      - configMap:
+          defaultMode: 420
+          name: $ICS_CONFIG_CONFIGMAP_NAME
+        name: ics-conf-name
+      - persistentVolumeClaim:
+          claimName: $ICS_DATA_PVC_NAME
+        name: ics-data-name
+# Selector will be set when pod is started first time
+      nodeSelector:
+
similarity index 60%
rename from test/simulator-group/ecs/application.yaml
rename to test/simulator-group/ics/application.yaml
index acc914c..3f15aff 100644 (file)
@@ -1,5 +1,5 @@
 ################################################################################
-#   Copyright (c) 2020 Nordix Foundation.                                      #
+#   Copyright (c) 2021 Nordix Foundation.                                      #
 #                                                                              #
 #   Licensed under the Apache License, Version 2.0 (the "License");            #
 #   you may not use this file except in compliance with the License.           #
@@ -13,7 +13,6 @@
 #   See the License for the specific language governing permissions and        #
 #   limitations under the License.                                             #
 ################################################################################
-
 spring:
   profiles:
     active: prod
@@ -21,36 +20,46 @@ spring:
     allow-bean-definition-overriding: true
   aop:
     auto: false
+springdoc:
+  show-actuator: true
 management:
   endpoints:
     web:
       exposure:
+        # Enabling of springboot actuator features. See springboot documentation.
         include: "loggers,logfile,health,info,metrics,threaddump,heapdump"
 
 logging:
+  # Configuration of logging
   level:
     ROOT: ERROR
     org.springframework: ERROR
     org.springframework.data: ERROR
     org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
-    org.oransc.enrichment: INFO
+    org.oransc.ics: INFO
   file:
-    name: /var/log/enrichment-coordinator-service/application.log
+    name: $ICS_LOGPATH
 server:
-   port : 8434
-   http-port: 8083
+   # Configuration of the HTTP/REST server. The parameters are defined and handled by the springboot framework.
+   # See springboot documentation.
+   port : $ICS_INTERNAL_SECURE_PORT
+   http-port: $ICS_INTERNAL_PORT
    ssl:
       key-store-type: JKS
       key-store-password: policy_agent
-      key-store: /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
+      key-store: /opt/app/information-coordinator-service/etc/cert/keystore.jks
       key-password: policy_agent
       key-alias: policy_agent
 app:
-  filepath: /opt/app/enrichment-coordinator-service/data/application_configuration.json
   webclient:
+    # Configuration of the trust store used for the HTTP client (outgoing requests)
+    # The file location and the password for the truststore are only relevant if trust-store-used == true
+    # Note that the same keystore as for the server is used.
     trust-store-used: false
     trust-store-password: policy_agent
-    trust-store: /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
-    http.proxy-host: $ECS_HTTP_PROXY_CONFIG_HOST_NAME
-    http.proxy-port: $ECS_HTTP_PROXY_CONFIG_PORT
-  vardata-directory: /var/enrichment-coordinator-service
\ No newline at end of file
+    trust-store: /opt/app/information-coordinator-service/etc/cert/truststore.jks
+    # Configuration of the HTTP proxy used for southbound access.
+    # The HTTP proxy (if configured) is only used for accessing Near-RT RICs
+    http.proxy-host: $ICS_HTTP_PROXY_CONFIG_HOST_NAME
+    http.proxy-port: $ICS_HTTP_PROXY_CONFIG_PORT
+  vardata-directory: $ICS_CONTAINER_MNT_DIR
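The $ICS_* placeholders in test/simulator-group/ics/application.yaml are resolved by the test environment before the file is mounted into the container. A minimal sketch of that substitution step, assuming the values are exported as environment variables (the Go helper itself is illustrative, not part of the test scripts):

```go
package main

import (
	"fmt"
	"os"
)

// Expands $VAR placeholders (e.g. $ICS_INTERNAL_PORT, $ICS_LOGPATH) in the
// template using the current environment, mimicking what the test scripts
// are assumed to do before the config is mounted into the container.
func main() {
	tmpl, err := os.ReadFile("test/simulator-group/ics/application.yaml")
	if err != nil {
		fmt.Fprintln(os.Stderr, "read template:", err)
		os.Exit(1)
	}
	expanded := os.Expand(string(tmpl), func(name string) string {
		return os.Getenv(name) // unset variables become empty strings
	})
	if err := os.WriteFile(".tmp_application.yaml", []byte(expanded), 0o644); err != nil {
		fmt.Fprintln(os.Stderr, "write expanded config:", err)
		os.Exit(1)
	}
	fmt.Println("expanded config written to .tmp_application.yaml")
}
```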
similarity index 65%
rename from test/simulator-group/ecs/docker-compose.yml
rename to test/simulator-group/ics/docker-compose.yml
index b057753..0f75355 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
-  ecs:
-    image: ${ECS_IMAGE}
-    container_name: ${ECS_APP_NAME}
+  ics:
+    image: ${ICS_IMAGE}
+    container_name: ${ICS_APP_NAME}
     networks:
       default:
         aliases:
-          - ${ECS_APP_NAME_ALIAS}
+          - ${ICS_APP_NAME_ALIAS}
     volumes:
-    - ${ECS_HOST_MNT_DIR}/db:${ECS_CONTAINER_MNT_DIR}
-    - ${ECS_HOST_MNT_DIR}/$ECS_CONFIG_FILE:${ECS_CONFIG_MOUNT_PATH}/$ECS_CONFIG_FILE
+    - ${ICS_HOST_MNT_DIR}/db:${ICS_CONTAINER_MNT_DIR}
+    - ${ICS_HOST_MNT_DIR}/$ICS_CONFIG_FILE:${ICS_CONFIG_MOUNT_PATH}/$ICS_CONFIG_FILE
     ports:
-    - ${ECS_EXTERNAL_PORT}:${ECS_INTERNAL_PORT}
-    - ${ECS_EXTERNAL_SECURE_PORT}:${ECS_INTERNAL_SECURE_PORT}
+    - ${ICS_EXTERNAL_PORT}:${ICS_INTERNAL_PORT}
+    - ${ICS_EXTERNAL_SECURE_PORT}:${ICS_INTERNAL_SECURE_PORT}
     labels:
-      - "nrttest_app=ECS"
-      - "nrttest_dp=${ECS_DISPLAY_NAME}"
+      - "nrttest_app=ICS"
+      - "nrttest_dp=${ICS_DISPLAY_NAME}"
similarity index 58%
rename from test/simulator-group/ecs/pv.yaml
rename to test/simulator-group/ics/pv.yaml
index ffccecc..3e9e285 100644 (file)
@@ -1,16 +1,16 @@
 apiVersion: v1
 kind: PersistentVolume
 metadata:
-  name: $ECS_DATA_PV_NAME
+  name: $ICS_DATA_PV_NAME
   labels:
-    run: $ECS_APP_NAME
-    autotest: ECS
+    run: $ICS_APP_NAME
+    autotest: ICS
 spec:
-  storageClassName: ecs-standard
+  storageClassName: ics-standard
   capacity:
     storage: 1Mi
   accessModes:
     - ReadWriteOnce
   persistentVolumeReclaimPolicy: Delete
   hostPath:
-    path: "/tmp/$ECS_PV_PATH"
+    path: "/tmp/$ICS_PV_PATH"
similarity index 67%
rename from test/simulator-group/ecs/pvc.yaml
rename to test/simulator-group/ics/pvc.yaml
index 7f07893..bbb8e36 100644 (file)
@@ -1,16 +1,16 @@
 apiVersion: v1
 kind: PersistentVolumeClaim
 metadata:
-  name: $ECS_DATA_PVC_NAME
+  name: $ICS_DATA_PVC_NAME
   namespace: $KUBE_NONRTRIC_NAMESPACE
   labels:
-    run: $ECS_APP_NAME
-    autotest: ECS
+    run: $ICS_APP_NAME
+    autotest: ICS
 spec:
   accessModes:
   - ReadWriteOnce
   resources:
     requests:
       storage: 1Mi
-  storageClassName: ecs-standard
+  storageClassName: ics-standard
   volumeMode: Filesystem
diff --git a/test/simulator-group/ics/svc.yaml b/test/simulator-group/ics/svc.yaml
new file mode 100644 (file)
index 0000000..0d758fe
--- /dev/null
@@ -0,0 +1,21 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: $ICS_APP_NAME
+  namespace: $KUBE_NONRTRIC_NAMESPACE
+  labels:
+    run: $ICS_APP_NAME
+    autotest: ICS
+spec:
+  type: ClusterIP
+  ports:
+  - port: $ICS_EXTERNAL_PORT
+    targetPort: $ICS_INTERNAL_PORT
+    protocol: TCP
+    name: http
+  - port: $ICS_EXTERNAL_SECURE_PORT
+    targetPort: $ICS_INTERNAL_SECURE_PORT
+    protocol: TCP
+    name: https
+  selector:
+    run: $ICS_APP_NAME
\ No newline at end of file
diff --git a/test/simulator-group/kafka-procon/.gitignore b/test/simulator-group/kafka-procon/.gitignore
new file mode 100644 (file)
index 0000000..7dc00c5
--- /dev/null
@@ -0,0 +1,3 @@
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
diff --git a/test/simulator-group/kafka-procon/app.yaml b/test/simulator-group/kafka-procon/app.yaml
new file mode 100644 (file)
index 0000000..fd2bc45
--- /dev/null
@@ -0,0 +1,33 @@
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+  name: $KAFKAPC_APP_NAME
+  namespace: $KUBE_SIM_NAMESPACE
+  labels:
+    run: $KAFKAPC_APP_NAME
+    autotest: CR
+spec:
+  replicas: 1
+  serviceName: $KAFKAPC_APP_NAME
+  selector:
+    matchLabels:
+      run: $KAFKAPC_APP_NAME
+  template:
+    metadata:
+      labels:
+        run: $KAFKAPC_APP_NAME
+        autotest: CR
+    spec:
+      containers:
+      - name: $KAFKAPC_APP_NAME
+        image: $KAFKAPC_IMAGE
+        imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
+        ports:
+        - name: http
+          containerPort: $KAFKAPC_INTERNAL_PORT
+        - name: https
+          containerPort: $KAFKAPC_INTERNAL_SECURE_PORT
+        env:
+        - name: KAFKA_BOOTSTRAP_SERVER
+          value: $MR_KAFKA_SERVICE_PATH
+
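The only application configuration the kafka-procon simulator receives here is the Kafka bootstrap address, injected as KAFKA_BOOTSTRAP_SERVER from $MR_KAFKA_SERVICE_PATH. A rough sketch of how a producer inside the container could use that variable, assuming the segmentio/kafka-go client (kafka-procon's actual implementation is not part of this diff, and the topic name is made up):

```go
package main

import (
	"context"
	"log"
	"os"

	"github.com/segmentio/kafka-go"
)

func main() {
	// Bootstrap address injected via the StatefulSet/compose environment above.
	bootstrap := os.Getenv("KAFKA_BOOTSTRAP_SERVER")
	if bootstrap == "" {
		log.Fatal("KAFKA_BOOTSTRAP_SERVER is not set")
	}

	// "example-topic" is an illustrative topic name only.
	w := &kafka.Writer{
		Addr:  kafka.TCP(bootstrap),
		Topic: "example-topic",
	}
	defer w.Close()

	if err := w.WriteMessages(context.Background(),
		kafka.Message{Value: []byte("test message")}); err != nil {
		log.Fatal("produce failed: ", err)
	}
	log.Println("message sent via", bootstrap)
}
```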
diff --git a/test/simulator-group/kafka-procon/docker-compose.yml b/test/simulator-group/kafka-procon/docker-compose.yml
new file mode 100644 (file)
index 0000000..bcb35d9
--- /dev/null
@@ -0,0 +1,36 @@
+#  ============LICENSE_START===============================================
+#  Copyright (C) 2020 Nordix Foundation. All rights reserved.
+#  ========================================================================
+#  Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+#  You may obtain a copy of the License at
+#
+#       http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#  ============LICENSE_END=================================================
+#
+
+version: '3.0'
+networks:
+  default:
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
+services:
+  kafka-procon:
+    networks:
+      - default
+    image: ${KAFKAPC_IMAGE}
+    container_name: ${KAFKAPC_APP_NAME}
+    ports:
+      - ${KAFKAPC_EXTERNAL_PORT}:${KAFKAPC_INTERNAL_PORT}
+      - ${KAFKAPC_EXTERNAL_SECURE_PORT}:${KAFKAPC_INTERNAL_SECURE_PORT}
+    environment:
+      KAFKA_BOOTSTRAP_SERVER: $MR_KAFKA_SERVICE_PATH
+    labels:
+      - "nrttest_app=CR"
+      - "nrttest_dp=${KAFKAPC_DISPLAY_NAME}"
diff --git a/test/simulator-group/kafka-procon/svc.yaml b/test/simulator-group/kafka-procon/svc.yaml
new file mode 100644 (file)
index 0000000..312e239
--- /dev/null
@@ -0,0 +1,21 @@
+apiVersion: v1
+kind: Service
+metadata:
+  name: $KAFKAPC_APP_NAME
+  namespace: $KUBE_SIM_NAMESPACE
+  labels:
+    run: $KAFKAPC_APP_NAME
+    autotest: KAFKAPC
+spec:
+  type: ClusterIP
+  ports:
+  - port: $KAFKAPC_EXTERNAL_PORT
+    targetPort: $KAFKAPC_INTERNAL_PORT
+    protocol: TCP
+    name: http
+  - port: $KAFKAPC_EXTERNAL_SECURE_PORT
+    targetPort: $KAFKAPC_INTERNAL_SECURE_PORT
+    protocol: TCP
+    name: https
+  selector:
+    run: $KAFKAPC_APP_NAME
\ No newline at end of file
index 7f0f349..2e644e0 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   kubeproxy:
     image: ${KUBE_PROXY_IMAGE}
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index a1c96c0..608f5c8 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   mr-stub:
     networks:
index f289168..f87389c 100644 (file)
@@ -31,11 +31,11 @@ spring:
         predicates:
         - Path=/a1-policy/**
       - id: A1-EI
-        uri: https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}
+        uri: https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}
         predicates:
         - Path=/ei-producer/**
       - id: A1-EI2
-        uri: https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}
+        uri: https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}
         predicates:
         - Path=/data-producer/**,/data-consumer/**
 management:
index c897ba7..7002eaf 100644 (file)
@@ -18,8 +18,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   nonrtric-gateway:
     image: ${NRT_GATEWAY_IMAGE}
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 269d9ca..3f1fcfe 100644 (file)
@@ -32,10 +32,8 @@ spec:
         volumeMounts:
         - mountPath: $POLICY_AGENT_CONFIG_MOUNT_PATH
           name: pa-conf-name
-#        volumeMounts:
         - mountPath: $POLICY_AGENT_CONTAINER_MNT_DIR
           name: pa-pv-data-name
-#        volumeMounts:
         - mountPath: $POLICY_AGENT_DATA_MOUNT_PATH
           name: pa-data-name
       volumes:
index 2261151..4b42b42 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   policy-agent:
     image: ${POLICY_AGENT_IMAGE}
index 4aa3a7a..b676cf8 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
index f771352..e05d3ad 100644 (file)
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   producer-stub:
     networks:
-      default:
-        aliases:
-          - ${PROD_STUB_APP_NAME_ALIAS}
+      - default
     container_name: ${PROD_STUB_APP_NAME}
     image: ${PROD_STUB_IMAGE}
     ports:
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 3dbd45b..7c644b9 100644 (file)
@@ -17,8 +17,8 @@
 version: '3.0'
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   rapp-catalogue:
     image: ${RAPP_CAT_IMAGE}
index c0c4339..1f9d6f9 100644 (file)
@@ -1,4 +1,5 @@
 .tmp.json
 .dockererr
 .env
-fakedir
\ No newline at end of file
+fakedir
+gen_docker-compose*
\ No newline at end of file
index a6358c7..3515973 100644 (file)
@@ -21,11 +21,11 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 
 services:
-  g1:
+  ${RICSIM_COMPOSE_SERVICE_NAME}:
     image: ${RIC_SIM_IMAGE}
     networks:
       - default
@@ -33,7 +33,7 @@ services:
       - ${RIC_SIM_INTERNAL_PORT}/tcp
       - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
     environment:
-      - A1_VERSION=${G1_A1_VERSION}
+      - A1_VERSION=${RICSIM_COMPOSE_A1_VERSION}
       - REMOTE_HOSTS_LOGGING=1
       - ALLOW_HTTP=true
       - DUPLICATE_CHECK=1
@@ -42,75 +42,3 @@ services:
     labels:
       - "nrttest_app=RICSIM"
       - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g2:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G2_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g3:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G3_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g4:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G4_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
-  g5:
-    image: ${RIC_SIM_IMAGE}
-    networks:
-      - default
-    ports:
-      - ${RIC_SIM_INTERNAL_PORT}/tcp
-      - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
-    environment:
-      - A1_VERSION=${G5_A1_VERSION}
-      - REMOTE_HOSTS_LOGGING=1
-      - ALLOW_HTTP=true
-      - DUPLICATE_CHECK=1
-    volumes:
-      - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
-    labels:
-      - "nrttest_app=RICSIM"
-      - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
\ No newline at end of file
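With the service name and A1 version now read from RICSIM_COMPOSE_SERVICE_NAME and RICSIM_COMPOSE_A1_VERSION, the fixed g1..g5 services above become unnecessary: the same compose file can be instantiated once per simulator group. A sketch of how a caller might do that (the file path, project naming and docker-compose invocation are assumptions; the real test scripts may drive this differently):

```go
package main

import (
	"log"
	"os"
	"os/exec"
)

// Starts one RIC simulator group by re-using the single parameterized
// compose service. Each group gets its own compose project name so the
// containers do not collide.
func startRicSimGroup(serviceName, a1Version string) error {
	cmd := exec.Command("docker-compose",
		"-f", "test/simulator-group/ric/docker-compose.yml", // assumed path
		"-p", serviceName, // hypothetical project naming scheme
		"up", "-d")
	cmd.Env = append(os.Environ(),
		"RICSIM_COMPOSE_SERVICE_NAME="+serviceName,
		"RICSIM_COMPOSE_A1_VERSION="+a1Version)
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	return cmd.Run()
}

func main() {
	// Group names and A1 versions below are illustrative only.
	groups := map[string]string{"g1": "STD_2.0.0", "g2": "OSC_2.1.0"}
	for name, version := range groups {
		if err := startRicSimGroup(name, version); err != nil {
			log.Fatalf("failed to start %s: %v", name, err)
		}
	}
}
```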
index 4aa3a7a..7dc00c5 100644 (file)
@@ -1,2 +1,3 @@
 .tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
index 03483a0..37c99b6 100644 (file)
@@ -17,9 +17,8 @@ version: '3.0'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   db:
     image: ${SDNC_DB_IMAGE}
index 0b6f357..505a54c 100644 (file)
@@ -17,9 +17,8 @@ version: '3'
 
 networks:
   default:
-    external:
-      name: ${DOCKER_SIM_NWNAME}
-
+    external: true
+    name: ${DOCKER_SIM_NWNAME}
 services:
   db:
     image: ${SDNC_DB_IMAGE}
index e118e17..5b85414 100644 (file)
@@ -24,7 +24,7 @@ var LOCALHOST="http://127.0.0.1:"
 var MRSTUB_PORT="3905"
 var AGENT_PORT="8081"
 var CR_PORT="8090"
-var ECS_PORT="8083"
+var ICS_PORT="8083"
 var PRODSTUB_PORT="8092"
 var RC_PORT="8680"
 
@@ -186,14 +186,14 @@ var simvar4=[]
 var simvar5=[]
 var simvar6=[]
 
-//Status variables, for parameters values fetched from ecs
-var ecs1="", ecs2="", ecs3="", ecs4="", ecs_types="-", ecs_producers="-";
-var ecs_producer_arr=new Array(0)
-var ecs_producer_type_arr=new Array(0)
-var ecs_producer_jobs_arr=new Array(0)
-var ecs_producer_status_arr=new Array(0)
-var ecs_jobs=new Array(0)
-var ecs_job_status=new Array(0)
+//Status variables, for parameters values fetched from ics
+var ics1="", ics2="", ics3="", ics4="", ics_types="-", ics_producers="-";
+var ics_producer_arr=new Array(0)
+var ics_producer_type_arr=new Array(0)
+var ics_producer_jobs_arr=new Array(0)
+var ics_producer_status_arr=new Array(0)
+var ics_jobs=new Array(0)
+var ics_job_status=new Array(0)
 
 //Status variables, for parameters values fetched from prodstub
 var ps2="", ps3="", ps4="", ps_types="-", ps_producers="-";
@@ -209,7 +209,7 @@ var getCtr=0
 
 var refreshCount_pol=-1
 
-var refreshCount_ecs=-1
+var refreshCount_ics=-1
 
 var refreshCount_cr=-1
 
@@ -429,84 +429,84 @@ function fetchAllMetrics_pol() {
     }, 500)
 }
 
-function fetchAllMetrics_ecs() {
+function fetchAllMetrics_ics() {
 
-    console.log("Fetching enrichment metrics - timer:" + refreshCount_ecs)
+    console.log("Fetching information metrics - timer:" + refreshCount_ics)
 
-    if (refreshCount_ecs < 0) {
-        refreshCount_ecs = -1
+    if (refreshCount_ics < 0) {
+        refreshCount_ics = -1
         return
     } else {
-        refreshCount_ecs = refreshCount_ecs - 1
+        refreshCount_ics = refreshCount_ics - 1
     }
     setTimeout(() => {
 
-        if (checkFunctionFlag("ecs_stat")) {
-            getSimCtr(LOCALHOST+ECS_PORT+"/status", 0, function(data, index) {
+        if (checkFunctionFlag("ics_stat")) {
+            getSimCtr(LOCALHOST+ICS_PORT+"/status", 0, function(data, index) {
                 try {
                     var jd=JSON.parse(data);
-                    ecs1=jd["status"]
-                    ecs2=""+jd["no_of_producers"]
-                    ecs3=""+jd["no_of_types"]
-                    ecs4=""+jd["no_of_jobs"]
+                    ics1=jd["status"]
+                    ics2=""+jd["no_of_producers"]
+                    ics3=""+jd["no_of_types"]
+                    ics4=""+jd["no_of_jobs"]
                 }
                 catch (err) {
-                    ecs1="error response"
-                    ecs2="error response"
-                    ecs3="error response"
-                    ecs4="error response"
+                    ics1="error response"
+                    ics2="error response"
+                    ics3="error response"
+                    ics4="error response"
                 }
             });
-            clearFlag("ecs_stat")
+            clearFlag("ics_stat")
         }
-        if (checkFunctionFlag("ecs_types")) {
-            getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eitypes", 0, function(data, index) {
-                var tmp_ecs_types="-"
+        if (checkFunctionFlag("ics_types")) {
+            getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eitypes", 0, function(data, index) {
+                var tmp_ics_types="-"
                 try {
                     var jd=JSON.parse(data);
                     for(var i=0;i<jd.length;i++) {
-                        if (tmp_ecs_types.length == 1) {
-                            tmp_ecs_types=""
+                        if (tmp_ics_types.length == 1) {
+                            tmp_ics_types=""
                         }
-                        tmp_ecs_types=""+tmp_ecs_types+jd[i]+" "
+                        tmp_ics_types=""+tmp_ics_types+jd[i]+" "
                     }
                 }
                 catch (err) {
-                    tmp_ecs_types="error response"
+                    tmp_ics_types="error response"
                 }
-                ecs_types = tmp_ecs_types
+                ics_types = tmp_ics_types
             });
-            clearFlag("ecs_types")
+            clearFlag("ics_types")
         }
-        if (checkFunctionFlag("ecs_producers")) {
-            getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers", 0, function(data, index) {
-                var tmp_ecs_producers="-"
+        if (checkFunctionFlag("ics_producers")) {
+            getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers", 0, function(data, index) {
+                var tmp_ics_producers="-"
                 try {
                     var jd=JSON.parse(data);
-                    var tmp_ecs_producer_arr=new Array(jd.length)
+                    var tmp_ics_producer_arr=new Array(jd.length)
                     for(var i=0;i<jd.length;i++) {
-                        if (tmp_ecs_producers.length == 1) {
-                            tmp_ecs_producers=""
+                        if (tmp_ics_producers.length == 1) {
+                            tmp_ics_producers=""
                         }
-                        tmp_ecs_producers=""+tmp_ecs_producers+jd[i]+" "
-                        tmp_ecs_producer_arr[i]=jd[i]
+                        tmp_ics_producers=""+tmp_ics_producers+jd[i]+" "
+                        tmp_ics_producer_arr[i]=jd[i]
                     }
-                    ecs_producer_arr = tmp_ecs_producer_arr
-                    ecs_producers = tmp_ecs_producers
+                    ics_producer_arr = tmp_ics_producer_arr
+                    ics_producers = tmp_ics_producers
                 }
                 catch (err) {
-                    ecs_producers="error response"
-                    ecs_producer_arr=new Array(0)
+                    ics_producers="error response"
+                    ics_producer_arr=new Array(0)
                 }
             });
-            clearFlag("ecs_producers")
+            clearFlag("ics_producers")
         }
-        if (checkFunctionFlag("ecs_data")) {
+        if (checkFunctionFlag("ics_data")) {
             try {
-                var tmp_ecs_producer_type_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
-                for(var x=0;x<tmp_ecs_producer_type_arr.length;x++) {
-                    getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_type_arr[x], x, function(data, idx) {
-                        var row=""+tmp_ecs_producer_type_arr[idx]+" : "
+                var tmp_ics_producer_type_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+                for(var x=0;x<tmp_ics_producer_type_arr.length;x++) {
+                    getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_type_arr[x], x, function(data, idx) {
+                        var row=""+tmp_ics_producer_type_arr[idx]+" : "
                         try {
                             var jd=JSON.parse(data);
                             var jda=jd["supported_ei_types"]
@@ -514,96 +514,96 @@ function fetchAllMetrics_ecs() {
                                 row=""+row+jda[j]+" "
 
                             }
-                            tmp_ecs_producer_type_arr[idx]=row
+                            tmp_ics_producer_type_arr[idx]=row
                         }
                         catch (err) {
-                            tmp_ecs_producer_type_arr=new Array(0)
+                            tmp_ics_producer_type_arr=new Array(0)
                         }
                     });
                 }
-                ecs_producer_type_arr = tmp_ecs_producer_type_arr
+                ics_producer_type_arr = tmp_ics_producer_type_arr
             } catch (err) {
-                ecs_producer_type_arr=new Array(0)
+                ics_producer_type_arr=new Array(0)
             }
             try {
-                var tmp_ecs_producer_jobs_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
-                for(x=0;x<tmp_ecs_producer_jobs_arr.length;x++) {
-                    getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_jobs_arr[x]+"/eijobs", x, function(data, idx) {
-                        var row=""+tmp_ecs_producer_jobs_arr[idx]+" : "
+                var tmp_ics_producer_jobs_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+                for(x=0;x<tmp_ics_producer_jobs_arr.length;x++) {
+                    getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_jobs_arr[x]+"/eijobs", x, function(data, idx) {
+                        var row=""+tmp_ics_producer_jobs_arr[idx]+" : "
                         try {
                             var jd=JSON.parse(data);
                             for(var j=0;j<jd.length;j++) {
                                 var jda=jd[j]
                                 row=""+row+jda["ei_job_identity"]+"("+jda["ei_type_identity"]+") "
                             }
-                            tmp_ecs_producer_jobs_arr[idx]=row
+                            tmp_ics_producer_jobs_arr[idx]=row
                         }
                         catch (err) {
-                            tmp_ecs_producer_jobs_arr=new Array(0)
+                            tmp_ics_producer_jobs_arr=new Array(0)
                         }
                     });
                 }
-                ecs_producer_jobs_arr = tmp_ecs_producer_jobs_arr
+                ics_producer_jobs_arr = tmp_ics_producer_jobs_arr
             } catch (err) {
-                ecs_producer_jobs_arr=new Array(0)
+                ics_producer_jobs_arr=new Array(0)
             }
 
             try {
-                var tmp_ecs_producer_status_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
-                for(x=0;x<tmp_ecs_producer_status_arr.length;x++) {
-                    getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_status_arr[x]+"/status", x, function(data, idx) {
-                        var row=""+tmp_ecs_producer_status_arr[idx]+" : "
+                var tmp_ics_producer_status_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+                for(x=0;x<tmp_ics_producer_status_arr.length;x++) {
+                    getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_status_arr[x]+"/status", x, function(data, idx) {
+                        var row=""+tmp_ics_producer_status_arr[idx]+" : "
                         try {
                             var jd=JSON.parse(data);
                             row=""+row+jd["operational_state"]
-                            tmp_ecs_producer_status_arr[idx]=row
+                            tmp_ics_producer_status_arr[idx]=row
                         }
                         catch (err) {
-                            tmp_ecs_producer_status_arr=new Array(0)
+                            tmp_ics_producer_status_arr=new Array(0)
                         }
                     });
                 }
-                ecs_producer_status_arr = tmp_ecs_producer_status_arr
+                ics_producer_status_arr = tmp_ics_producer_status_arr
             } catch (err) {
-                ecs_producer_status_arr=new Array(0)
+                ics_producer_status_arr=new Array(0)
             }
-            clearFlag("ecs_data")
+            clearFlag("ics_data")
         }
-        if (checkFunctionFlag("ecs_jobs")) {
-            getSimCtr(LOCALHOST+ECS_PORT+"/A1-EI/v1/eijobs", 0, function(data, index) {
+        if (checkFunctionFlag("ics_jobs")) {
+            getSimCtr(LOCALHOST+ICS_PORT+"/A1-EI/v1/eijobs", 0, function(data, index) {
                 try {
                     var jd=JSON.parse(data);
                     var tmpArr=new Array(jd.length)
                     for(var i=0;i<jd.length;i++) {
                         tmpArr[i]=jd[i]
                     }
-                    ecs_jobs=tmpArr
+                    ics_jobs=tmpArr
                 }
                 catch (err) {
-                    ecs_jobs=new Array(0)
+                    ics_jobs=new Array(0)
                 }
             });
-            clearFlag("ecs_jobs")
+            clearFlag("ics_jobs")
         }
-        if (checkFunctionFlag("ecs_job_status")) {
+        if (checkFunctionFlag("ics_job_status")) {
             try {
-                var tmp_ecs_job_status= JSON.parse(JSON.stringify(ecs_jobs))
-                for(x=0;x<tmp_ecs_job_status.length;x++) {
-                    getSimCtr(LOCALHOST+ECS_PORT+"/A1-EI/v1/eijobs/"+tmp_ecs_job_status[x]+"/status", x, function(data, idx) {
+                var tmp_ics_job_status= JSON.parse(JSON.stringify(ics_jobs))
+                for(x=0;x<tmp_ics_job_status.length;x++) {
+                    getSimCtr(LOCALHOST+ICS_PORT+"/A1-EI/v1/eijobs/"+tmp_ics_job_status[x]+"/status", x, function(data, idx) {
                         try {
                             var jd=JSON.parse(data);
-                            tmp_ecs_job_status[idx]=""+tmp_ecs_job_status[idx]+":"+jd["eiJobStatus"]
+                            tmp_ics_job_status[idx]=""+tmp_ics_job_status[idx]+":"+jd["eiJobStatus"]
                         }
                         catch (err) {
-                            tmp_ecs_job_status="-"
+                            tmp_ics_job_status="-"
                         }
                     });
                 }
-                ecs_job_status = tmp_ecs_job_status
+                ics_job_status = tmp_ics_job_status
             } catch (err) {
-                ecs_job_status="-"
+                ics_job_status="-"
             }
-            clearFlag("ecs_job_status")
+            clearFlag("ics_job_status")
         }
         if (checkFunctionFlag("prodstub_stat")) {
             getSimCtr(LOCALHOST+PRODSTUB_PORT+"/status", x, function(data, idx) {
@@ -687,14 +687,14 @@ function fetchAllMetrics_ecs() {
             clearFlag("prodstub_stat")
         }
 
-        fetchAllMetrics_ecs();
+        fetchAllMetrics_ics();
 
     }, 500)
 }
 
 function fetchAllMetrics_cr() {
 
-    console.log("Fetching CR DB - timer:" + refreshCount_ecs)
+    console.log("Fetching CR DB - timer:" + refreshCount_ics)
 
     if (refreshCount_cr < 0) {
         refreshCount_cr = -1
@@ -721,7 +721,7 @@ function fetchAllMetrics_cr() {
 
 function fetchAllMetrics_rc() {
 
-    console.log("Fetching RC services - timer:" + refreshCount_ecs)
+    console.log("Fetching RC services - timer:" + refreshCount_ics)
 
     if (refreshCount_rc < 0) {
         refreshCount_rc = -1
@@ -758,7 +758,7 @@ function fetchAllMetrics_rc() {
 // Monitor for CR db
 app.get("/mon3",function(req, res){
 
-    console.log("Creating CR DB page - timer: " + refreshCount_ecs)
+    console.log("Creating CR DB page - timer: " + refreshCount_ics)
 
     if (refreshCount_cr < 0) {
         refreshCount_cr=5
@@ -779,16 +779,16 @@ app.get("/mon3",function(req, res){
     res.send(htmlStr);
 })
 
-// Monitor for ECS
+// Monitor for ICS
 app.get("/mon2",function(req, res){
 
-    console.log("Creating enrichment metrics - timer: " + refreshCount_ecs)
+    console.log("Creating information metrics - timer: " + refreshCount_ics)
 
-    if (refreshCount_ecs < 0) {
-        refreshCount_ecs=5
-        fetchAllMetrics_ecs()
+    if (refreshCount_ics < 0) {
+        refreshCount_ics=5
+        fetchAllMetrics_ics()
     }
-    refreshCount_ecs=5
+    refreshCount_ics=5
 
     var summary=req.query.summary
 
@@ -801,7 +801,7 @@ app.get("/mon2",function(req, res){
           "<html>" +
           "<head>" +
             "<meta http-equiv=\"refresh\" content=\"2\">"+  //2 sec auto refresh
-            "<title>Enrichment coordinator service and producer stub</title>"+
+            "<title>Information coordinator service and producer stub</title>"+
             "</head>" +
             "<body>" +
             "<font size=\"-3\" face=\"summary\">"
@@ -810,50 +810,50 @@ app.get("/mon2",function(req, res){
             } else {
                 htmlStr=htmlStr+"<p>Set query param '?summary' to false to only show full statistics</p>"
             }
-            if (ecs_job_status.length > 10) {
+            if (ics_job_status.length > 10) {
                 htmlStr=htmlStr+"<div style=\"color:red\"> Avoid running the server for large number of producers and/or jobs</div>"
             }
             htmlStr=htmlStr+"</font>" +
-            "<h3>Enrichment Coordinator Service</h3>" +
+            "<h3>Information Coordinator Service</h3>" +
             "<font face=\"monospace\">" +
-            "Status:..........." + formatDataRow(ecs1) + "<br>" +
-            "Producers:........" + formatDataRow(ecs2) + "<br>" +
-            "Types:............" + formatDataRow(ecs3) + "<br>" +
-            "Jobs:............." + formatDataRow(ecs4) + "<br>" +
+            "Status:..........." + formatDataRow(ics1) + "<br>" +
+            "Producers:........" + formatDataRow(ics2) + "<br>" +
+            "Types:............" + formatDataRow(ics3) + "<br>" +
+            "Jobs:............." + formatDataRow(ics4) + "<br>" +
             "</font>"
             if (summary == "false") {
                 htmlStr=htmlStr+
                 "<h4>Details</h4>" +
                 "<font face=\"monospace\">" +
-                "Producer ids:....." + formatDataRow(ecs_producers) + "<br>" +
-                "Type ids:........." + formatDataRow(ecs_types) + "<br>" +
+                "Producer ids:....." + formatDataRow(ics_producers) + "<br>" +
+                "Type ids:........." + formatDataRow(ics_types) + "<br>" +
                 "<br>";
-                for(var i=0;i<ecs_producer_type_arr.length;i++) {
-                    var tmp=ecs_producer_type_arr[i]
+                for(var i=0;i<ics_producer_type_arr.length;i++) {
+                    var tmp=ics_producer_type_arr[i]
                     if (tmp != undefined) {
-                        var s = "Producer types...." + formatDataRow(ecs_producer_type_arr[i]) + "<br>"
+                        var s = "Producer types...." + formatDataRow(ics_producer_type_arr[i]) + "<br>"
                         htmlStr=htmlStr+s
                     }
                 }
                 htmlStr=htmlStr+"<br>";
-                for(i=0;i<ecs_producer_jobs_arr.length;i++) {
-                    tmp=ecs_producer_jobs_arr[i]
+                for(i=0;i<ics_producer_jobs_arr.length;i++) {
+                    tmp=ics_producer_jobs_arr[i]
                     if (tmp != undefined) {
-                        s = "Producer jobs....." + formatDataRow(ecs_producer_jobs_arr[i]) + "<br>"
+                        s = "Producer jobs....." + formatDataRow(ics_producer_jobs_arr[i]) + "<br>"
                         htmlStr=htmlStr+s
                     }
                 }
                 htmlStr=htmlStr+"<br>";
-                for(i=0;i<ecs_producer_status_arr.length;i++) {
-                    tmp=ecs_producer_status_arr[i]
+                for(i=0;i<ics_producer_status_arr.length;i++) {
+                    tmp=ics_producer_status_arr[i]
                     if (tmp != undefined) {
                         s = "Producer status..." + formatDataRow(tmp) + "<br>"
                         htmlStr=htmlStr+s
                     }
                 }
                 htmlStr=htmlStr+"<br>";
-                for(i=0;i<ecs_job_status.length;i++) {
-                    tmp=ecs_job_status[i]
+                for(i=0;i<ics_job_status.length;i++) {
+                    tmp=ics_job_status[i]
                     if (tmp != undefined) {
                         s = padding("Job", 18, ".") + formatDataRow(tmp) + "<br>"
                         htmlStr=htmlStr+s
@@ -1025,5 +1025,5 @@ var httpPort=9999;
 httpServer.listen(httpPort);
 console.log("Simulator monitor listening (http) at "+httpPort);
 console.log("Open the web page on localhost:9999/mon to view the policy statistics page.")
-console.log("Open the web page on localhost:9999/mon2 to view the enrichment statistics page.")
+console.log("Open the web page on localhost:9999/mon2 to view the information statistics page.")
 console.log("Open the web page on localhost:9999/mon3 to view CR DB in json.")
\ No newline at end of file
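The monitor polls the ICS REST API directly; the endpoints it uses above (/status, /ei-producer/v1/eiproducers, /A1-EI/v1/eijobs) can also be exercised stand-alone. A minimal sketch of such a client, with the port, paths and response fields taken from the monitor code above (error handling kept short; not part of the test tooling itself):

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// getJSON fetches a URL and decodes the JSON body into out.
func getJSON(url string, out interface{}) error {
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	return json.NewDecoder(resp.Body).Decode(out)
}

func main() {
	base := "http://127.0.0.1:8083" // same host/port as ICS_PORT in the monitor

	// Overall ICS status; same endpoint and fields the monitor reads.
	var status map[string]interface{}
	if err := getJSON(base+"/status", &status); err != nil {
		fmt.Println("status request failed:", err)
		return
	}
	fmt.Println("status:", status["status"],
		"producers:", status["no_of_producers"],
		"types:", status["no_of_types"],
		"jobs:", status["no_of_jobs"])

	// Registered producer ids, as listed on /ei-producer/v1/eiproducers.
	var producers []string
	if err := getJSON(base+"/ei-producer/v1/eiproducers", &producers); err != nil {
		fmt.Println("producer listing failed:", err)
		return
	}
	fmt.Println("producer ids:", producers)
}
```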
diff --git a/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedOutputSchema.avsc b/test/usecases/odusliceassurance/apexpolicyversion/SliceAssurance/schemas/StdDefinedOutputSchema.avsc
new file mode 100644 (file)
index 0000000..9f513b8
--- /dev/null
@@ -0,0 +1,74 @@
+{
+    "type": "record",
+    "name": "Std_Defined_Output",
+    "fields": [
+        {
+            "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio",
+            "type": {
+                "type": "array",
+                "items": {
+                    "name": "RRM_Policy_Ratio",
+                    "type": "record",
+                    "fields": [
+                        {
+                            "name": "id",
+                            "type": "string"
+                        },
+                        {
+                            "name": "administrative_DasH_state",
+                            "type": "string"
+                        },
+                        {
+                            "name": "user_DasH_label",
+                            "type": "string"
+                        },
+                        {
+                            "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio",
+                            "type": "string"
+                        },
+                        {
+                            "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio",
+                            "type": "string"
+                        },
+                        {
+                            "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio",
+                            "type": "string"
+                        },
+                        {
+                            "name": "resource_DasH_type",
+                            "type": "string"
+                        },
+                        {
+                            "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_members",
+                            "type": {
+                                "type": "array",
+                                "items": {
+                                    "name": "RRM_Policy_Members",
+                                    "type": "record",
+                                    "fields": [
+                                        {
+                                            "name": "mobile_DasH_country_DasH_code",
+                                            "type": "string"
+                                        },
+                                        {
+                                            "name": "mobile_DasH_network_DasH_code",
+                                            "type": "string"
+                                        },
+                                        {
+                                            "name": "slice_DasH_differentiator",
+                                            "type": "int"
+                                        },
+                                        {
+                                            "name": "slice_DasH_service_DasH_type",
+                                            "type": "int"
+                                        }
+                                    ]
+                                }
+                            }
+                        }
+                    ]
+                }
+            }
+        }
+    ]
+}
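For reference, this is the shape of a record conforming to the schema above; the "_DasH_" token appears to be the usual APEX/Avro-safe encoding of "-" in the original YANG leaf names. A sketch in Go that builds and prints one such payload, with all values purely illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Field names follow the Avro schema above, where "_DasH_" stands in for "-".
type RRMPolicyMember struct {
	MCC string `json:"mobile_DasH_country_DasH_code"`
	MNC string `json:"mobile_DasH_network_DasH_code"`
	SD  int    `json:"slice_DasH_differentiator"`
	SST int    `json:"slice_DasH_service_DasH_type"`
}

type RRMPolicyRatio struct {
	ID             string            `json:"id"`
	AdminState     string            `json:"administrative_DasH_state"`
	UserLabel      string            `json:"user_DasH_label"`
	MaxRatio       string            `json:"radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio"`
	MinRatio       string            `json:"radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio"`
	DedicatedRatio string            `json:"radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio"`
	ResourceType   string            `json:"resource_DasH_type"`
	Members        []RRMPolicyMember `json:"radio_DasH_resource_DasH_management_DasH_policy_DasH_members"`
}

type StdDefinedOutput struct {
	Ratios []RRMPolicyRatio `json:"radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio"`
}

func main() {
	// All values below are made up for illustration.
	out := StdDefinedOutput{Ratios: []RRMPolicyRatio{{
		ID:             "rrm-pol-1",
		AdminState:     "unlocked",
		UserLabel:      "rrm-pol-1",
		MaxRatio:       "25",
		MinRatio:       "10",
		DedicatedRatio: "5",
		ResourceType:   "prb",
		Members: []RRMPolicyMember{{
			MCC: "310", MNC: "150", SD: 1, SST: 1,
		}},
	}}}
	b, _ := json.MarshalIndent(out, "", "  ")
	fmt.Println(string(b))
}
```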
index b59dbd9..fd0af03 100644 (file)
@@ -37,7 +37,7 @@ func main() {
        r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={O-RU-ID}", handleData)
 
        fmt.Println("Starting SDNR on port: ", *port)
-       http.ListenAndServe(fmt.Sprintf(":%v", *port), r)
+       fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), r))
 
 }
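The change above wraps http.ListenAndServe in fmt.Println so that a bind failure in the SDNR simulator is no longer silent. A self-contained sketch of the stricter alternative, exiting with a logged error (port and handler are placeholders, not the simulator's real ones):

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	port := "3333" // illustrative; the simulator takes its port from a flag
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "ok")
	})
	// Fail fast and log why the server stopped, instead of only printing the error.
	if err := http.ListenAndServe(":"+port, mux); err != nil {
		log.Fatalf("server stopped: %v", err)
	}
}
```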
 
index 4cb03c7..3c1a064 100644 (file)
@@ -29,4 +29,8 @@ RUN apt-get install iputils-ping -y
 
 RUN pip install -r requirements.txt
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+USER appuser
+
 CMD [ "python3", "-u", "main.py" ]
index 700bb90..d8ce48d 100644 (file)
@@ -45,6 +45,13 @@ spec:
             {{- toYaml .Values.securityContext | nindent 12 }}
           image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
           imagePullPolicy: {{ .Values.image.pullPolicy }}
+          env:
+          - name: TOPIC_READ
+            value: http://dmaap-mr:3904/events/unauthenticated.SEC_FAULT_OUTPUT
+          - name: TOPIC_WRITE
+            value: http://dmaap-mr:3904/events/unauthenticated.SEC_FAULT_OUTPUT
+          - name: GENERIC_TOPICS_UPLOAD_BASEURL
+            value: http://dmaap-mr:3904
           ports:
             - name: http
               containerPort: 3904
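The chart now tells the use case components where to read and publish: both TOPIC_READ and TOPIC_WRITE point at the unauthenticated.SEC_FAULT_OUTPUT topic on dmaap-mr. A sketch of publishing a test event to that URL, assuming the standard DMaaP Message Router publish API (a JSON POST to /events/<topic>); the payload is made up:

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"net/http"
	"os"
)

func main() {
	// Falls back to the value set in the chart above if the env var is unset.
	topicURL := os.Getenv("TOPIC_WRITE")
	if topicURL == "" {
		topicURL = "http://dmaap-mr:3904/events/unauthenticated.SEC_FAULT_OUTPUT"
	}

	// Minimal, made-up fault payload; the real VES fault format is richer.
	body := []byte(`{"event":{"commonEventHeader":{"domain":"fault","eventName":"fault_example"}}}`)

	resp, err := http.Post(topicURL, "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatalf("publish failed: %v", err)
	}
	defer resp.Body.Close()
	fmt.Println("DMaaP MR responded with", resp.Status)
}
```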
index 841cf7f..bb9c014 100644 (file)
@@ -29,4 +29,9 @@ RUN apt-get install iputils-ping -y
 
 RUN pip install -r requirements.txt
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+
+USER appuser
+
 CMD [ "python3", "-u", "message_generator.py" ]
index 4275b17..f3a5200 100644 (file)
@@ -29,4 +29,9 @@ RUN apt-get install iputils-ping -y
 
 RUN pip install -r requirements.txt
 
+RUN groupadd -g 999 appuser && \
+    useradd -r -u 999 -g appuser appuser
+
+USER appuser
+
 CMD [ "python3", "-u", "sdnr_simulator.py" ]