From: BjornMagnussonXA
Date: Mon, 24 Jan 2022 14:20:15 +0000 (+0100)
Subject: Updates for F release
X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=commitdiff_plain;h=8fbb226ef10720895adb98e6ca7ac1cae39f0103;p=nonrtric.git

Updates for F release

Removed usage of Consul/CBS (docker only) in tests for F-release and Jakarta
Added test of initial config map for PMS in kubernetes
Added a few “missing” non-2XX REST case tests for PMS
Added tests for dmaap mediator kafka types
Added test case with multiple types for dmaap mediator and adapter
Updated test of HelmChart (from it/dep example recipe)
Updates of README files
Added feature to collect http endpoint statistics for PMS
Added feature to delete namespace (kube) and containers (docker) prior to test - if labeled by the test env
Added counter in callback receiver to count batches (when more than one message is delivered in an array)
Added test suite and override file for aegis image testing
Updated Jakarta sdnc test with changed api url and json payload

Issue-ID: NONRTRIC-690
Signed-off-by: BjornMagnussonXA
Change-Id: I5e80bb136d089b04ca9e519ce71df0b4a450bae2
---
diff --git a/test/auto-test/FTC1.sh b/test/auto-test/FTC1.sh index fe02bf28..00d37581 100755 --- a/test/auto-test/FTC1.sh +++ b/test/auto-test/FTC1.sh @@ -31,7 +31,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="NGW CBS CONSUL" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -120,10 +120,6 @@ for __httpx in $TESTED_PROTOCOLS ; do start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE fi - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [[ $interface = *"SDNC"* ]]; then start_sdnc prepare_consul_config SDNC ".consul_config.json" @@ -134,7 +130,31 @@ for __httpx in $TESTED_PROTOCOLS ; do if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + #Temporary switch to http/https if dmaap use.
Otherwise it is not possibble to push config + if [ $__httpx == "HTTPS" ]; then + use_agent_rest_https + else + use_agent_rest_http + fi + api_put_configuration 200 ".consul_config.json" + if [ $__httpx == "HTTPS" ]; then + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_https + else + use_agent_rest_https + fi + else + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_http + else + use_agent_rest_http + fi + fi + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi mr_equal requests_submitted 0 diff --git a/test/auto-test/FTC10.sh b/test/auto-test/FTC10.sh index 50cf98f3..481f450d 100755 --- a/test/auto-test/FTC10.sh +++ b/test/auto-test/FTC10.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -67,7 +67,9 @@ start_mr start_cr 1 if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs + if [[ "$PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then + start_consul_cbs + fi fi start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE @@ -87,7 +89,11 @@ prepare_consul_config NOSDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + consul_config_app ".consul_config.json" + fi fi api_get_status 200 diff --git a/test/auto-test/FTC100.sh b/test/auto-test/FTC100.sh index 670ea5e7..1fafa711 100755 --- a/test/auto-test/FTC100.sh +++ b/test/auto-test/FTC100.sh @@ -31,7 +31,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -109,8 +109,14 @@ for __httpx in $TESTED_PROTOCOLS ; do start_ric_simulators ricsim_g1 1 OSC_2.1.0 start_ric_simulators ricsim_g2 1 STD_1.1.3 + + sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json + sim_put_policy_type 201 ricsim_g1_1 2 testdata/OSC/sim_2.json + if [ "$PMS_VERSION" == "V2" ]; then start_ric_simulators ricsim_g3 1 STD_2.0.0 + sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json + sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json fi start_mr @@ -123,10 +129,6 @@ for __httpx in $TESTED_PROTOCOLS ; do start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE fi - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [[ $interface = *"SDNC"* ]]; then start_sdnc prepare_consul_config SDNC ".consul_config.json" @@ -137,16 +139,42 @@ for __httpx in $TESTED_PROTOCOLS ; do if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" - fi + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + #Temporary switch to http/https if dmaap use. 
Otherwise it is not possibble to push config + if [ $__httpx == "HTTPS" ]; then + use_agent_rest_https + else + use_agent_rest_http + fi + + if [[ $interface != *"DMAAP"* ]]; then + echo "{}" > ".consul_config_incorrect.json" + api_put_configuration 400 ".consul_config_incorrect.json" + fi + + api_put_configuration 200 ".consul_config.json" + api_get_configuration 200 ".consul_config.json" + if [ $__httpx == "HTTPS" ]; then + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_https + else + use_agent_rest_https + fi + else + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_http + else + use_agent_rest_http + fi + fi - sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json - sim_put_policy_type 201 ricsim_g1_1 2 testdata/OSC/sim_2.json + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi + fi if [ "$PMS_VERSION" == "V2" ]; then - sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json - sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json - api_equal json:rics 3 300 api_equal json:policy-types 5 120 @@ -179,6 +207,8 @@ for __httpx in $TESTED_PROTOCOLS ; do api_get_status 200 + api_get_status_root 200 + echo "############################################" echo "##### Service registry and supervision #####" echo "############################################" @@ -384,6 +414,10 @@ for __httpx in $TESTED_PROTOCOLS ; do else notificationurl="" fi + if [[ $interface != *"DMAAP"* ]]; then + # Badly formatted json is not possible to send via dmaap + api_put_policy 400 "unregistered-service" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi_bad_template.json + fi deviation "TR10 - agent allows policy creation on unregistered service (orig problem) - test combo $interface and $__httpx" #Kept until decison #api_put_policy 400 "unregistered-service" ricsim_g1_1 1 2000 NOTRANSIENT testdata/OSC/pi1_template.json @@ -410,6 +444,8 @@ for __httpx in $TESTED_PROTOCOLS ; do api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 false $notificationurl testdata/STD2/pi_qos2_template.json fi + api_get_policy_status 404 1 + api_get_policy_status 404 2 VAL='NOT IN EFFECT' api_get_policy_status 200 5000 OSC "$VAL" "false" api_get_policy_status 200 5100 STD "UNDEFINED" diff --git a/test/auto-test/FTC110.sh b/test/auto-test/FTC110.sh index 5b840841..46a44d03 100755 --- a/test/auto-test/FTC110.sh +++ b/test/auto-test/FTC110.sh @@ -31,7 +31,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -65,10 +65,6 @@ start_mr start_cr 1 -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE if [ ! 
-z "$NRT_GATEWAY_APP_NAME" ]; then @@ -82,7 +78,12 @@ prepare_consul_config NOSDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi set_agent_debug diff --git a/test/auto-test/FTC150.sh b/test/auto-test/FTC150.sh index 8df6f081..5219e1b3 100755 --- a/test/auto-test/FTC150.sh +++ b/test/auto-test/FTC150.sh @@ -27,6 +27,11 @@ KUBE_INCLUDED_IMAGES=" RICSIM SDNC KUBEPROXY" #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list KUBE_PRESTARTED_IMAGES=" " +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="" + #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" #Supported run modes diff --git a/test/auto-test/FTC1800.sh b/test/auto-test/FTC1800.sh index 749ba826..1284bf61 100755 --- a/test/auto-test/FTC1800.sh +++ b/test/auto-test/FTC1800.sh @@ -26,7 +26,7 @@ DOCKER_INCLUDED_IMAGES="ICS PRODSTUB CR CP NGW KUBEPROXY" #App names to include in the test when running kubernetes, space separated list KUBE_INCLUDED_IMAGES="ICS PRODSTUB CP CR KUBEPROXY NGW" #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list -KUBE_PRESTARTED_IMAGES="" +KUBE_PRESTARTED_IMAGES="NGW" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file diff --git a/test/auto-test/FTC2001.sh b/test/auto-test/FTC2001.sh index 7c1202c4..c09bbadf 100755 --- a/test/auto-test/FTC2001.sh +++ b/test/auto-test/FTC2001.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -79,16 +79,17 @@ fi start_policy_agent PROXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - prepare_consul_config NOSDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi start_cr 1 diff --git a/test/auto-test/FTC2002.sh b/test/auto-test/FTC2002.sh index 37d35765..28c8fa70 100755 --- a/test/auto-test/FTC2002.sh +++ b/test/auto-test/FTC2002.sh @@ -26,6 +26,11 @@ KUBE_INCLUDED_IMAGES="" #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list KUBE_PRESTARTED_IMAGES=" " +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="" + 
#Supported test environment profiles SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA" #Supported run modes diff --git a/test/auto-test/FTC300.sh b/test/auto-test/FTC300.sh index a53dc362..2d9e9f92 100755 --- a/test/auto-test/FTC300.sh +++ b/test/auto-test/FTC300.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -102,10 +102,6 @@ for __httpx in $TESTED_PROTOCOLS ; do start_cr 1 - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then @@ -126,7 +122,31 @@ for __httpx in $TESTED_PROTOCOLS ; do if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + #Temporary switch to http/https if dmaap use. Otherwise it is not possibble to push config + if [ $__httpx == "HTTPS" ]; then + use_agent_rest_https + else + use_agent_rest_http + fi + api_put_configuration 200 ".consul_config.json" + if [ $__httpx == "HTTPS" ]; then + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_https + else + use_agent_rest_https + fi + else + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_http + else + use_agent_rest_http + fi + fi + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi api_get_status 200 @@ -170,7 +190,12 @@ for __httpx in $TESTED_PROTOCOLS ; do sim_equal ricsim_g1_1 num_instances 0 - sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 300 + if [[ $interface = *"SDNC"* ]]; then + deviation "Sync over SDNC seem to be slower from Jakarta version..." + sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 2000 + else + sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 300 + fi START_ID2=$(($START_ID+$NUM_POLICIES)) @@ -184,8 +209,12 @@ for __httpx in $TESTED_PROTOCOLS ; do sim_post_delete_instances 200 ricsim_g2_1 sim_equal ricsim_g2_1 num_instances 0 - - sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 300 + if [[ $interface = *"SDNC"* ]]; then + deviation "Sync over SDNC seem to be slower from Jakarta version..." + sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 2000 + else + sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 300 + fi api_delete_policy 204 $(($START_ID+47)) @@ -193,7 +222,12 @@ for __httpx in $TESTED_PROTOCOLS ; do sim_post_delete_instances 200 ricsim_g1_1 - sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300 + if [[ $interface = *"SDNC"* ]]; then + deviation "Sync over SDNC seem to be slower from Jakarta version..." + sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 2000 + else + sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300 + fi api_delete_policy 204 $(($START_ID2+37)) @@ -203,9 +237,16 @@ for __httpx in $TESTED_PROTOCOLS ; do sim_post_delete_instances 200 ricsim_g2_1 - sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300 + if [[ $interface = *"SDNC"* ]]; then + deviation "Sync over SDNC seem to be slower from Jakarta version..." 
+ sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 2000 + + sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 2000 + else + sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300 - sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 300 + sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 300 + fi api_equal json:policies $(($NUM_POLICIES-2+$NUM_POLICIES-3)) diff --git a/test/auto-test/FTC3000.sh b/test/auto-test/FTC3000.sh index 2293919e..6a1428df 100755 --- a/test/auto-test/FTC3000.sh +++ b/test/auto-test/FTC3000.sh @@ -50,7 +50,6 @@ FLAT_A1_EI="1" NUM_CR=10 # Number of callback receivers, divide all callbacks to this number of servers - for load sharing ## Note: The number jobs must be a multiple of the number of CRs in order to calculate the number of expected event in each CR NUM_JOBS=200 # Mediator and adapter gets same number of jobs for every type - if [ $NUM_JOBS -lt $NUM_CR ]; then __log_conf_fail_general "Number of jobs: $NUM_JOBS must be greater then the number of CRs: $NUM_CR" fi @@ -85,14 +84,24 @@ start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE set_dmaapadp_trace -start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + kafkapc_api_create_topic 201 "unauthenticated.dmaapmed_kafka.text" "text/plain" + + kafkapc_api_start_sending 200 "unauthenticated.dmaapmed_kafka.text" +fi + +start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_DATA_FILE ics_equal json:data-producer/v1/info-producers 2 60 # Check producers ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY -ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer +if [[ "$DMAAP_MED_FEATURE_LEVEL" != *"KAFKATYPES"* ]]; then + ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka +else + ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka Kafka_TestTopic +fi # Create jobs for adapter - CR stores data as MD5 hash @@ -126,16 +135,32 @@ do cr_index=$(($i%$NUM_CR)) service_mr="CR_SERVICE_MR_PATH_"$cr_index service_app="CR_SERVICE_APP_PATH_"$cr_index - ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json + ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-mediator/job-template.json done print_timer +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + # Create jobs for mediator kafka - CR stores data as MD5 hash + start_timer "Create mediator (kafka) jobs: $NUM_JOBS" + for ((i=1; i<=$NUM_JOBS; i++)) + do + cr_index=$(($i%$NUM_CR)) + service_text="CR_SERVICE_TEXT_PATH_"$cr_index + service_app="CR_SERVICE_APP_PATH_"$cr_index + ics_api_idc_put_job 201 job-med-kafka-$i Kafka_TestTopic ${!service_text}/job-med-kafka-data$i"?storeas=md5" info-owner-med-kafka-$i ${!service_app}/job_status_info-owner-med-kafka-$i testdata/dmaap-mediator/job-template-1-kafka.json + done + print_timer +fi + # Check job status for ((i=1; i<=$NUM_JOBS; i++)) do ics_api_a1_get_job_status 200 job-med-$i ENABLED 30 ics_api_a1_get_job_status 200 job-adp-$i ENABLED 30 ics_api_a1_get_job_status 200 
job-adp-kafka-$i ENABLED 30 + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + ics_api_a1_get_job_status 200 job-med-kafka-$i ENABLED 30 + fi done @@ -152,6 +177,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -159,6 +185,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -166,6 +193,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -173,6 +201,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -180,6 +209,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done # Check received data callbacks from adapter @@ -202,6 +232,7 @@ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 1 30 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -210,6 +241,7 @@ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 2 30 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -218,6 +250,7 @@ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 3 30 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -226,6 +259,7 @@ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 4 30 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -234,6 +268,7 @@ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 5 30 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done # Check received data callbacks from adapter kafka @@ -254,6 +289,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 
done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -261,6 +297,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -268,6 +305,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -275,6 +313,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) @@ -282,6 +321,7 @@ mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dma for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done # Check received data callbacks from mediator @@ -295,6 +335,65 @@ do cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json done +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + ## Send text file via message-router to mediator kafka + + EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) + kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 1 30 + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + + EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) + kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 2 30 + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + + EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) + kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 3 30 + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + + EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) + kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 4 30 + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + + EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV)) + kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 5 30 + 
for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + + # Check received data callbacks from adapter kafka + for ((i=1; i<=$NUM_JOBS; i++)) + do + cr_index=$(($i%$NUM_CR)) + cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt + cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt + cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt + cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt + cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt + done +fi # Send small json via message-router to adapter mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}' @@ -308,10 +407,11 @@ start_timer "Data delivery adapter, 2 json per job" for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done print_timer -# Send small text via message-routere to adapter +# Send small text via message-router to adapter kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------1' kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------3' kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 7 30 @@ -322,6 +422,7 @@ start_timer "Data delivery adapter kafka, 2 strings per job" for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 done print_timer @@ -335,16 +436,41 @@ start_timer "Data delivery mediator, 2 json per job" for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100 done print_timer +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + # Send small text via message-router to mediator + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------0' + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------2' + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 7 30 + + # Wait for data recetption, adapter kafka + EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV)) + start_timer "Data delivery mediator kafka, 2 strings per job" + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60 + done + print_timer +fi + # Check received number of messages for mediator and adapter callbacks for ((i=1; i<=$NUM_JOBS; i++)) do cr_index=$(($i%$NUM_CR)) cr_equal $cr_index received_callbacks?id=job-med-data$i $DATA_DELIV_JOBS + cr_equal $cr_index received_callback_batches?id=job-med-data$i $DATA_DELIV_JOBS cr_equal $cr_index received_callbacks?id=job-adp-data$i $DATA_DELIV_JOBS + cr_equal $cr_index received_callback_batches?id=job-adp-data$i $DATA_DELIV_JOBS cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i $DATA_DELIV_JOBS + cr_equal $cr_index received_callback_batches?id=job-adp-kafka-data$i $DATA_DELIV_JOBS + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + cr_equal $cr_index received_callbacks?id=job-med-kafka-data$i $DATA_DELIV_JOBS + 
cr_equal $cr_index received_callback_batches?id=job-med-kafka-data$i $DATA_DELIV_JOBS + fi done # Check received data and order for mediator and adapter callbacks @@ -357,6 +483,10 @@ do cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-3"}' cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1' cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3' + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0' + cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2' + fi done # Set delay in the callback receiver to slow down callbacks @@ -376,6 +506,7 @@ start_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100 done print_timer @@ -391,6 +522,7 @@ start_timer "Data delivery adapter kafka with $SEC_DELAY seconds delay in consum for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100 done print_timer @@ -405,16 +537,41 @@ start_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 for ((i=0; i<$NUM_CR; i++)) do cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100 done print_timer +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + # Send small text via message-router to mediator kafka + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------4' + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------6' + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 9 30 + + # Wait for data recetption, mediator kafka + EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV)) + start_timer "Data delivery mediator kafka with $SEC_DELAY seconds delay in consumer, 2 strings per job" + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100 + cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100 + done + print_timer +fi + # Check received number of messages for mediator and adapter callbacks for ((i=1; i<=$NUM_JOBS; i++)) do cr_index=$(($i%$NUM_CR)) cr_equal $cr_index received_callbacks?id=job-med-data$i 9 + cr_equal $cr_index received_callback_batches?id=job-med-data$i 9 cr_equal $cr_index received_callbacks?id=job-adp-data$i 9 + cr_equal $cr_index received_callback_batches?id=job-adp-data$i 9 cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i 9 + cr_equal $cr_index received_callback_batches?id=job-adp-kafka-data$i 9 + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + cr_equal $cr_index received_callbacks?id=job-med-kafka-data$i 9 + cr_equal $cr_index received_callback_batches?id=job-med-kafka-data$i 9 + fi done # Check received data and order for mediator and adapter callbacks @@ -427,6 +584,10 @@ do cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-7"}' cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------5' cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------7' + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + 
cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------4' + cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------6' + fi done #### TEST COMPLETE #### diff --git a/test/auto-test/FTC3001.sh b/test/auto-test/FTC3001.sh new file mode 100755 index 00000000..2672d127 --- /dev/null +++ b/test/auto-test/FTC3001.sh @@ -0,0 +1,261 @@ +#!/usr/bin/env bash + +# ============LICENSE_START=============================================== +# Copyright (C) 2020 Nordix Foundation. All rights reserved. +# ======================================================================== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END================================================= +# + +TC_ONELINE_DESCR="App test DMAAP Meditor and DMAAP Adapter with 100 jobs,types and topics" + +#App names to include in the test when running docker, space separated list +DOCKER_INCLUDED_IMAGES="ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HTTPPROXY" + +#App names to include in the test when running kubernetes, space separated list +KUBE_INCLUDED_IMAGES=" ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HTTPPROXY" + +#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list +KUBE_PRESTARTED_IMAGES="" + +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="" + +#Supported test environment profiles +SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE" +#Supported run modes +SUPPORTED_RUNMODES="DOCKER KUBE" + +. 
../common/testcase_common.sh $@ + +setup_testenvironment + +#### TEST BEGIN #### + +#Local vars in test script +########################## +FLAT_A1_EI="1" +NUM_CR=1 # Number of callback receivers, max 1 +## Note: The number jobs must be a multiple of the number of CRs in order to calculate the number of expected event in each CR +NUM_JOBS=100 # Mediator and adapter gets same number of jobs for every type +if [ $NUM_CR -gt 1 ]; then + __log_conf_fail_general "Max number of callback receivers is one in this test" +fi + +clean_environment + +#use_cr_https +use_cr_http +use_ics_rest_https +use_mr_https +use_dmaapadp_https +use_dmaapmed_https + +start_kube_proxy + +start_cr $NUM_CR + +start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE + +set_ics_trace + +start_mr + +start_kafkapc + +for ((i=1; i<=$NUM_JOBS; i++)) +do + kafkapc_api_create_topic 201 "unauthenticated.dmaapadp_kafka.text$i" "text/plain" + + kafkapc_api_start_sending 200 "unauthenticated.dmaapadp_kafka.text$i" +done + +adp_med_type_list="" +adp_config_data='{"types": [' +for ((i=1; i<=$NUM_JOBS; i++)) +do + if [ $i -ne 1 ]; then + adp_config_data=$adp_config_data',' + fi + adp_config_data=$adp_config_data'{"id": "ADPKafkaType'$i'","kafkaInputTopic": "unauthenticated.dmaapadp_kafka.text'$i'","useHttpProxy": false}' + adp_med_type_list="$adp_med_type_list ADPKafkaType$i " +done +adp_config_data=$adp_config_data']}' +echo $adp_config_data > tmp/adp_config_data.json + +start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE tmp/adp_config_data.json + +set_dmaapadp_trace + +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + for ((i=1; i<=$NUM_JOBS; i++)) + do + kafkapc_api_create_topic 201 "unauthenticated.dmaapmed_kafka.text$i" "text/plain" + + kafkapc_api_start_sending 200 "unauthenticated.dmaapmed_kafka.text$i" + done +fi + +med_config_data='{"types": [' +for ((i=1; i<=$NUM_JOBS; i++)) +do + if [ $i -ne 1 ]; then + med_config_data=$med_config_data',' + fi + med_config_data=$med_config_data'{"id": "MEDKafkaType'$i'","kafkaInputTopic": "unauthenticated.dmaapmed_kafka.text'$i'"}' + adp_med_type_list="$adp_med_type_list MEDKafkaType$i " +done +med_config_data=$med_config_data']}' +echo $med_config_data > tmp/med_config_data.json + +start_dmaapmed NOPROXY tmp/med_config_data.json + +ics_equal json:data-producer/v1/info-producers 2 60 + +# Check producers +ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY +ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer +ics_api_idc_get_type_ids 200 $adp_med_type_list + + +# Create jobs for adapter kafka - CR stores data as MD5 hash +start_timer "Create adapter (kafka) jobs: $NUM_JOBS" +for ((i=1; i<=$NUM_JOBS; i++)) +do + # Max buffer timeout for is about 160 sec for Adator jobs" + adp_timeout=$(($i*1000)) + adp_config_data='{"filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$adp_timeout'}}' + echo $adp_config_data > tmp/adp_config_data.json + + cr_index=$(($i%$NUM_CR)) + service_text="CR_SERVICE_TEXT_PATH_"$cr_index + service_app="CR_SERVICE_APP_PATH_"$cr_index + ics_api_idc_put_job 201 job-adp-kafka-$i "ADPKafkaType$i" ${!service_text}/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i ${!service_app}/callbacks-null tmp/adp_config_data.json + +done +print_timer + +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + # Create jobs for mediator kafka - CR stores data as MD5 hash + start_timer "Create mediator (kafka) jobs: $NUM_JOBS" + for ((i=1; 
i<=$NUM_JOBS; i++)) + do + med_timeout=$(($i*5000)) + med_config_data='{"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$med_timeout'}}' + echo $med_config_data > tmp/med_config_data.json + cr_index=$(($i%$NUM_CR)) + service_text="CR_SERVICE_TEXT_PATH_"$cr_index + service_app="CR_SERVICE_APP_PATH_"$cr_index + ics_api_idc_put_job 201 job-med-kafka-$i "MEDKafkaType$i" ${!service_text}/job-med-kafka-data$i"?storeas=md5" info-owner-med-kafka-$i ${!service_app}/callbacks-null tmp/med_config_data.json + done + print_timer +fi + +# Check job status +for ((i=1; i<=$NUM_JOBS; i++)) +do + ics_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30 + if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + ics_api_a1_get_job_status 200 job-med-kafka-$i ENABLED 30 + fi +done + + +EXPECTED_DATA_DELIV=0 #Total delivered msg per CR +EXPECTED_BATCHES_DELIV=0 #Total delivered batches per CR +DATA_DELIV_JOBS=0 #Total delivered msg per job per CR + +sleep_wait 60 + +start_timer "Data delivery adapter kafka, 2 strings per job (short buffer timeouts)" +# Send small text via message-router to adapter +for ((i=1; i<=$NUM_JOBS; i++)) +do + kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Message-------1'$i + kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Discard-------3'$i #Should be filtered out + kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Message-------3'$i +done +for ((i=1; i<=$NUM_JOBS; i++)) +do + kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text$i/counters/sent 3 30 +done + +# Wait for data recetption, adapter kafka +EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV)) +EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_BATCHES_DELIV)) + +adp_timeout=$(($NUM_JOBS*1*2+60)) #NUM_JOBS*MIN_BUFFERTIMEOUT*2+60_SEC_DELAY +for ((i=0; i<$NUM_CR; i++)) +do + #tmp_receptio + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV $adp_timeout + cr_greater_or_equal $i received_callback_batches $EXPECTED_BATCHES_DELIV +done +print_timer + +# Check received data callbacks from adapter +for ((i=1; i<=$NUM_JOBS; i++)) +do + cr_index=$(($i%$NUM_CR)) + cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'$i + cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'$i +done + +if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then + + PREV_DATA_DELIV=$(cr_read 0 received_callbacks) + PREV_BATCHES_DELIV=$(cr_read 0 received_callback_batches) + start_timer "Data delivery mediator kafka, 2 strings per job (long buffer timeouts)" + # Send small text via message-router to mediator + for ((i=1; i<=$NUM_JOBS; i++)) + do + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text$i" "text/plain" 'Message-------0'$i + kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text$i" "text/plain" 'Message-------2'$i + done + for ((i=1; i<=$NUM_JOBS; i++)) + do + kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text$i/counters/sent 2 30 + done + + # Wait for data recetption, adapter kafka + + EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$PREV_DATA_DELIV)) + EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$PREV_BATCHES_DELIV)) + + med_timeout=$(($NUM_JOBS*5*2+60)) #NUM_JOBS*MIN_BUFFERTIMEOUT*2+60_SEC_DELAY + for ((i=0; i<$NUM_CR; i++)) + do + cr_equal $i received_callbacks $EXPECTED_DATA_DELIV $med_timeout + cr_greater_or_equal $i received_callback_batches $EXPECTED_BATCHES_DELIV + done + + print_timer + + # Check received data callbacks 
from mediator + for ((i=1; i<=$NUM_JOBS; i++)) + do + cr_index=$(($i%$NUM_CR)) + cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0'$i + cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2'$i + done +fi + +#### TEST COMPLETE #### + +store_logs END + +print_result + +auto_clean_environment diff --git a/test/auto-test/FTC310.sh b/test/auto-test/FTC310.sh index a7360d01..767dc4ff 100755 --- a/test/auto-test/FTC310.sh +++ b/test/auto-test/FTC310.sh @@ -21,7 +21,12 @@ TC_ONELINE_DESCR="Resync of RIC via changes in the consul config or pushed config" #App names to include in the test when running docker, space separated list -DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM NGW KUBEPROXY" +DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM KUBEPROXY" + +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -36,6 +41,9 @@ setup_testenvironment if [ "$PMS_VERSION" == "V2" ]; then TESTED_VARIANTS="CONSUL NOCONSUL" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + TESTED_VARIANTS="NOCONSUL" + fi else TESTED_VARIANTS="CONSUL" fi @@ -140,7 +148,6 @@ for consul_conf in $TESTED_VARIANTS ; do fi check_policy_agent_logs - check_sdnc_logs store_logs END_$consul_conf done diff --git a/test/auto-test/FTC350.sh b/test/auto-test/FTC350.sh index d78be6c6..c7222457 100755 --- a/test/auto-test/FTC350.sh +++ b/test/auto-test/FTC350.sh @@ -20,13 +20,18 @@ TC_ONELINE_DESCR="Change supported policy types and reconfigure rics" #App names to include in the test when running docker, space separated list -DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC NGW KUBEPROXY" +DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC KUBEPROXY" #App names to include in the test when running kubernetes, space separated list -KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY NGW" +KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY" #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list KUBE_PRESTARTED_IMAGES="" +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL" + #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" #Supported run modes @@ -71,7 +76,9 @@ for interface in $TESTED_VARIANTS ; do start_mr if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs + if [[ "$PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then + start_consul_cbs + fi fi # Create first config @@ -91,7 +98,11 @@ for interface in $TESTED_VARIANTS ; do prepare_consul_config NOSDNC ".consul_config_all.json" fi - start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE + if [ $RUNMODE == "KUBE" ] && [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then + start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/application2.yaml + else + start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE + fi set_agent_trace @@ -103,9 
+114,19 @@ for interface in $TESTED_VARIANTS ; do #Load first config if [ $RUNMODE == "KUBE" ]; then - agent_load_config ".consul_config_initial.json" + if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then + api_put_configuration 200 ".consul_config_initial.json" + api_get_configuration 200 ".consul_config_initial.json" + else + agent_load_config ".consul_config_initial.json" + fi else - consul_config_app ".consul_config_initial.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config_initial.json" + api_get_configuration 200 ".consul_config_initial.json" + else + consul_config_app ".consul_config_initial.json" + fi fi for ((i=1; i<=${NUM_RICS}; i++)) @@ -202,9 +223,19 @@ for interface in $TESTED_VARIANTS ; do #Load config with all rics if [ $RUNMODE == "KUBE" ]; then - agent_load_config ".consul_config_all.json" + if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then + api_put_configuration 200 ".consul_config_all.json" + api_get_configuration 200 ".consul_config_all.json" + else + agent_load_config ".consul_config_all.json" + fi else - consul_config_app ".consul_config_all.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config_all.json" + api_get_configuration 200 ".consul_config_all.json" + else + consul_config_app ".consul_config_all.json" + fi fi api_equal json:rics 10 120 @@ -284,9 +315,19 @@ for interface in $TESTED_VARIANTS ; do # Load config with reduced number of rics if [ $RUNMODE == "KUBE" ]; then - agent_load_config ".consul_config_initial.json" + if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then + api_put_configuration 200 ".consul_config_initial.json" + api_get_configuration 200 ".consul_config_initial.json" + else + agent_load_config ".consul_config_initial.json" + fi else - consul_config_app ".consul_config_initial.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config_initial.json" + api_get_configuration 200 ".consul_config_initial.json" + else + consul_config_app ".consul_config_initial.json" + fi fi api_equal json:rics 8 120 @@ -345,9 +386,19 @@ for interface in $TESTED_VARIANTS ; do # Load config with all rics if [ $RUNMODE == "KUBE" ]; then - agent_load_config ".consul_config_all.json" + if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then + api_put_configuration 200 ".consul_config_all.json" + api_get_configuration 200 ".consul_config_all.json" + else + agent_load_config ".consul_config_all.json" + fi else - consul_config_app ".consul_config_all.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config_all.json" + api_get_configuration 200 ".consul_config_all.json" + else + consul_config_app ".consul_config_all.json" + fi fi api_equal json:rics 10 120 diff --git a/test/auto-test/FTC800.sh b/test/auto-test/FTC800.sh index 931610b3..3d514c4e 100755 --- a/test/auto-test/FTC800.sh +++ b/test/auto-test/FTC800.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -106,6 +106,10 @@ for __httpx in $TESTED_PROTOCOLS ; do mr_equal 
requests_submitted 0 + sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json + if [ "$PMS_VERSION" == "V2" ]; then + sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json + fi if [[ $interface == "SDNC" ]]; then start_sdnc prepare_consul_config SDNC ".consul_config.json" @@ -113,17 +117,17 @@ for __httpx in $TESTED_PROTOCOLS ; do prepare_consul_config NOSDNC ".consul_config.json" fi - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi - api_get_status 200 sim_print ricsim_g1_1 interface @@ -132,11 +136,7 @@ for __httpx in $TESTED_PROTOCOLS ; do sim_print ricsim_g3_1 interface fi - sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json - if [ "$PMS_VERSION" == "V2" ]; then - sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json - api_equal json:policy-types 3 300 #Wait for the agent to refresh types from the simulators else api_equal json:policy_types 2 300 #Wait for the agent to refresh types from the simulators diff --git a/test/auto-test/FTC805.sh b/test/auto-test/FTC805.sh index c7aecdd3..a9268a20 100755 --- a/test/auto-test/FTC805.sh +++ b/test/auto-test/FTC805.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -106,10 +106,6 @@ for __httpx in $TESTED_PROTOCOLS ; do set_agent_debug - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [[ $interface = *"SDNC"* ]]; then start_sdnc prepare_consul_config SDNC ".consul_config.json" @@ -120,7 +116,12 @@ for __httpx in $TESTED_PROTOCOLS ; do if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi start_cr 1 diff --git a/test/auto-test/FTC810.sh b/test/auto-test/FTC810.sh index 9fd1200f..83e1be79 100755 --- a/test/auto-test/FTC810.sh +++ b/test/auto-test/FTC810.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -98,16 +98,17 @@ fi start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - prepare_consul_config SDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then - agent_load_config ".consul_config.json" + agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* 
]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi start_sdnc diff --git a/test/auto-test/FTC850.sh b/test/auto-test/FTC850.sh index cb29618f..f9f5be32 100755 --- a/test/auto-test/FTC850.sh +++ b/test/auto-test/FTC850.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -107,10 +107,6 @@ for __httpx in $TESTED_PROTOCOLS ; do set_agent_debug - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [[ $interface = *"SDNC"* ]]; then start_sdnc prepare_consul_config SDNC ".consul_config.json" @@ -121,7 +117,12 @@ for __httpx in $TESTED_PROTOCOLS ; do if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi start_mr # Not used, but removes error messages from the agent log diff --git a/test/auto-test/FTC900.sh b/test/auto-test/FTC900.sh index a1ae6e43..f76d21e4 100755 --- a/test/auto-test/FTC900.sh +++ b/test/auto-test/FTC900.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -70,16 +70,17 @@ start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CO use_agent_rest_http -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - prepare_consul_config NOSDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi api_get_status 200 diff --git a/test/auto-test/FTC_HELM_E_RELEASE.sh b/test/auto-test/FTC_HELM_E_RELEASE.sh index dcbe7b3a..0f7e30a0 100755 --- a/test/auto-test/FTC_HELM_E_RELEASE.sh +++ b/test/auto-test/FTC_HELM_E_RELEASE.sh @@ -29,6 +29,11 @@ KUBE_INCLUDED_IMAGES=" MR DMAAPMR CR PRODSTUB KUBEPROXY KAFKAPC" #Prestarted app (not started by script) to include in the test when running kubernetes, space separated list KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ICS RC SDNC DMAAPMED DMAAPADP" +#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if +#the image is not configured in the supplied env_file +#Used for images not applicable to all supported profile +CONDITIONALLY_IGNORED_IMAGES="" + #Supported test environment profiles SUPPORTED_PROFILES="ORAN-E-RELEASE" #Supported run modes @@ -357,7 +362,8 @@ if [ $ICS_VERSION == "V1-1" ]; then else ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json ics_api_edp_get_type_2 200 type1 - 
ics_api_edp_get_type_ids 200 STD_Fault_Messages ExampleInformationTypeKafka ExampleInformationType type1 + + ics_api_edp_get_type_ids 200 type1 ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 @@ -407,11 +413,13 @@ fi # Dmaap mediator and adapter start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_DATA_FILE -start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE +start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_DATA_FILE ics_equal json:data-producer/v1/info-producers 3 120 -ics_api_idc_get_type_ids 200 ExampleInformationType ExampleInformationTypeKafka STD_Fault_Messages type-1 +ics_equal json:data-producer/v1/info-types 4 30 + +ics_api_idc_get_type_ids 200 ExampleInformationType ExampleInformationTypeKafka STD_Fault_Messages type1 ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a DmaapGenericInfoProducer DMaaP_Mediator_Producer @@ -435,6 +443,8 @@ do ics_api_a1_get_job_status 200 jobz$i ENABLED 30 done +sleep_wait 30 # Wait for mediator to listening to kafka + mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}' mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}' mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}' diff --git a/test/auto-test/ONAP_UC.sh b/test/auto-test/ONAP_UC.sh index cce69938..7ddff001 100755 --- a/test/auto-test/ONAP_UC.sh +++ b/test/auto-test/ONAP_UC.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA" @@ -110,14 +110,34 @@ for interface in $TESTED_VARIANTS ; do set_agent_trace - if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs - fi - if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + #Temporary switch to http/https if dmaap use. 
Otherwise it is not possibble to push config + if [ $__httpx == "HTTPS" ]; then + use_agent_rest_https + else + use_agent_rest_http + fi + api_put_configuration 200 ".consul_config.json" + if [ $__httpx == "HTTPS" ]; then + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_https + else + use_agent_rest_https + fi + else + if [[ $interface = *"DMAAP"* ]]; then + use_agent_dmaap_http + else + use_agent_rest_http + fi + fi + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi # Check that all rics are synced in diff --git a/test/auto-test/PM_DEMO.sh b/test/auto-test/PM_DEMO.sh index 802f8f51..f0ade77c 100755 --- a/test/auto-test/PM_DEMO.sh +++ b/test/auto-test/PM_DEMO.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -86,16 +86,17 @@ start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CO set_agent_trace -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - prepare_consul_config SDNC ".consul_config.json" if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi api_get_status 200 diff --git a/test/auto-test/PM_EI_DEMO.sh b/test/auto-test/PM_EI_DEMO.sh index 0eb8946d..a917032d 100755 --- a/test/auto-test/PM_EI_DEMO.sh +++ b/test/auto-test/PM_EI_DEMO.sh @@ -30,7 +30,7 @@ KUBE_PRESTARTED_IMAGES="" #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if #the image is not configured in the supplied env_file #Used for images not applicable to all supported profile -CONDITIONALLY_IGNORED_IMAGES="NGW" +CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW" #Supported test environment profiles SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE" @@ -88,16 +88,17 @@ start_sdnc # Comment this line to run PMS with proxy start_policy_agent PROXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE -if [ $RUNMODE == "DOCKER" ]; then - start_consul_cbs -fi - prepare_consul_config SDNC ".consul_config.json" #Change to NOSDNC if running PMS with proxy if [ $RUNMODE == "KUBE" ]; then agent_load_config ".consul_config.json" else - consul_config_app ".consul_config.json" + if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then + api_put_configuration 200 ".consul_config.json" + else + start_consul_cbs + consul_config_app ".consul_config.json" + fi fi start_cr 1 diff --git a/test/auto-test/Suite-aegis.sh b/test/auto-test/Suite-aegis.sh new file mode 100755 index 00000000..e7a5a51f --- /dev/null +++ b/test/auto-test/Suite-aegis.sh @@ -0,0 +1,42 @@ +#!/bin/bash + +# ============LICENSE_START=============================================== +# Copyright (C) 2020 Nordix Foundation. All rights reserved. +# ======================================================================== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END================================================= +# + +TS_ONELINE_DESCR="Test suite - PMS endpoint aegis image testing. Agent REST, DMAAP and SNDC controller resconf" + +. ../common/testsuite_common.sh + +suite_setup + +############# TEST CASES ################# + +./FTC1.sh $@ +./FTC10.sh $@ +./FTC100.sh $@ +./FTC110.sh $@ +./FTC300.sh $@ +./FTC310.sh $@ +./FTC350.sh $@ +./FTC800.sh $@ +./FTC805.sh $@ +./FTC850.sh $@ +./FTC2001.sh $@ + +########################################## + +suite_complete \ No newline at end of file diff --git a/test/auto-test/override_aegis_pms.sh b/test/auto-test/override_aegis_pms.sh new file mode 100644 index 00000000..1c0ea01e --- /dev/null +++ b/test/auto-test/override_aegis_pms.sh @@ -0,0 +1,21 @@ +#!/bin/bash +################################################################################ +# Copyright (c) 2021 Nordix Foundation. # +# # +# Licensed under the Apache License, Version 2.0 (the "License"); # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. 
# +################################################################################ + +# Override file for running the e-release helm recipe including all components + + +POLICY_AGENT_IMAGE_BASE="aegis-onap-docker-local.artifactory.est.tech/onap/ccsdk-oran-a1policymanagementservice" diff --git a/test/auto-test/override_ftc_helm_e_release.sh b/test/auto-test/override_ftc_helm_e_release.sh index 3894acef..67edcb70 100644 --- a/test/auto-test/override_ftc_helm_e_release.sh +++ b/test/auto-test/override_ftc_helm_e_release.sh @@ -34,3 +34,5 @@ SDNC_EXTERNAL_SECURE_PORT=8383 RAPP_CAT_EXTERNAL_PORT=9085 RAPP_CAT_EXTERNAL_SECURE_PORT=9086 + +HELM_MANAGER_APP_NAME="helmmanager" diff --git a/test/auto-test/testdata/OSC/pi_bad_template.json b/test/auto-test/testdata/OSC/pi_bad_template.json new file mode 100644 index 00000000..25aca002 --- /dev/null +++ b/test/auto-test/testdata/OSC/pi_bad_template.json @@ -0,0 +1,5 @@ +{ + "scope": { + "ueId": "ueXXX", + "qosId": "qosXXX" + } \ No newline at end of file diff --git a/test/auto-test/testdata/dmaap-mediator/job-schema-1-kafka b/test/auto-test/testdata/dmaap-mediator/job-schema-1-kafka new file mode 100644 index 00000000..290b70ae --- /dev/null +++ b/test/auto-test/testdata/dmaap-mediator/job-schema-1-kafka @@ -0,0 +1,28 @@ +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "type": "object", + "properties": { + "filter": { + "type": "string" + }, + "maxConcurrency": { + "type": "integer" + }, + "bufferTimeout": { + "type": "object", + "properties": { + "maxSize": { + "type": "integer" + }, + "maxTimeMiliseconds": { + "type": "integer" + } + }, + "required": [ + "maxSize", + "maxTimeMiliseconds" + ] + } + }, + "required": [] +} \ No newline at end of file diff --git a/test/auto-test/testdata/dmaap-mediator/job-template-1-kafka.json b/test/auto-test/testdata/dmaap-mediator/job-template-1-kafka.json new file mode 100644 index 00000000..48f0a116 --- /dev/null +++ b/test/auto-test/testdata/dmaap-mediator/job-template-1-kafka.json @@ -0,0 +1,6 @@ +{ + "bufferTimeout": { + "maxSize": 1, + "maxTimeMiliseconds": 0 + } +} \ No newline at end of file diff --git a/test/auto-test/testdata/dmaap-mediator/job-template.json b/test/auto-test/testdata/dmaap-mediator/job-template.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/test/auto-test/testdata/dmaap-mediator/job-template.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/test/common/README.md b/test/common/README.md index 5f806d69..e179f30e 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -197,6 +197,9 @@ The script can be started with these arguments | `--override ` | Override setting from the file supplied by --env-file | | `--pre-clean` | Clean kube resouces when running docker and vice versa | | `--gen-stats` | Collect container/pod runtime statistics | +| `--delete-namespaces` | Delete kubernetes namespaces before starting tests - but only those created by the test scripts. Kube mode only. Ignored if running with prestarted apps. | +| `--delete-containers` | Delete docker containers before starting tests - but only those created by the test scripts. Docker mode only. | +| `--endpoint-stats` | Collect http endpoint statistics | | `help` | Print this info along with the test script description and the list of app short names supported | ## Function: setup_testenvironment ## @@ -402,10 +405,27 @@ With the timeout, the test waits up to the timeout seconds before setting pass o See the 'cr' dir for more details. 
| arg list | |--| -| ` [ ]` | +| ` [ ]` | | parameter | description | | --------- | ----------- | +| `` | Variable index to CR | +| `` | Variable name in the CR | +| `` | Target value for the variable | +| `` | Max time to wait for the variable to reach the target value | + +## Function: cr_greater_or_equal ## +Tests if a variable value in the Callback Receiver (CR) simulator is equal to or greater than a target value. +Without the timeout, the test sets pass or fail immediately depending on if the variable is equal to or greater than the target or not. +With the timeout, the test waits up to the timeout seconds before setting pass or fail depending on if the variable value becomes equal to the target value or not. +See the 'cr' dir for more details. +| arg list | +|--| +| ` [ ]` | + +| parameter | description | +| --------- | ----------- | +| `` | Variable index to CR | | `` | Variable name in the CR | | `` | Target value for the variable | | `` | Max time to wait for the variable to reach the target value | @@ -419,11 +439,12 @@ See the 'a1-interface' repo for more details. | arg list | |--| -| ` [ ]` | +| ` [ ]` | | parameter | description | | --------- | ----------- | +| `` | Variable index to CR | | `` | Variable name in the CR | | `` | Target substring for the variable | | `` | Max time to wait for the variable to reach the target value | @@ -434,10 +455,11 @@ Reads the value of a variable in the CR simulator. The value is intended to be p See the 'mrstub' dir for more details. | arg list | |--| -| `` | +| ` ` | | parameter | description | | --------- | ----------- | +| `` | Variable index to CR | | `` | Variable name in the CR | ## Function: cr_delay_callback ## @@ -460,11 +482,12 @@ Check the contents of all ric events received for a callback id. | arg list | |--| -| ` [ EMPTY \| ( )+ ]` | +| ` [ EMPTY \| ( )+ ]` | | parameter | description | | --------- | ----------- | | `` | Expected http response code | +| `` | Variable index for CR | | `` | Id of the callback destination | | `EMPTY` | Indicator for an empty list | | `` | Id of the ric | @@ -475,11 +498,12 @@ Check the contents of all current status events for one id from ICS | arg list | |--| -| ` [ EMPTY \| ( )+ ]` | +| ` [ EMPTY \| ( )+ ]` | | parameter | description | | --------- | ----------- | | `` | Expected http response code | +| `` | Variable index for CR | | `` | Id of the callback destination | | `EMPTY` | Indicator for an empty list | | `` | Status string | @@ -490,11 +514,12 @@ Check the contents of all current subscription events for one id from ICS | arg list | |--| -| ` [ EMPTY | ( )+ ]` | +| ` [ EMPTY | ( )+ ]` | | parameter | description | | --------- | ----------- | | `` | Expected http response code | +| `` | Variable index for CR | | `` | Id of the callback destination | | `EMPTY` | Indicator for an empty list | | `` | Id of the data type | @@ -507,7 +532,11 @@ Reset the callback receiver | arg list | |--| -| - | +| `` | + +| parameter | description | +| --------- | ----------- | +| `` | Variable index for CR | ## Function: cr_api_check_all_genric_json_events ## diff --git a/test/common/cr_api_functions.sh b/test/common/cr_api_functions.sh index 40ef7ea7..a12b69e9 100644 --- a/test/common/cr_api_functions.sh +++ b/test/common/cr_api_functions.sh @@ -367,6 +367,27 @@ cr_equal() { fi } +# Tests if a variable value in the CR is equal to or greater than the target value and and optional timeout. +# Arg: - This test set pass or fail depending on if the variable is +# equal to the target or not. 
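# Illustrative usage (assumed values, not taken from an existing test script): with a CR
# instance at index 0 and the new 'received_callback_batches' counter, a test script could call
#   cr_greater_or_equal 0 received_callback_batches 2 60
# to pass as soon as at least 2 batches have been received, or fail after 60 seconds.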
+# Arg: - This test waits up to the timeout seconds +# before setting pass or fail depending on if the variable value becomes equal to or greater than the target +# value or not. +# (Function for test scripts) +cr_greater_or_equal() { + if [ $# -eq 3 ] || [ $# -eq 4 ]; then + CR_SERVICE_PATH=$(__cr_get_service_path $1) + CR_ADAPTER=$CR_SERVICE_PATH + if [ $? -ne 0 ]; then + __print_err " missing or incorrect" $@ + return 1 + fi + __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 ">=" $3 $4 + else + __print_err "Wrong args to cr_equal, needs three or four args: [ timeout ]" $@ + fi +} + # Tests if a variable value in the CR contains the target string and and optional timeout # Arg: - This test set pass or fail depending on if the variable contains # the target or not. @@ -391,7 +412,7 @@ cr_contains_str() { fi } -# Read a variable value from CR sim and send to stdout. Arg: +# Read a variable value from CR sim and send to stdout. Arg: cr_read() { CR_SERVICE_PATH=$(__cr_get_service_path $1) CR_ADAPTER=$CR_SERVICE_PATH @@ -399,7 +420,7 @@ cr_read() { __print_err " missing or incorrect" $@ return 1 fi - echo "$(__do_curl $CR_SERVICE_PATH/counter/$1)" + echo "$(__do_curl $CR_SERVICE_PATH/counter/$2)" } # Function to configure write delay on callbacks diff --git a/test/common/dmaapmed_api_functions.sh b/test/common/dmaapmed_api_functions.sh index 8ed01694..ef99ee1e 100644 --- a/test/common/dmaapmed_api_functions.sh +++ b/test/common/dmaapmed_api_functions.sh @@ -165,7 +165,7 @@ __dmaapmed_export_vars() { export DMAAP_MED_DATA_MOUNT_PATH export DMAAP_MED_HOST_MNT_DIR - export DMAAP_MED_DATA_FILE + export DMAAP_MED_CONTR_DATA_FILE export DMAAP_MED_DATA_CONFIGMAP_NAME=$DMAAP_MED_APP_NAME"-data" if [ $1 == "PROXY" ]; then @@ -188,6 +188,8 @@ __dmaapmed_export_vars() { export DMAAP_MED_CONF_SELF_HOST=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f1-2) export DMAAP_MED_CONF_SELF_PORT=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f3) export MR_SERVICE_PATH + export MR_KAFKA_SERVICE_PATH + } # Start the Dmaap mediator @@ -233,7 +235,7 @@ start_dmaapmed() { __dmaapmed_export_vars $1 # Create config map for data - data_json=$PWD/tmp/$DMAAP_MED_DATA_FILE + data_json=$PWD/tmp/$DMAAP_MED_CONTR_DATA_FILE if [ $# -lt 2 ]; then #create empty dummy file echo "{}" > $data_json @@ -268,7 +270,7 @@ start_dmaapmed() { __dmaapmed_export_vars $1 - dest_file=$SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_MNT_DIR/$DMAAP_MED_DATA_FILE + dest_file=$SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_MNT_DIR/$DMAAP_MED_CONTR_DATA_FILE envsubst < $2 > $dest_file diff --git a/test/common/do_curl_function.sh b/test/common/do_curl_function.sh index a3f5507e..6476711f 100755 --- a/test/common/do_curl_function.sh +++ b/test/common/do_curl_function.sh @@ -93,7 +93,9 @@ do_curl() { exit 1 else echo " OK, code: "$status" (Expected)" - if [[ "$content_type" == *"$resp_content"* ]]; then + if [[ "$resp_content" == '*' ]]; then + : + elif [[ "$content_type" == *"$resp_content"* ]]; then echo " Content type: "$content_type" (Expected)" else echo " Expected content type: "$resp_content diff --git a/test/common/format_endpoint_stats.sh b/test/common/format_endpoint_stats.sh new file mode 100755 index 00000000..c80f0831 --- /dev/null +++ b/test/common/format_endpoint_stats.sh @@ -0,0 +1,130 @@ +#!/bin/bash + +# ============LICENSE_START=============================================== +# Copyright (C) 2021 Nordix Foundation. All rights reserved. 
+# ======================================================================== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============LICENSE_END================================================= +# + +# This script format http endpoint stats generated by testscripts + +print_usage() { + echo "Usage: format_endpoint_stats [tc-id]+ " +} + +SUMMARYFILE="" +SUMMARYFILE_TMP="" + +update_summary() { + + input=$@ + inputarr=(${input// / }) + inputp=${inputarr[3]} + inputn=${inputarr[4]} + inputposarr=(${inputp//\// }) + inputnegarr=(${inputn//\// }) + > $SUMMARYFILE_TMP + found=0 + while read -r line; do + linearr=(${line// / }) + linep=${linearr[3]} + linen=${linearr[4]} + lineposarr=(${linep//\// }) + linenegarr=(${linen//\// }) + if [[ ${linearr[1]} == ${inputarr[1]} ]] && [[ ${linearr[2]} == ${inputarr[2]} ]]; then + let lineposarr[0]=lineposarr[0]+inputposarr[0] + let lineposarr[1]=lineposarr[1]+inputposarr[1] + let linenegarr[0]=linenegarr[0]+inputnegarr[0] + let linenegarr[1]=linenegarr[1]+inputnegarr[1] + found=1 + fi + printf '%-2s %-10s %-45s %-16s %-16s' "#" "${linearr[1]}" "${linearr[2]}" "${lineposarr[0]}/${lineposarr[1]}" "${linenegarr[0]}/${linenegarr[1]}" >> $SUMMARYFILE_TMP + echo "" >> $SUMMARYFILE_TMP + done < $SUMMARYFILE + if [ $found -eq 0 ]; then + printf '%-2s %-10s %-45s %-16s %-16s' "#" "${inputarr[1]}" "${inputarr[2]}" "${inputposarr[0]}/${inputposarr[1]}" "${inputnegarr[0]}/${inputnegarr[1]}" >> $SUMMARYFILE_TMP + echo "" >> $SUMMARYFILE_TMP + fi + cp $SUMMARYFILE_TMP $SUMMARYFILE +} + +if [ $# -lt 4 ]; then + print_usage + exit 1 +fi +BASE_DIR=$1 +if [ ! -d $BASE_DIR ]; then + print_usage + echo " $BASE_DIR does not exist or is not a dir" + exit 1 +fi +SUMMARYFILE=$BASE_DIR/endpoint_summary.log +rm $SUMMARYFILE +touch $SUMMARYFILE +SUMMARYFILE_TMP=$BASE_DIR/endpoint_summary_tmp.log +TC_FAIL=0 +shift +APP_ID=$1 +shift +echo "" +echo "===================================================" +echo "Functional test cases for $1" +echo "===================================================" +echo +shift +while [ $# -gt 0 ]; do + FTC_DIR=$BASE_DIR/$1 + if [ ! -d $FTC_DIR ]; then + echo "Dir $FTC_DIR does not exist" + exit 1 + fi + IMAGE_INFO_FILE=$FTC_DIR/imageinfo_$APP_ID".log" + if [ -f $IMAGE_INFO_FILE ]; then + echo "=== Testscript: $1 ===" + echo "Image: "$(cat $IMAGE_INFO_FILE) + echo + TC_RES_FILE=$FTC_DIR/.result$1.txt + if [ -f "$TC_RES_FILE" ]; then + TC_RESULT=$(< "$TC_RES_FILE") + if [ $TC_RESULT -ne 0 ]; then + echo " !!!!! TESTCASE FAILED !!!!!" 
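# Note on the per-test result file (as set up by testcase_common.sh): the test script writes "1"
# to .result<tc-id>.txt when it starts and overwrites it with "0" only when all tests pass, so any
# non-zero value read above marks the whole test case as failed in this summary.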
+ let TC_FAIL=TC_FAIL+1 + fi + fi + echo "=== Results: positive=2XX http status, negative=non 2XX http status - (ok/total)===" + echo "Method Endpoint Positive Negative" + grep --no-filename "#" $FTC_DIR/endpoint_$APP_ID* | cut -c 4- + for filename in $FTC_DIR/endpoint_$APP_ID* ; do + filedata=$(< $filename) + update_summary $filedata + done + echo "===============================" + echo + else + echo "=== No stats collected by Testscript $1 ===" + echo "" + fi + shift +done + +echo "Summary of all testscripts" +if [ $TC_FAIL -ne 0 ]; then + echo " !!!!! ONE OR MORE TESTCASE(S) FAILED - CHECK INDIVIDUAL TEST RESULT!!!!!" +fi +echo "=== Results: positive=2XX http status, negative=non 2XX http status - (ok/total)===" +echo "Method Endpoint Positive Negative" +cat $SUMMARYFILE | cut -c 4- + +exit 0 + diff --git a/test/common/helmmanager_api_functions.sh b/test/common/helmmanager_api_functions.sh index 455387a9..a5a9a097 100644 --- a/test/common/helmmanager_api_functions.sh +++ b/test/common/helmmanager_api_functions.sh @@ -57,7 +57,7 @@ __HELMMANAGER_kube_scale_zero() { # Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action. # This function is called for prestarted apps not managed by the test script. __HELMMANAGER_kube_scale_zero_and_wait() { - __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-helmmanagerservice + __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-"$HELM_MANAGER_APP_NAME" } # Delete all kube resouces for the app @@ -196,7 +196,7 @@ start_helm_manager() { if [ $retcode_p -eq 0 ]; then echo -e " Using existing $HELM_MANAGER_APP_NAME deployment and service" echo " Setting $HELM_MANAGER_APP_NAME replicas=1" - __kube_scale deployment $HELM_MANAGER_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1 + __kube_scale sts $HELM_MANAGER_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1 fi if [ $retcode_i -eq 0 ]; then diff --git a/test/common/pa_api_functions.sh b/test/common/pa_api_functions.sh index 9d4d1f13..a5a51c06 100644 --- a/test/common/pa_api_functions.sh +++ b/test/common/pa_api_functions.sh @@ -1,7 +1,7 @@ #!/bin/bash # ============LICENSE_START=============================================== -# Copyright (C) 2020 Nordix Foundation. All rights reserved. +# Copyright (C) 2021 Nordix Foundation. All rights reserved. # ======================================================================== # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -358,6 +358,8 @@ start_policy_agent() { __check_service_start $POLICY_AGENT_APP_NAME $PA_SERVICE_PATH$POLICY_AGENT_ALIVE_URL fi + + __collect_endpoint_stats_image_info "PMS" $POLICY_AGENT_IMAGE echo "" return 0 } @@ -824,7 +826,7 @@ api_get_policies() { fi fi fi - + __collect_endpoint_stats "PMS" 00 "GET" $PMS_API_PREFIX"/v2/policy-instances" $status __log_test_pass return 0 @@ -906,6 +908,7 @@ api_get_policy() { fi fi + __collect_endpoint_stats "PMS" 01 "GET" $PMS_API_PREFIX"/v2/policies/{policy_id}" $status __log_test_pass return 0 } @@ -994,11 +997,11 @@ api_put_policy() { __log_test_fail_status_code $1 $status return 1 fi - let pid=$pid+1 let count=$count+1 echo -ne " Executed "$count"("$max")${SAMELINE}" done + __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $status $max echo "" __log_test_pass @@ -1113,6 +1116,7 @@ api_put_policy_batch() { let count=$count+1 echo -ne " Accepted(batch) "$count"("$max")${SAMELINE}" done + __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $1 $max echo "" @@ -1224,6 +1228,7 @@ api_put_policy_parallel() { fi done if [ -z $msg ]; then + __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $resp_code $(($count*$num_rics)) __log_test_pass " $(($count*$num_rics)) policy request(s) executed" return 0 fi @@ -1267,10 +1272,12 @@ api_delete_policy() { __log_test_fail_status_code $1 $status return 1 fi + let pid=$pid+1 let count=$count+1 echo -ne " Executed "$count"("$max")${SAMELINE}" done + __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $status $max echo "" __log_test_pass @@ -1337,6 +1344,7 @@ api_delete_policy_batch() { let count=$count+1 echo -ne " Deleted(batch) "$count"("$max")${SAMELINE}" done + __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $1 $max echo "" @@ -1413,6 +1421,7 @@ api_delete_policy_parallel() { fi done if [ -z $msg ]; then + __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $resp_code $(($count*$num_rics)) __log_test_pass " $(($count*$num_rics)) policy request(s) executed" return 0 fi @@ -1512,6 +1521,7 @@ api_get_policy_ids() { fi fi + __collect_endpoint_stats "PMS" 04 "GET" $PMS_API_PREFIX"/v2/policies" $status __log_test_pass return 0 } @@ -1556,6 +1566,7 @@ api_get_policy_type() { fi fi + __collect_endpoint_stats "PMS" 05 "GET" $PMS_API_PREFIX"/v2/policy-types/{policyTypeId}" $status __log_test_pass return 0 } @@ -1599,6 +1610,7 @@ api_get_policy_schema() { fi fi + __collect_endpoint_stats "PMS" 06 "GET" $PMS_API_PREFIX"/v2/policy_schema" $status __log_test_pass return 0 } @@ -1666,30 +1678,32 @@ api_get_policy_schemas() { fi fi + __collect_endpoint_stats "PMS" 07 "GET" $PMS_API_PREFIX"/v2/policy-schemas" $status __log_test_pass return 0 } # API Test function: GET /policy_status and V2 GET /policies/{policy_id}/status -# arg: (STD|STD2 |EMPTY [|EMPTY])|(OSC ) +# arg: [ (STD|STD2 |EMPTY [|EMPTY])|(OSC ) ] # (Function for test scripts) api_get_policy_status() { __log_test_start $@ - if [ $# -lt 4 ] || [ $# -gt 5 ]; then - __print_err " (STD |EMPTY [|EMPTY])|(OSC )" $@ + if [ $# -lt 2 ] || [ $# -gt 5 ]; then + __print_err " [(STD |EMPTY [|EMPTY])|(OSC )]" $@ return 1 fi targetJson="" - - if [ $3 == "STD" ]; then + if [ $# -eq 2 ]; then + : + elif [ "$3" == "STD" ]; then targetJson="{\"enforceStatus\":\"$4\"" if [ $# -eq 5 ]; then targetJson=$targetJson",\"reason\":\"$5\"" fi targetJson=$targetJson"}" - elif [ $3 == "STD2" ]; then + elif [ "$3" == "STD2" ]; then if [ $4 
== "EMPTY" ]; then targetJson="{\"enforceStatus\":\"\"" else @@ -1703,7 +1717,7 @@ api_get_policy_status() { fi fi targetJson=$targetJson"}" - elif [ $3 == "OSC" ]; then + elif [ "$3" == "OSC" ]; then targetJson="{\"instance_status\":\"$4\"" if [ $# -eq 5 ]; then targetJson=$targetJson",\"has_been_deleted\":\"$5\"" @@ -1728,16 +1742,17 @@ api_get_policy_status() { __log_test_fail_status_code $1 $status return 1 fi + if [ $# -gt 2 ]; then + echo "TARGET JSON: $targetJson" >> $HTTPLOG + body=${res:0:${#res}-3} + res=$(python3 ../common/compare_json.py "$targetJson" "$body") - echo "TARGET JSON: $targetJson" >> $HTTPLOG - body=${res:0:${#res}-3} - res=$(python3 ../common/compare_json.py "$targetJson" "$body") - - if [ $res -ne 0 ]; then - __log_test_fail_body - return 1 + if [ $res -ne 0 ]; then + __log_test_fail_body + return 1 + fi fi - + __collect_endpoint_stats "PMS" 08 "GET" $PMS_API_PREFIX"/policies/{policy_id}/status" $status __log_test_pass return 0 } @@ -1806,6 +1821,7 @@ api_get_policy_types() { fi fi + __collect_endpoint_stats "PMS" 09 "GET" $PMS_API_PREFIX"/v2/policy-types" $status __log_test_pass return 0 } @@ -1836,6 +1852,33 @@ api_get_status() { return 1 fi + __collect_endpoint_stats "PMS" 10 "GET" $PMS_API_PREFIX"/v2/status" $status + __log_test_pass + return 0 +} + +# API Test function: GET /status (root) without api prefix +# args: +# (Function for test scripts) +api_get_status_root() { + __log_test_start $@ + if [ $# -ne 1 ]; then + __print_err "" $@ + return 1 + fi + query="/status" + TMP_PREFIX=$PMS_API_PREFIX + PMS_API_PREFIX="" + res="$(__do_curl_to_api PA GET $query)" + PMS_API_PREFIX=$TMP_PREFIX + status=${res:${#res}-3} + + if [ $status -ne $1 ]; then + __log_test_fail_status_code $1 $status + return 1 + fi + + __collect_endpoint_stats "PMS" 19 "GET" "/status" $status __log_test_pass return 0 } @@ -1922,6 +1965,8 @@ api_get_ric() { fi fi fi + + __collect_endpoint_stats "PMS" 11 "GET" $PMS_API_PREFIX"/v2/rics/ric" $status __log_test_pass return 0 } @@ -1983,6 +2028,7 @@ api_get_rics() { fi fi + __collect_endpoint_stats "PMS" 12 "GET" $PMS_API_PREFIX"/v2/rics" $status __log_test_pass return 0 } @@ -2019,6 +2065,7 @@ api_put_service() { return 1 fi + __collect_endpoint_stats "PMS" 13 "PUT" $PMS_API_PREFIX"/v2/service" $status __log_test_pass return 0 } @@ -2102,6 +2149,7 @@ api_get_services() { fi fi + __collect_endpoint_stats "PMS" 14 "GET" $PMS_API_PREFIX"/v2/services" $status __log_test_pass return 0 } @@ -2155,6 +2203,7 @@ api_get_service_ids() { return 1 fi + __collect_endpoint_stats "PMS" 14 "GET" $PMS_API_PREFIX"/v2/services" $status __log_test_pass return 0 } @@ -2182,6 +2231,7 @@ api_delete_services() { return 1 fi + __collect_endpoint_stats "PMS" 15 "DELETE" $PMS_API_PREFIX"/v2/services/{serviceId}" $status __log_test_pass return 0 } @@ -2210,6 +2260,7 @@ api_put_services_keepalive() { return 1 fi + __collect_endpoint_stats "PMS" 16 "PUT" $PMS_API_PREFIX"/v2/services/{service_id}/keepalive" $status __log_test_pass return 0 } @@ -2238,7 +2289,9 @@ api_put_configuration() { return 1 fi inputJson=$(< $2) - inputJson="{\"config\":"$inputJson"}" + if [ $RUNMODE == "DOCKER" ]; then #In kube the file already has a header + inputJson="{\"config\":"$inputJson"}" + fi file="./tmp/.config.json" echo $inputJson > $file query="/v2/configuration" @@ -2250,6 +2303,7 @@ api_put_configuration() { return 1 fi + __collect_endpoint_stats "PMS" 17 "PUT" $PMS_API_PREFIX"/v2/configuration" $status __log_test_pass return 0 } @@ -2298,6 +2352,7 @@ api_get_configuration() { fi fi + 
__collect_endpoint_stats "PMS" 18 "GET" $PMS_API_PREFIX"/v2/configuration" $status __log_test_pass return 0 } diff --git a/test/common/ricsim_api_functions.sh b/test/common/ricsim_api_functions.sh index 2953eb0a..f433cad3 100644 --- a/test/common/ricsim_api_functions.sh +++ b/test/common/ricsim_api_functions.sh @@ -286,7 +286,7 @@ start_ric_simulators() { export DOCKER_SIM_NWNAME export RIC_SIM_DISPLAY_NAME - docker_args="--no-recreate --scale $RICSIM_COMPOSE_SERVICE_NAME=$2" + docker_args=" --scale $RICSIM_COMPOSE_SERVICE_NAME=$2" #Create a list of contsiner names #Will be __ diff --git a/test/common/test_env-onap-guilin.sh b/test/common/test_env-onap-guilin.sh index 0fdb0652..012716b8 100755 --- a/test/common/test_env-onap-guilin.sh +++ b/test/common/test_env-onap-guilin.sh @@ -188,6 +188,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR MR_STUB_APP_NAME="mr-stub" # Name of the MR stub diff --git a/test/common/test_env-onap-honolulu.sh b/test/common/test_env-onap-honolulu.sh index efb54251..c9643a89 100755 --- a/test/common/test_env-onap-honolulu.sh +++ b/test/common/test_env-onap-honolulu.sh @@ -212,6 +212,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container diff --git a/test/common/test_env-onap-istanbul.sh b/test/common/test_env-onap-istanbul.sh index c0f1491f..a1f59f58 100644 --- a/test/common/test_env-onap-istanbul.sh +++ b/test/common/test_env-onap-istanbul.sh @@ -215,6 +215,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container diff --git a/test/common/test_env-onap-jakarta.sh b/test/common/test_env-onap-jakarta.sh index 386f168a..b5b6c397 100644 --- a/test/common/test_env-onap-jakarta.sh +++ b/test/common/test_env-onap-jakarta.sh @@ -108,19 +108,6 @@ RAPP_CAT_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1" RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator" RIC_SIM_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.2.0" - -#Consul remote image and tag -CONSUL_IMAGE_BASE="consul" -CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2" -#No local image for Consul, remote image always used - - -#CBS remote image and tag -CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app" -CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0" -#No local image for CBS, remote image always used - - #MR stub 
image and tag MRSTUB_IMAGE_BASE="mrstub" MRSTUB_IMAGE_TAG_LOCAL="latest" @@ -215,6 +202,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container @@ -290,21 +278,6 @@ PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision" # Callback path for pro PROD_STUB_ALIVE_URL="/" # Base path for alive check PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose -CONSUL_HOST="consul-server" # Host name of consul -CONSUL_DISPLAY_NAME="Consul" -CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container) -CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container) -CONSUL_APP_NAME="polman-consul" # Name for consul container -CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check -CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose - -CBS_APP_NAME="polman-cbs" # Name for CBS container -CBS_DISPLAY_NAME="Config Binding Service" -CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container) -CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container) -CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS -CBS_ALIVE_URL="/healthcheck" # Base path for alive check - RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator" RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code # Note, a prefix is added to each container name by the .env file in the 'ric' dir @@ -330,15 +303,15 @@ SDNC_A1_TRUSTSTORE_PASSWORD="a1adapter" # SDNC truststore passw SDNC_USER="admin" # SDNC username SDNC_PWD="admin" # SNDC PWD SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SNDC PWD -#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:" # Base url path for SNDC API (for upgraded sdnc) -SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SNDC API +SDNC_API_URL="/rests/operations/A1-ADAPTER-API:" # Base url path for SNDC API (for upgraded sdnc) +#SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SNDC API SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SNDC API docs (for alive check) SDNC_COMPOSE_DIR="sdnc" SDNC_COMPOSE_FILE="docker-compose-2.yml" SDNC_KUBE_APP_FILE="app2.yaml" SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log -#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc) -SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc +SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc) +#SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc SDNC_FEATURE_LEVEL="TRANS_RESP_CODE" # Space separated list of features # TRANS_RESP_CODE: SDNC return southbound response code diff --git a/test/common/test_env-oran-cherry.sh b/test/common/test_env-oran-cherry.sh index fbf13f62..d794e693 100755 --- a/test/common/test_env-oran-cherry.sh +++ b/test/common/test_env-oran-cherry.sh @@ -215,6 +215,7 @@ 
POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container diff --git a/test/common/test_env-oran-d-release.sh b/test/common/test_env-oran-d-release.sh index 51b11895..a4f725c5 100755 --- a/test/common/test_env-oran-d-release.sh +++ b/test/common/test_env-oran-d-release.sh @@ -234,6 +234,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container diff --git a/test/common/test_env-oran-e-release.sh b/test/common/test_env-oran-e-release.sh index 245dec8c..9a892705 100755 --- a/test/common/test_env-oran-e-release.sh +++ b/test/common/test_env-oran-e-release.sh @@ -277,6 +277,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container @@ -539,10 +540,12 @@ DMAAP_MED_ALIVE_URL="/status" # Base path for alive c DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose #MAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file -DMAAP_MED_DATA_FILE="type_config.json" # Container data file name +DMAAP_MED_HOST_DATA_FILE="type_config.json" # Host data file name +DMAAP_MED_CONTR_DATA_FILE="type_config.json" # Container data file name +DMAAP_MED_FEATURE_LEVEL="" # Space separated list of features KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon -KAFKAPC_DISPLAY_NAME="Kafaka Producer/Consumer" +KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer" KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container) KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container) KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container) diff --git a/test/common/test_env-oran-f-release.sh b/test/common/test_env-oran-f-release.sh index 0ba821e9..3bafdb46 100755 --- a/test/common/test_env-oran-f-release.sh +++ b/test/common/test_env-oran-f-release.sh @@ -60,10 +60,10 @@ NEXUS_RELEASE_REPO_ORAN=$NEXUS_RELEASE_REPO # Policy Agent base image and tags POLICY_AGENT_IMAGE_BASE="o-ran-sc/nonrtric-a1-policy-management-service" -POLICY_AGENT_IMAGE_TAG_LOCAL="2.3.0-SNAPSHOT" 
-POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.3.0-SNAPSHOT" -POLICY_AGENT_IMAGE_TAG_REMOTE="2.3.0" -POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.3.0" +POLICY_AGENT_IMAGE_TAG_LOCAL="2.4.0-SNAPSHOT" +POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.4.0-SNAPSHOT" +POLICY_AGENT_IMAGE_TAG_REMOTE="2.4.0" +POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.4.0" # ICS image and tags ICS_IMAGE_BASE="o-ran-sc/nonrtric-information-coordinator-service" @@ -131,17 +131,17 @@ RIC_SIM_IMAGE_TAG_REMOTE_RELEASE="2.2.0" # DMAAP Mediator Service DMAAP_MED_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-mediator-producer" -DMAAP_MED_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT" -DMAAP_MED_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT" -DMAAP_MED_IMAGE_TAG_REMOTE="1.0.0" -DMAAP_MED_IMAGE_TAG_REMOTE_RELEASE="1.0.0" +DMAAP_MED_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT" +DMAAP_MED_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT" +DMAAP_MED_IMAGE_TAG_REMOTE="1.1.0" +DMAAP_MED_IMAGE_TAG_REMOTE_RELEASE="1.1.0" # DMAAP Adapter Service DMAAP_ADP_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-adaptor" -DMAAP_ADP_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT" -DMAAP_ADP_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT" -DMAAP_ADP_IMAGE_TAG_REMOTE="1.0.0" -DMAAP_ADP_IMAGE_TAG_REMOTE_RELEASE="1.0.0" +DMAAP_ADP_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT" +DMAAP_ADP_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT" +DMAAP_ADP_IMAGE_TAG_REMOTE="1.1.0" +DMAAP_ADP_IMAGE_TAG_REMOTE_RELEASE="1.1.0" # Helm Manager HELM_MANAGER_IMAGE_BASE="o-ran-sc/nonrtric-helm-manager" @@ -150,18 +150,6 @@ HELM_MANAGER_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.0-SNAPSHOT" HELM_MANAGER_IMAGE_TAG_REMOTE="1.2.0" HELM_MANAGER_IMAGE_TAG_REMOTE_RELEASE="1.2.0" -#Consul remote image and tag -CONSUL_IMAGE_BASE="consul" -CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2" -#No local image for Consul, remote image always used - - -#CBS remote image and tag -CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app" -CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0" -#No local image for CBS, remote image always used - - #MR stub image and tag MRSTUB_IMAGE_BASE="mrstub" MRSTUB_IMAGE_TAG_LOCAL="latest" @@ -226,7 +214,7 @@ PROJECT_IMAGES_APP_NAMES="PA ICS CP RC RICSIM NGW DMAAPADP DMAAPMED HELMMANAGER" ORAN_IMAGES_APP_NAMES="" # Not used # List of app short names which images pulled from ONAP -ONAP_IMAGES_APP_NAMES="CBS DMAAPMR SDNC" # SDNC added as ONAP image +ONAP_IMAGES_APP_NAMES="DMAAPMR SDNC" # SDNC added as ONAP image ######################################## @@ -277,6 +265,7 @@ POLICY_AGENT_DATA_MOUNT_PATH="/opt/app/policy-agent/data" # Path in container fo POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container +PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features ICS_APP_NAME="informationservice" # Name for ICS container ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container @@ -351,21 +340,6 @@ PROD_STUB_SUPERVISION_CALLBACK="/callbacks/supervision" # Callback path for pro PROD_STUB_ALIVE_URL="/" # Base path for alive check PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose -CONSUL_HOST="consul-server" # Host name of consul -CONSUL_DISPLAY_NAME="Consul" -CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container) -CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container) -CONSUL_APP_NAME="polman-consul" # Name for consul container -CONSUL_ALIVE_URL="/ui/dc1/kv" # Base 
path for alive check -CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose - -CBS_APP_NAME="polman-cbs" # Name for CBS container -CBS_DISPLAY_NAME="Config Binding Service" -CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container) -CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container) -CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS -CBS_ALIVE_URL="/healthcheck" # Base path for alive check - RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator" RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code # Note, a prefix is added to each container name by the .env file in the 'ric' dir @@ -535,14 +509,16 @@ DMAAP_MED_HOST_MNT_DIR="./mnt" # Mounted db dir, relati #MAAP_ADP_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container #DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug #DMAAP_MED_CERT_MOUNT_DIR="./cert" -DMAAP_MED_ALIVE_URL="/status" # Base path for alive check +DMAAP_MED_ALIVE_URL="/health_check" # Base path for alive check DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose #MAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file -DMAAP_MED_DATA_FILE="type_config.json" # Container data file name +DMAAP_MED_HOST_DATA_FILE="type_config_1.json" # Host data file name +DMAAP_MED_CONTR_DATA_FILE="type_config.json" # Container data file name +DMAAP_MED_FEATURE_LEVEL="KAFKATYPES" # Space separated list of features KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon -KAFKAPC_DISPLAY_NAME="Kafaka Producer/Consumer" +KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer" KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container) KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container) KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container) diff --git a/test/common/testcase_common.sh b/test/common/testcase_common.sh index d9b5e87d..c9374cf8 100755 --- a/test/common/testcase_common.sh +++ b/test/common/testcase_common.sh @@ -28,7 +28,8 @@ __print_args() { echo " [--ricsim-prefix ] [--use-local-image +] [--use-snapshot-image +]" echo " [--use-staging-image +] [--use-release-image +] [--image-repo ] [--print-stats]" - echo " [--override --pre-clean --gen-stats]" + echo " [--override ] [--pre-clean] [--gen-stats] [--delete-namespaces]" + echo " [--delete-containers] [--endpoint-stats]" } if [ $# -eq 1 ] && [ "$1" == "help" ]; then @@ -60,7 +61,9 @@ if [ $# -eq 1 ] && [ "$1" == "help" ]; then echo "--override - Override setting from the file supplied by --env-file" echo "--pre-clean - Will clean kube resouces when running docker and vice versa" echo "--gen-stats - Collect container/pod runtime statistics" - + echo "--delete-namespaces - Delete kubernetes namespaces before starting tests - but only those created by the test scripts. Kube mode only. Ignored if running with prestarted apps." + echo "--delete-containers - Delete docker containers before starting tests - but only those created by the test scripts. Docker mode only." + echo "--endpoint-stats - Collect endpoint statistics" echo "" echo "List of app short names supported: "$APP_SHORT_NAMES exit 0 @@ -209,6 +212,7 @@ if [ $? 
-ne 0 ]; then exit 1 fi + # Create a http message log for this testcase HTTPLOG=$PWD"/.httplog_"$ATC".txt" echo "" > $HTTPLOG @@ -232,6 +236,9 @@ rm $TESTLOGS/$ATC/*.log &> /dev/null rm $TESTLOGS/$ATC/*.txt &> /dev/null rm $TESTLOGS/$ATC/*.json &> /dev/null +#Create result file in the log dir +echo "1" > "$TESTLOGS/$ATC/.result$ATC.txt" + # Log all output from the test case to a TC log TCLOG=$TESTLOGS/$ATC/TC.log exec &> >(tee ${TCLOG}) @@ -250,6 +257,15 @@ PRINT_CURRENT_STATS=0 COLLECT_RUNTIME_STATS=0 COLLECT_RUNTIME_STATS_PID=0 +#Var to control if endpoint statistics shall be collected +COLLECT_ENDPOINT_STATS=0 + +#Var to control if namespaces shall be delete before test setup +DELETE_KUBE_NAMESPACES=0 + +#Var to control if containers shall be delete before test setup +DELETE_CONTAINERS=0 + #File to keep deviation messages DEVIATION_FILE=".tmp_deviations" rm $DEVIATION_FILE &> /dev/null @@ -386,6 +402,44 @@ __log_conf_ok() { __print_current_stats } +# Function to collect stats on endpoints +# args: [] +__collect_endpoint_stats() { + if [ $COLLECT_ENDPOINT_STATS -eq 0 ]; then + return + fi + ENDPOINT_COUNT=1 + if [ $# -gt 5 ]; then + ENDPOINT_COUNT=$6 + fi + ENDPOINT_STAT_FILE=$TESTLOGS/$ATC/endpoint_$ATC_$1_$2".log" + ENDPOINT_POS=0 + ENDPOINT_NEG=0 + if [ -f $ENDPOINT_STAT_FILE ]; then + ENDPOINT_VAL=$(< $ENDPOINT_STAT_FILE) + ENDPOINT_POS=$(echo $ENDPOINT_VAL | cut -f4 -d ' ' | cut -f1 -d '/') + ENDPOINT_NEG=$(echo $ENDPOINT_VAL | cut -f5 -d ' ' | cut -f1 -d '/') + fi + + if [ $5 -ge 200 ] && [ $5 -lt 300 ]; then + let ENDPOINT_POS=ENDPOINT_POS+$ENDPOINT_COUNT + else + let ENDPOINT_NEG=ENDPOINT_NEG+$ENDPOINT_COUNT + fi + + printf '%-2s %-10s %-45s %-16s %-16s' "#" "$3" "$4" "$ENDPOINT_POS/$ENDPOINT_POS" "$ENDPOINT_NEG/$ENDPOINT_NEG" > $ENDPOINT_STAT_FILE +} + +# Function to collect stats on endpoints +# args: +__collect_endpoint_stats_image_info() { + if [ $COLLECT_ENDPOINT_STATS -eq 0 ]; then + return + fi + ENDPOINT_STAT_FILE=$TESTLOGS/$ATC/imageinfo_$ATC_$1".log" + echo $POLICY_AGENT_IMAGE > $ENDPOINT_STAT_FILE +} + #Var for measuring execution time TCTEST_START=$SECONDS @@ -400,7 +454,7 @@ TC_TIMER_CURRENT_FAILS="" # Then numer of failed test when timer starts. 
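# Illustrative only - the endpoint stat files written by __collect_endpoint_stats above hold one
# fixed-width record per endpoint: "#" <method> <endpoint> <positive ok/total> <negative ok/total>,
# where positive means 2XX http status and negative means non 2XX (as described in the README).
# Assuming three 2XX and one non-2XX result were reported for the PMS policies endpoint, the
# record would read roughly:
#   #  PUT        <PMS_API_PREFIX>/v2/policies                  3/3              1/1
# format_endpoint_stats.sh later sums these records per method and endpoint across test scripts.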
TIMER_MEASUREMENTS=".timer_measurement.txt" echo -e "Activity \t Duration \t Info" > $TIMER_MEASUREMENTS -# If this is set, some images (control by the parameter repo-polcy) will be re-tagged and pushed to this repo before any +# If this is set, some images (controlled by the parameter repo-policy) will be re-tagged and pushed to this repo before any IMAGE_REPO_ADR="" IMAGE_REPO_POLICY="local" CLUSTER_TIME_OUT=0 @@ -718,6 +772,44 @@ while [ $paramerror -eq 0 ] && [ $foundparm -eq 0 ]; do foundparm=0 fi fi + if [ $paramerror -eq 0 ]; then + if [ "$1" == "--delete-namespaces" ]; then + if [ $RUNMODE == "DOCKER" ]; then + DELETE_KUBE_NAMESPACES=0 + echo "Option ignored - Delete namespaces (ignored when running docker)" + else + if [ -z "KUBE_PRESTARTED_IMAGES" ]; then + DELETE_KUBE_NAMESPACES=0 + echo "Option ignored - Delete namespaces (ignored when using prestarted apps)" + else + DELETE_KUBE_NAMESPACES=1 + echo "Option set - Delete namespaces" + fi + fi + shift; + foundparm=0 + fi + fi + if [ $paramerror -eq 0 ]; then + if [ "$1" == "--delete-containers" ]; then + if [ $RUNMODE == "DOCKER" ]; then + DELETE_CONTAINERS=1 + echo "Option set - Delete containers started by previous test(s)" + else + echo "Option ignored - Delete containers (ignored when running kube)" + fi + shift; + foundparm=0 + fi + fi + if [ $paramerror -eq 0 ]; then + if [ "$1" == "--endpoint-stats" ]; then + COLLECT_ENDPOINT_STATS=1 + echo "Option set - Collect endpoint statistics" + shift; + foundparm=0 + fi + fi done echo "" @@ -931,7 +1023,7 @@ else exit 1 fi - echo " Node(s) and container container runtime config" + echo " Node(s) and container runtime config" kubectl get nodes -o wide | indent2 fi fi @@ -1386,9 +1478,38 @@ setup_testenvironment() { #Temp var to check for image pull errors IMAGE_ERR=0 - # The following sequence pull the configured images + # Delete namespaces + echo -e $BOLD"Deleting namespaces"$EBOLD + + if [ "$DELETE_KUBE_NAMESPACES" -eq 1 ]; then + test_env_namespaces=$(kubectl get ns --no-headers -o custom-columns=":metadata.name" -l autotest=engine) #Get list of ns created by the test env + if [ $? -ne 0 ]; then + echo " Cannot get list of namespaces...ignoring delete" + else + for test_env_ns in $test_env_namespaces; do + __kube_delete_namespace $test_env_ns + done + fi + else + echo " Namespace delete option not set" + fi + echo "" + + # Delete containers + echo -e $BOLD"Deleting containers"$EBOLD + + if [ "$DELETE_CONTAINERS" -eq 1 ]; then + echo " Stopping containers label 'nrttest_app'..." + docker stop $(docker ps -qa --filter "label=nrttest_app") 2> /dev/null + echo " Removing stopped containers..." + docker rm $(docker ps -qa --filter "label=nrttest_app") 2> /dev/null + else + echo " Contatiner delete option not set" + fi + echo "" + # The following sequence pull the configured images echo -e $BOLD"Pulling configured images, if needed"$EBOLD if [ ! 
-z "$IMAGE_REPO_ADR" ] && [ $IMAGE_REPO_POLICY == "local" ]; then echo -e $YELLOW" Excluding all remote image check/pull when running with image repo: $IMAGE_REPO_ADR and image policy $IMAGE_REPO_POLICY"$EYELLOW @@ -1681,6 +1802,7 @@ print_result() { fi #Create file with OK exit code echo "0" > "$AUTOTEST_HOME/.result$ATC.txt" + echo "0" > "$TESTLOGS/$ATC/.result$ATC.txt" else echo -e "One or more tests with status \033[31m\033[1mFAIL\033[0m " echo -e "\033[31m\033[1m ___ _ ___ _ \033[0m" @@ -1777,6 +1899,16 @@ __check_stop_at_error() { if [ $STOP_AT_ERROR -eq 1 ]; then echo -e $RED"Test script configured to stop at first FAIL, taking all logs and stops"$ERED store_logs "STOP_AT_ERROR" + + # Update test suite counter + if [ -f .tmp_tcsuite_fail_ctr ]; then + tmpval=$(< .tmp_tcsuite_fail_ctr) + ((tmpval++)) + echo $tmpval > .tmp_tcsuite_fail_ctr + fi + if [ -f .tmp_tcsuite_fail ]; then + echo " - "$ATC " -- "$TC_ONELINE_DESCR" Execution stopped due to error" >> .tmp_tcsuite_fail + fi exit 1 fi return 0 @@ -2150,6 +2282,7 @@ __kube_create_namespace() { echo " Message: $(<./tmp/kubeerr)" return 1 else + kubectl label ns $1 autotest=engine echo -e " Creating namespace $1 $GREEN$BOLD OK $EBOLD$EGREEN" fi else @@ -2501,6 +2634,7 @@ __print_err() { echo -e $RED" Got: "${FUNCNAME[1]} ${@:2} $ERED fi ((RES_CONF_FAIL++)) + __check_stop_at_error } # Function to create the docker network for the test @@ -2864,25 +2998,31 @@ __var_test() { __check_stop_at_error return fi - elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then + elif [ "$4" == "=" ] && [ "$result" -eq $5 ]; then ((RES_PASS++)) echo -e " Result=${result} after ${duration} seconds${SAMELINE}" echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds" __print_current_stats return - elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then + elif [ "$4" == ">" ] && [ "$result" -gt $5 ]; then ((RES_PASS++)) echo -e " Result=${result} after ${duration} seconds${SAMELINE}" echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds" __print_current_stats return - elif [ $4 = "<" ] && [ "$result" -lt $5 ]; then + elif [ "$4" == "<" ] && [ "$result" -lt $5 ]; then ((RES_PASS++)) echo -e " Result=${result} after ${duration} seconds${SAMELINE}" echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds" __print_current_stats return - elif [ $4 = "contain_str" ] && [[ $result =~ $5 ]]; then + elif [ "$4" == ">=" ] && [ "$result" -ge $5 ]; then + ((RES_PASS++)) + echo -e " Result=${result} after ${duration} seconds${SAMELINE}" + echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds" + __print_current_stats + return + elif [ "$4" == "contain_str" ] && [[ $result =~ $5 ]]; then ((RES_PASS++)) echo -e " Result=${result} after ${duration} seconds${SAMELINE}" echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds" @@ -2924,19 +3064,23 @@ __var_test() { echo -e $RED" FAIL ${ERED}- ${3} ${4} ${5} not reached, result = ${result}" __print_current_stats __check_stop_at_error - elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then + elif [ "$4" == "=" ] && [ "$result" -eq $5 ]; then + ((RES_PASS++)) + echo -e $GREEN" PASS${EGREEN} - Result=${result}" + __print_current_stats + elif [ "$4" == ">" ] && [ "$result" -gt $5 ]; then ((RES_PASS++)) echo -e $GREEN" PASS${EGREEN} - Result=${result}" __print_current_stats - elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then + elif [ "$4" == "<" ] && [ "$result" -lt $5 ]; then ((RES_PASS++)) echo -e $GREEN" PASS${EGREEN} - Result=${result}" 
diff --git a/test/cr/app/cr.py b/test/cr/app/cr.py
index 94ef606d..2066e148 100644
--- a/test/cr/app/cr.py
+++ b/test/cr/app/cr.py
@@ -48,6 +48,7 @@ HOST_PORT = 2222

# Metrics vars
cntr_msg_callbacks=0
+cntr_batch_callbacks=0
cntr_msg_fetched=0
cntr_callbacks={}
hosts_set=set()
@@ -59,6 +60,7 @@ CALLBACK_TEXT_URL="/callbacks-text/" # Callback for string of text
APP_READ_URL="/get-event/"
APP_READ_ALL_URL="/get-all-events/"
DUMP_ALL_URL="/db"
+NULL_URL="/callbacks-null" # Url for ignored callback. Callbacks are not checked, counted or stored

MIME_TEXT="text/plain"
MIME_JSON="application/json"
@@ -200,10 +202,12 @@ def events_write(id):
            if (id in cntr_callbacks.keys()):
                cntr_callbacks[id][0] += 1
+               cntr_callbacks[id][2] += 1
            else:
                cntr_callbacks[id]=[]
                cntr_callbacks[id].append(1)
                cntr_callbacks[id].append(0)
+               cntr_callbacks[id].append(0)
    except Exception as e:
        print(CAUGHT_EXCEPTION+str(e))
@@ -223,6 +227,7 @@
def events_write_mr(id):
    global msg_callbacks
    global cntr_msg_callbacks
+   global cntr_batch_callbacks

    storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
    #Large payloads will otherwise overload the server
@@ -232,6 +237,7 @@ def events_write_mr(id):
    if (storeas is None):
        print("raw data: str(request.data): "+str(request.data))
    do_delay()
+   list_data=False
    try:
        #if (request.content_type == MIME_JSON):
        if (MIME_JSON in request.content_type):
@@ -239,6 +245,7 @@ def events_write_mr(id):
            msg_list = json.loads(data)
            if (storeas is None):
                print("Payload(json): "+str(msg_list))
+           list_data=True
        else:
            msg_list=[]
            print("Payload(content-type="+request.content_type+"). Setting empty json as payload")
@@ -249,6 +256,8 @@ def events_write_mr(id):
        with lock:
            remote_host_logging(request)
+           if (list_data):
+               cntr_batch_callbacks += 1
            for msg in msg_list:
                if (storeas is None):
                    msg=json.loads(msg)
@@ -277,6 +286,9 @@ def events_write_mr(id):
                    cntr_callbacks[id]=[]
                    cntr_callbacks[id].append(1)
                    cntr_callbacks[id].append(0)
+                   cntr_callbacks[id].append(0)
+               if (id in msg_callbacks.keys() and list_data):
+                   cntr_callbacks[id][2] += 1
    except Exception as e:
        print(CAUGHT_EXCEPTION+str(e))
@@ -294,6 +306,7 @@
def events_write_text(id):
    global msg_callbacks
    global cntr_msg_callbacks
+   global cntr_batch_callbacks

    storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
    #Large payloads will otherwise overload the server
@@ -306,26 +319,28 @@ def events_write_text(id):
    try:
        msg_list=None
+       list_data=False
        if (MIME_JSON in request.content_type): #Json array of strings
            msg_list=json.loads(request.data)
+           list_data=True
        else:
            data=request.data.decode("utf-8") #Assuming string
            msg_list=[]
            msg_list.append(data)

+       with lock:
+           cntr_batch_callbacks += 1
+           for msg in msg_list:
+               if (storeas == "md5"):
+                   md5msg={}
+                   print("msg: "+str(msg))
+                   print("msg (endcode str): "+str(msg.encode('utf-8')))
+                   md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
+                   msg=md5msg
+                   print("msg (data converted to md5 hash): "+str(msg["md5"]))
+
+               if (isinstance(msg, dict)):
+                   msg[TIME_STAMP]=str(datetime.now())
-       for msg in msg_list:
-           if (storeas == "md5"):
-               md5msg={}
-               print("msg: "+str(msg))
-               print("msg (endcode str): "+str(msg.encode('utf-8')))
-               md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
-               msg=md5msg
-               print("msg (data converted to md5 hash): "+str(msg["md5"]))
-
-           if (isinstance(msg, dict)):
-               msg[TIME_STAMP]=str(datetime.now())
-
-       with lock:
            cntr_msg_callbacks += 1
            if (id in msg_callbacks.keys()):
                msg_callbacks[id].append(msg)
@@ -339,6 +354,9 @@ def events_write_text(id):
                cntr_callbacks[id]=[]
                cntr_callbacks[id].append(1)
                cntr_callbacks[id].append(0)
+               cntr_callbacks[id].append(0)
+           if (id in cntr_callbacks.keys() and list_data):
+               cntr_callbacks[id][2] += 1
    except Exception as e:
        print(CAUGHT_EXCEPTION+str(e))
        traceback.print_exc()
@@ -352,7 +370,13 @@

    return 'OK',200

-### Functions for test ###
+# Receive a callback message but ignore the contents and return 200
+# URI and payload, (PUT or POST): /callbacks-null
+# response: OK 200
+@app.route(NULL_URL,
+    methods=['PUT','POST'])
+def null_url():
+    return 'OK',200

# Dump the whole db of current callbacks
# URI and parameter, (GET): /db
@@ -376,6 +400,18 @@ def requests_submitted():
    else:
        return Response(str("0"), status=200, mimetype=MIME_TEXT)

+@app.route('/counter/received_callback_batches',
+    methods=['GET'])
+def batches_submitted():
+    req_id = request.args.get('id')
+    if (req_id is None):
+        return Response(str(cntr_batch_callbacks), status=200, mimetype=MIME_TEXT)
+
+    if (req_id in cntr_callbacks.keys()):
+        return Response(str(cntr_callbacks[req_id][2]), status=200, mimetype=MIME_TEXT)
+    else:
+        return Response(str("0"), status=200, mimetype=MIME_TEXT)
+
@app.route('/counter/fetched_callbacks',
    methods=['GET'])
def requests_fetched():
@@ -440,6 +476,7 @@ def reset():
    global msg_callbacks
    global cntr_msg_fetched
    global cntr_msg_callbacks
+   global cntr_batch_callbacks
    global cntr_callbacks
    global forced_settings

@@ -447,6 +484,7 @@
    msg_callbacks={}
    cntr_msg_fetched=0
    cntr_msg_callbacks=0
+   cntr_batch_callbacks=0
    cntr_callbacks={}

    forced_settings['delay']=None
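For reference, a minimal sketch of exercising the new callback-receiver endpoints added above. It assumes a CR instance reachable on localhost:2222 (HOST_PORT in cr.py); the externally mapped port in a real test run may differ:

    #!/bin/bash
    CR=http://localhost:2222

    # A json array posted to /callbacks-text/<id> is counted as ONE batch but TWO messages
    curl -s -X POST -H "Content-Type: application/json" \
         -d '[{"DATA-MSG":"msg"},{"DATA-MSG":"msg"}]' "$CR/callbacks-text/test"

    curl -s "$CR/counter/received_callbacks"                  # total received messages
    curl -s "$CR/counter/received_callback_batches"           # total received batches
    curl -s "$CR/counter/received_callback_batches?id=test"   # batches for one callback id

    # /callbacks-null accepts PUT/POST but ignores the payload - nothing is counted or stored
    curl -s -X POST -d 'ignored' "$CR/callbacks-null"
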
diff --git a/test/cr/basic_test.sh b/test/cr/basic_test.sh
index 44e8526a..ea0a5761 100755
--- a/test/cr/basic_test.sh
+++ b/test/cr/basic_test.sh
@@ -45,6 +45,8 @@ fi
# source function to do curl and check result
. ../common/do_curl_function.sh

+RESP_CONTENT='*' #Dont check resp content type
+
echo "=== CR hello world ==="
RESULT="OK"
do_curl GET / 200
@@ -57,6 +59,10 @@ echo "=== Get counter - callbacks ==="
RESULT="0"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
@@ -91,6 +97,10 @@ echo "=== Get counter - callbacks ==="
RESULT="2"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
@@ -104,6 +114,10 @@ echo "=== Get counter - callbacks ==="
RESULT="2"
do_curl GET /counter/received_callbacks?id=test 200

+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches?id=test 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=test 200
@@ -117,6 +131,10 @@ echo "=== Get counter - callbacks ==="
RESULT="0"
do_curl GET /counter/received_callbacks?id=dummy 200

+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches?id=dummy 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=dummy 200
@@ -142,6 +160,10 @@ echo "=== Get counter - callbacks ==="
RESULT="2"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks 200
@@ -155,6 +177,10 @@ echo "=== Get counter - callbacks ==="
RESULT="2"
do_curl GET /counter/received_callbacks?id=test 200

+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches?id=test 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks?id=test 200
@@ -185,6 +211,10 @@ echo "=== Get counter - callbacks ==="
RESULT="5"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="5"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks 200
@@ -198,6 +228,10 @@ echo "=== Get counter - callbacks ==="
RESULT="1"
do_curl GET /counter/received_callbacks?id=test1 200

+echo "=== Get counter - callback batches ==="
+RESULT="1"
+do_curl GET /counter/received_callback_batches?id=test1 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=test1 200
@@ -214,6 +248,10 @@ echo "=== Get counter - callbacks ==="
RESULT="5"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="5"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="4"
do_curl GET /counter/fetched_callbacks 200
@@ -222,6 +260,29 @@ echo "=== Get counter - current events ==="
RESULT="1"
do_curl GET /counter/current_messages 200

+echo "=== Send a request ==="
+RESULT="*"
+#create payload
+echo "[{\"DATA-MSG\":\"msg\"},{\"DATA-MSG\":\"msg\"}]" > .tmp.json
+do_curl POST '/callbacks-text/test' 200 .tmp.json
+
+echo "=== Get counter - callbacks ==="
+RESULT="7"
+do_curl GET /counter/received_callbacks 200
+
+echo "=== Get counter - callback batches ==="
+RESULT="6"
+do_curl GET /counter/received_callback_batches 200
+
+echo "=== Get counter - fetched events ==="
+RESULT="4"
+do_curl GET /counter/fetched_callbacks 200
+
+echo "=== Get counter - current events ==="
+RESULT="3"
+do_curl GET /counter/current_messages 200
+
+
echo "=== CR reset ==="
RESULT="OK"
do_curl GET /reset 200
@@ -230,6 +291,10 @@ echo "=== Get counter - callbacks ==="
RESULT="0"
do_curl GET /counter/received_callbacks 200

+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
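The convention used throughout basic_test.sh above is to set RESULT to the expected response body before each call (with '*' apparently meaning the body is not checked) and then call do_curl with method, path, expected http status and an optional payload file. A condensed sketch of one such check; the exact semantics live in test/common/do_curl_function.sh and are an assumption here:

    #!/bin/bash
    # Sketch of the do_curl convention used above - not a replacement for basic_test.sh.
    . ../common/do_curl_function.sh   # provides do_curl and reads $RESULT / $RESP_CONTENT

    RESP_CONTENT='*'   # '*' = do not check the response content type
    RESULT="0"         # expected response body of the next call
    do_curl GET /counter/received_callback_batches 200   # method, path, expected status [, payload file]
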
+echo "=== Send a request ===" +RESULT="*" +#create payload +echo "[{\"DATA-MSG\":\"msg\"},{\"DATA-MSG\":\"msg\"}]" > .tmp.json +do_curl POST '/callbacks-text/test' 200 .tmp.json + +echo "=== Get counter - callbacks ===" +RESULT="7" +do_curl GET /counter/received_callbacks 200 + +echo "=== Get counter - callback batches ===" +RESULT="6" +do_curl GET /counter/received_callback_batches 200 + +echo "=== Get counter - fetched events ===" +RESULT="4" +do_curl GET /counter/fetched_callbacks 200 + +echo "=== Get counter - current events ===" +RESULT="3" +do_curl GET /counter/current_messages 200 + + echo "=== CR reset ===" RESULT="OK" do_curl GET /reset 200 @@ -230,6 +291,10 @@ echo "=== Get counter - callbacks ===" RESULT="0" do_curl GET /counter/received_callbacks 200 +echo "=== Get counter - callback batches ===" +RESULT="0" +do_curl GET /counter/received_callback_batches 200 + echo "=== Get counter - fetched events ===" RESULT="0" do_curl GET /counter/fetched_callbacks 200 diff --git a/test/simulator-group/dmaapmed/app.yaml b/test/simulator-group/dmaapmed/app.yaml index 7c39bea3..2a94a536 100644 --- a/test/simulator-group/dmaapmed/app.yaml +++ b/test/simulator-group/dmaapmed/app.yaml @@ -27,8 +27,8 @@ spec: - name: https containerPort: $DMAAP_MED_INTERNAL_SECURE_PORT volumeMounts: - - mountPath: $DMAAP_MED_DATA_MOUNT_PATH/$DMAAP_MED_DATA_FILE - subPath: $DMAAP_MED_DATA_FILE + - mountPath: $DMAAP_MED_DATA_MOUNT_PATH/$DMAAP_MED_CONTR_DATA_FILE + subPath: $DMAAP_MED_CONTR_DATA_FILE name: dmaapadp-data-name env: - name: INFO_PRODUCER_HOST @@ -41,6 +41,8 @@ spec: value: "$MR_SERVICE_PATH" - name: LOG_LEVEL value: Debug + - name: KAFKA_BOOTSTRAP_SERVERS + value: "$MR_KAFKA_SERVICE_PATH" volumes: - configMap: defaultMode: 420 diff --git a/test/simulator-group/dmaapmed/docker-compose.yml b/test/simulator-group/dmaapmed/docker-compose.yml index 9cb929cf..53d126fb 100644 --- a/test/simulator-group/dmaapmed/docker-compose.yml +++ b/test/simulator-group/dmaapmed/docker-compose.yml @@ -33,8 +33,9 @@ services: - INFO_COORD_ADDR=${ICS_SERVICE_PATH} - DMAAP_MR_ADDR=${MR_SERVICE_PATH} - LOG_LEVEL=Debug + - KAFKA_BOOTSTRAP_SERVERS=${MR_KAFKA_SERVICE_PATH} volumes: - - ${DMAAP_MED_HOST_MNT_DIR}/$DMAAP_MED_DATA_FILE:${DMAAP_MED_DATA_MOUNT_PATH}/$DMAAP_MED_DATA_FILE + - ${DMAAP_MED_HOST_MNT_DIR}/${DMAAP_MED_CONTR_DATA_FILE}:${DMAAP_MED_DATA_MOUNT_PATH}/${DMAAP_MED_CONTR_DATA_FILE} labels: - "nrttest_app=DMAAPMED" - "nrttest_dp=${DMAAP_MED_DISPLAY_NAME}" diff --git a/test/simulator-group/dmaapmed/type_config_1.json b/test/simulator-group/dmaapmed/type_config_1.json new file mode 100644 index 00000000..de1b1a46 --- /dev/null +++ b/test/simulator-group/dmaapmed/type_config_1.json @@ -0,0 +1,13 @@ +{ + "types": + [ + { + "id": "STD_Fault_Messages", + "dmaapTopicUrl": "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=15000&limit=100" + }, + { + "id": "Kafka_TestTopic", + "kafkaInputTopic": "unauthenticated.dmaapmed_kafka.text" + } + ] + } \ No newline at end of file diff --git a/test/simulator-group/policy_agent/application2.yaml b/test/simulator-group/policy_agent/application2.yaml new file mode 100644 index 00000000..05c5db12 --- /dev/null +++ b/test/simulator-group/policy_agent/application2.yaml @@ -0,0 +1,71 @@ +################################################################################ +# Copyright (c) 2020 Nordix Foundation. # +# # +# Licensed under the Apache License, Version 2.0 (the \"License\"); # +# you may not use this file except in compliance with the License. 
# +# You may obtain a copy of the License at # +# # +# http://www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an \"AS IS\" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. # +################################################################################ + +spring: + profiles: + active: prod + main: + allow-bean-definition-overriding: true + aop: + auto: false +management: + endpoints: + web: + exposure: + # Enabling of springboot actuator features. See springboot documentation. + include: "loggers,logfile,health,info,metrics,threaddump,heapdump" + +logging: + # Configuration of logging + level: + ROOT: ERROR + org.springframework: ERROR + org.springframework.data: ERROR + org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR + ${POLICY_AGENT_PKG_NAME}: INFO + file: /var/log/policy-agent/application.log + +server: + # Configuration of the HTTP/REST server. The parameters are defined and handeled by the springboot framework. + # See springboot documentation. + port : 8433 + http-port: 8081 + ssl: + key-store-type: JKS + key-store-password: policy_agent + key-store: /opt/app/policy-agent/etc/cert/keystore.jks + key-password: policy_agent + key-alias: policy_agent +app: + # Location of the component configuration file. The file will only be used if the Consul database is not used; + # configuration from the Consul will override the file. + filepath: /var/policy-management-service/application_configuration.json + # path where the service can store data + vardata-directory: /var/policy-management-service + # path to json schema for config validation + config-file-schema-path: /application_configuration_schema.json + webclient: + # Configuration of the trust store used for the HTTP client (outgoing requests) + # The file location and the password for the truststore is only relevant if trust-store-used == true + # Note that the same keystore as for the server is used. + trust-store-used: false + trust-store-password: policy_agent + trust-store: /opt/app/policy-agent/etc/cert/truststore.jks + # Configuration of usage of HTTP Proxy for the southbound accesses. + # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s + http.proxy-host: $AGENT_HTTP_PROXY_CONFIG_HOST_NAME + http.proxy-port: $AGENT_HTTP_PROXY_CONFIG_PORT + http.proxy-type: HTTP
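Relating to the dmaapmed changes above (type_config_1.json and the new KAFKA_BOOTSTRAP_SERVERS variable): the Kafka_TestTopic type consumes from the kafka topic unauthenticated.dmaapmed_kafka.text. A sketch of feeding that topic manually with the standard kafka console producer; the broker address format and the availability of the kafka CLI tools are assumptions, not part of this change:

    #!/bin/bash
    # Assumes $MR_KAFKA_SERVICE_PATH holds a reachable <host>:<port> for the kafka broker
    # (the same value the mediator receives via KAFKA_BOOTSTRAP_SERVERS) and that the
    # standard kafka console tools are installed on the path.
    echo '{"msg":"test"}' | kafka-console-producer.sh \
        --bootstrap-server "$MR_KAFKA_SERVICE_PATH" \
        --topic unauthenticated.dmaapmed_kafka.text
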