X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=blobdiff_plain;f=test%2Fauto-test%2FFTC3001.sh;h=3222da16968e5930f63989c6e1467bb580851b4b;hb=d14ce4bf82cf83574bac13b11242b48c58e82874;hp=26ba3f22376a8b16355ebd333775134694150045;hpb=2c5af88869ce0fa702b011dbc6cb8a272273f70e;p=nonrtric.git

diff --git a/test/auto-test/FTC3001.sh b/test/auto-test/FTC3001.sh
index 26ba3f22..3222da16 100755
--- a/test/auto-test/FTC3001.sh
+++ b/test/auto-test/FTC3001.sh
@@ -1,7 +1,8 @@
 #!/usr/bin/env bash
 
 # ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# Copyright (C) 2020-2023 Nordix Foundation. All rights reserved.
+# Copyright (C) 2023 OpenInfra Foundation Europe. All rights reserved.
 # ========================================================================
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,7 +26,7 @@ DOCKER_INCLUDED_IMAGES="ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HT
 #App names to include in the test when running kubernetes, space separated list
 KUBE_INCLUDED_IMAGES=" ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HTTPPROXY"
 
-#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
+#Pre-started app (not started by script) to include in the test when running kubernetes, space separated list
 KUBE_PRESTARTED_IMAGES=""
 
 #Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
@@ -34,7 +35,7 @@ KUBE_PRESTARTED_IMAGES=""
 CONDITIONALLY_IGNORED_IMAGES=""
 
 #Supported test environment profiles
-SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE"
+SUPPORTED_PROFILES="ORAN-G-RELEASE ORAN-H-RELEASE ORAN-I-RELEASE"
 #Supported run modes
 SUPPORTED_RUNMODES="DOCKER KUBE"
 
@@ -46,7 +47,7 @@ setup_testenvironment
 
 #Local vars in test script
 ##########################
-FLAT_A1_EI="1"
+
 NUM_CR=1 # Number of callback receivers, max 1
 ## Note: The number jobs must be a multiple of the number of CRs in order to calculate the number of expected event in each CR
 NUM_JOBS=100 # Mediator and adapter gets same number of jobs for every type
@@ -144,9 +145,22 @@ ics_api_idc_get_type_ids 200 $adp_med_type_list
 start_timer "Create adapter (kafka) jobs: $NUM_JOBS"
 for ((i=1; i<=$NUM_JOBS; i++))
 do
-    # Max buffer timeout for is about 160 sec for Adator jobs"
+    # Max buffer timeout for is about 160 sec for Adapter jobs"
     adp_timeout=$(($i*1000))
-    adp_config_data='{"filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$adp_timeout'}}'
+    if [[ "$DMAAP_ADP_FEATURE_LEVEL" == *"FILTERSPEC"* ]]; then
+        deviation "It is possible to give filter without filtertype without error indication"
+        if [[ "$DMAAP_ADP_FEATURE_LEVEL" == *"FILTERSCHEMA"* ]]; then
+            adp_config_data='{"filterType": "regexp", "filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMilliseconds": '$adp_timeout'}}'
+        else
+            adp_config_data='{"filterType": "regexp", "filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$adp_timeout'}}'
+        fi
+    else
+        if [[ "$DMAAP_ADP_FEATURE_LEVEL" == *"FILTERSCHEMA"* ]]; then
+            adp_config_data='{"filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMilliseconds": '$adp_timeout'}}'
+        else
+            adp_config_data='{"filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$adp_timeout'}}'
+        fi
+    fi
     echo $adp_config_data > tmp/adp_config_data.json
 
     cr_index=$(($i%$NUM_CR))
@@ -163,7 +177,11 @@ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
     for ((i=1; i<=$NUM_JOBS; i++))
     do
         med_timeout=$(($i*5000))
-        med_config_data='{"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$med_timeout'}}'
+        if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"FILTERSCHEMA"* ]]; then
+            med_config_data='{"bufferTimeout": {"maxSize": 100,"maxTimeMilliseconds": '$med_timeout'}}'
+        else
+            med_config_data='{"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$med_timeout'}}'
+        fi
         echo $med_config_data > tmp/med_config_data.json
         cr_index=$(($i%$NUM_CR))
         service_text="CR_SERVICE_TEXT_PATH_"$cr_index
@@ -202,7 +220,7 @@ do
     kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text$i/counters/sent 3 30
 done
 
-# Wait for data recetption, adapter kafka
+# Wait for data reception, adapter kafka
 EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
 EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_BATCHES_DELIV))
 
@@ -218,8 +236,8 @@ print_timer
 for ((i=1; i<=$NUM_JOBS; i++))
 do
     cr_index=$(($i%$NUM_CR))
-    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'$i
-    cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'$i
+    cr_api_check_single_generic_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'$i
+    cr_api_check_single_generic_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'$i
 done
 
 if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
@@ -238,7 +256,7 @@ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
         kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text$i/counters/sent 2 30
     done
 
-    # Wait for data recetption, adapter kafka
+    # Wait for data reception, adapter kafka
     EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$PREV_DATA_DELIV))
     EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$PREV_BATCHES_DELIV))
 
@@ -256,8 +274,8 @@ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
 
     for ((i=1; i<=$NUM_JOBS; i++))
     do
        cr_index=$(($i%$NUM_CR))
-        cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0'$i
-        cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2'$i
+        cr_api_check_single_generic_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0'$i
+        cr_api_check_single_generic_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2'$i
    done
 fi
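
For reference, a minimal sketch (not part of FTC3001.sh) of the adapter job configuration that the loop above builds for a single job index. It assumes DMAAP_ADP_FEATURE_LEVEL contains both FILTERSPEC and FILTERSCHEMA, i.e. the first branch of the new if-block, and the job index i=100 is only an example value:

#!/usr/bin/env bash
# Illustrative only: reproduce the adapter (kafka) job config JSON for one example job index.
i=100                                 # example job index (1..NUM_JOBS)
adp_timeout=$(($i*1000))              # per-job buffer timeout in milliseconds
adp_config_data='{"filterType": "regexp", "filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMilliseconds": '$adp_timeout'}}'
echo "$adp_config_data"
# Prints: {"filterType": "regexp", "filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMilliseconds": 100000}}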