X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=blobdiff_plain;f=test%2Fcommon%2Fa1pms_api_functions.sh;h=b80bb25ef159e6addbc4fb0c4677ccb6a6e42e1c;hb=84525a0316cc1626fe4d6328283d4578f735ae9b;hp=f5397171c5a6264bc0c9638f17987a28b052169d;hpb=d2aeca8843fe3ffca2e73dec5b64daeef0dda938;p=nonrtric.git diff --git a/test/common/a1pms_api_functions.sh b/test/common/a1pms_api_functions.sh index f5397171..b80bb25e 100644 --- a/test/common/a1pms_api_functions.sh +++ b/test/common/a1pms_api_functions.sh @@ -1,7 +1,8 @@ #!/bin/bash # ============LICENSE_START=============================================== -# Copyright (C) 2021 Nordix Foundation. All rights reserved. +# Copyright (C) 2021-2023 Nordix Foundation. All rights reserved. +# Copyright (C) 2024 OpenInfra Foundation Europe. All rights reserved. # ======================================================================== # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,7 +26,7 @@ # arg: (selects staging, snapshot, release etc) # is present only for images with staging, snapshot,release tags __A1PMS_imagesetup() { - __check_and_create_image_var A1PMS "A1PMS_IMAGE" "A1PMS_IMAGE_BASE" "A1PMS_IMAGE_TAG" $1 "$A1PMS_DISPLAY_NAME" + __check_and_create_image_var A1PMS "A1PMS_IMAGE" "A1PMS_IMAGE_BASE" "A1PMS_IMAGE_TAG" $1 "$A1PMS_DISPLAY_NAME" "" } # Pull image from remote repo or use locally built image @@ -62,12 +63,12 @@ __A1PMS_kube_scale_zero() { } # Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action. -# This function is called for prestarted apps not managed by the test script. +# This function is called for pre-started apps not managed by the test script. __A1PMS_kube_scale_zero_and_wait() { __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-policymanagementservice } -# Delete all kube resouces for the app +# Delete all kube resources for the app # This function is called for apps managed by the test script. __A1PMS_kube_delete_all() { __kube_delete_all_resources $KUBE_NONRTRIC_NAMESPACE autotest A1PMS @@ -75,7 +76,7 @@ __A1PMS_kube_delete_all() { # Store docker logs # This function is called for apps managed by the test script. -# args: +# args: __A1PMS_store_docker_logs() { if [ $RUNMODE == "KUBE" ]; then kubectl $KUBECONF logs -l "autotest=A1PMS" -n $KUBE_NONRTRIC_NAMESPACE --tail=-1 > $1$2_a1pms.log 2>&1 @@ -89,13 +90,14 @@ __A1PMS_store_docker_logs() { # args: - __A1PMS_initial_setup() { use_a1pms_rest_http + export A1PMS_SIDECAR_JWT_FILE="" } -# Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers +# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers # For docker, the namespace shall be excluded -# This function is called for apps managed by the test script as well as for prestarted apps. +# This function is called for apps managed by the test script as well as for pre-started apps. 
# args: - -__A1PMS_statisics_setup() { +__A1PMS_statistics_setup() { if [ $RUNMODE == "KUBE" ]; then echo "A1PMS $A1PMS_APP_NAME $KUBE_NONRTRIC_NAMESPACE" else @@ -196,9 +198,6 @@ __export_a1pms_vars() { export A1PMS_CONFIG_CONFIGMAP_NAME=$A1PMS_APP_NAME"-config" export A1PMS_DATA_CONFIGMAP_NAME=$A1PMS_APP_NAME"-data" export A1PMS_PKG_NAME - export CONSUL_HOST - export CONSUL_INTERNAL_PORT - export CONFIG_BINDING_SERVICE export A1PMS_CONFIG_KEY export DOCKER_SIM_NWNAME export A1PMS_HOST_MNT_DIR @@ -240,7 +239,7 @@ start_a1pms() { __check_included_image "A1PMS" retcode_i=$? - # Check if app shall only be used by the testscipt + # Check if app shall only be used by the test script __check_prestarted_image "A1PMS" retcode_p=$? @@ -306,6 +305,10 @@ start_a1pms() { # Create app input_yaml=$SIM_GROUP"/"$A1PMS_COMPOSE_DIR"/"app.yaml output_yaml=$PWD/tmp/a1pmsapp.yaml + if [ -z "$A1PMS_SIDECAR_JWT_FILE" ]; then + cat $input_yaml | sed '/#A1PMS_JWT_START/,/#A1PMS_JWT_STOP/d' > $PWD/tmp/a1pmsapp_tmp.yaml + input_yaml=$PWD/tmp/a1pmsapp_tmp.yaml + fi __kube_create_instance app $A1PMS_APP_NAME $input_yaml $output_yaml fi @@ -346,6 +349,7 @@ start_a1pms() { fi else echo " No files in mounted dir or dir does not exists" + mkdir db fi cd $curdir @@ -453,17 +457,17 @@ start_stopped_a1pms() { } -# Function to perpare the consul configuration according to the current simulator configuration -# args: SDNC|NOSDNC +# Function to prepare the a1pms configuration according to the current simulator configuration +# args: SDNC|NOSDNC [ ] # (Function for test scripts) -prepare_consul_config() { - echo -e $BOLD"Prepare Consul config"$EBOLD +prepare_a1pms_config() { + echo -e $BOLD"Prepare A1PMS config"$EBOLD - echo " Writing consul config for "$A1PMS_APP_NAME" to file: "$2 + echo " Writing a1pms config for "$A1PMS_APP_NAME" to file: "$2 - if [ $# != 2 ]; then + if [ $# != 2 ] && [ $# != 4 ]; then ((RES_CONF_FAIL++)) - __print_err "need two args, SDNC|NOSDNC " $@ + __print_err "need two or four args, SDNC|NOSDNC [ ]" $@ exit 1 fi @@ -473,7 +477,7 @@ prepare_consul_config() { echo -e " Config$BOLD excluding SDNC$EBOLD configuration" else ((RES_CONF_FAIL++)) - __print_err "need two args, SDNC|NOSDNC " $@ + __print_err "need three args, SDNC|NOSDNC HEADER|NOHEADER" $@ exit 1 fi @@ -488,23 +492,26 @@ prepare_consul_config() { config_json=$config_json"\n }" config_json=$config_json"\n ]," fi - - config_json=$config_json"\n \"streams_publishes\": {" - config_json=$config_json"\n \"dmaap_publisher\": {" - config_json=$config_json"\n \"type\": \"message-router\"," - config_json=$config_json"\n \"dmaap_info\": {" - config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\"" - config_json=$config_json"\n }" - config_json=$config_json"\n }" - config_json=$config_json"\n }," - config_json=$config_json"\n \"streams_subscribes\": {" - config_json=$config_json"\n \"dmaap_subscriber\": {" - config_json=$config_json"\n \"type\": \"message-router\"," - config_json=$config_json"\n \"dmaap_info\": {" - config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\"" - config_json=$config_json"\n }" - config_json=$config_json"\n }" - config_json=$config_json"\n }," + if [[ "$A1PMS_FEATURE_LEVEL" == *"NO-DMAAP"* ]]; then + : + else + config_json=$config_json"\n \"streams_publishes\": {" + config_json=$config_json"\n \"dmaap_publisher\": {" + config_json=$config_json"\n \"type\": \"message-router\"," + config_json=$config_json"\n \"dmaap_info\": {" + config_json=$config_json"\n \"topic_url\": 
\"$MR_SERVICE_PATH$MR_WRITE_URL\"" + config_json=$config_json"\n }" + config_json=$config_json"\n }" + config_json=$config_json"\n }," + config_json=$config_json"\n \"streams_subscribes\": {" + config_json=$config_json"\n \"dmaap_subscriber\": {" + config_json=$config_json"\n \"type\": \"message-router\"," + config_json=$config_json"\n \"dmaap_info\": {" + config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\"" + config_json=$config_json"\n }" + config_json=$config_json"\n }" + config_json=$config_json"\n }," + fi config_json=$config_json"\n \"ric\": [" @@ -521,15 +528,31 @@ prepare_consul_config() { fi done fi + result=$(kubectl $KUBECONF get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICMEDIATORSIM")].metadata.name}') + oranrics="" + if [ $? -eq 0 ] && [ ! -z "$result" ]; then + for im in $result; do + if [[ $im != *"-0" ]]; then + ric_subdomain=$(kubectl $KUBECONF get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}') + rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE + oranrics=$oranrics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE + let ric_cntr=ric_cntr+1 + fi + done + fi if [ $ric_cntr -eq 0 ]; then echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW fi else - rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}}) + rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "label=a1sim" --filter "status=running" --format {{.Names}}) + oranrics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "label=orana1sim" --filter "status=running" --format {{.Names}}) + + rics="$rics $oranrics" + if [ $? -ne 0 ] || [ -z "$rics" ]; then - echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED + echo -e $RED" FAIL - the names of the running RIC Simulator or ORAN RIC cannot be retrieved." $ERED ((RES_CONF_FAIL++)) - return 1 + exit 1 fi fi cntr=0 @@ -542,15 +565,27 @@ prepare_consul_config() { ric_id=${ric%.*.*} #extract pod id from full hosthame ric_id=$(echo "$ric_id" | tr '-' '_') else - if [ $DOCKER_COMPOSE_VERION == "V1" ]; then - ric_id=$ric - else - ric_id=$(echo "$ric" | tr '-' '_') #ric id still needs underscore as it is different from the container name - fi + ric_id=$(echo "$ric" | tr '-' '_') #ric var still needs underscore as it is different from the container name fi - echo " Found a1 sim: "$ric_id + echo " Found a1 sim: "$ric config_json=$config_json"\n \"name\": \"$ric_id\"," - config_json=$config_json"\n \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\"," + + xricfound=0 + for xric in $oranrics; do + if [ $xric == $ric ]; then + xricfound=1 + fi + done + if [ $xricfound -eq 0 ]; then + config_json=$config_json"\n \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\"," + else + config_json=$config_json"\n \"baseUrl\": \"$RICMEDIATOR_SIM_HTTPX://$ric:$RICMEDIATOR_SIM_PORT\"," + fi + if [ ! 
-z "$3" ]; then
+                if [[ $ric == "$3"* ]]; then
+                    config_json=$config_json"\n \"customAdapterClass\": \"$4\","
+                fi
+            fi
             if [ $1 == "SDNC" ]; then
                 config_json=$config_json"\n \"controller\": \"$SDNC_APP_NAME\","
             fi
@@ -565,9 +600,7 @@
     config_json=$config_json"\n ]"
     config_json=$config_json"\n}"
-    if [ $RUNMODE == "KUBE" ]; then
-        config_json="{\"config\":"$config_json"}"
-    fi
+    config_json="{\"config\":"$config_json"}"
     printf "$config_json">$2
@@ -1371,7 +1404,7 @@ a1pms_api_delete_policy_parallel() {
     #if [ $A1PMS_ADAPTER != $RESTBASE ] && [ $A1PMS_ADAPTER != $RESTBASE_SECURE ]; then
     if [ $A1PMS_ADAPTER_TYPE != "REST" ]; then
-        echo " Info - a1pms_api_delete_policy_parallel uses only the a1pms REST interface - create over dmaap in parallel is not supported"
+        echo " Info - a1pms_api_delete_policy_parallel uses only the a1pms REST interface - delete over dmaap in parallel is not supported"
         echo " Info - will execute over a1pms REST"
     fi
@@ -1431,6 +1464,84 @@ a1pms_api_delete_policy_parallel() {
     return 1
 }

+# API Test function: GET /policy and V2 GET /v2/policies/{policy_id}, to run in parallel for a number of rics
+# args: <response-code> <number-of-rics> <policy-start-id> <count-per-ric> <number-of-pids>
+# (Function for test scripts)
+a1pms_api_get_policy_parallel() {
+    __log_test_start $@
+
+    if [ $# -ne 5 ]; then
+        __print_err "<response-code> <number-of-rics> <policy-start-id> <count-per-ric> <number-of-pids>" $@
+        return 1
+    fi
+    resp_code=$1; shift;
+    num_rics=$1; shift;
+    start_id=$1; shift;
+    count=$1; shift;
+    pids=$1; shift;
+
+    #if [ $A1PMS_ADAPTER != $RESTBASE ] && [ $A1PMS_ADAPTER != $RESTBASE_SECURE ]; then
+    if [ $A1PMS_ADAPTER_TYPE != "REST" ]; then
+        echo " Info - a1pms_api_get_policy_parallel uses only the a1pms REST interface - GET over dmaap in parallel is not supported"
+        echo " Info - will execute over a1pms REST"
+    fi
+
+    if [ "$A1PMS_VERSION" == "V2" ]; then
+        query="$A1PMS_API_PREFIX/v2/policies/"
+    else
+        query="/policy"
+    fi
+
+    urlbase=${A1PMS_ADAPTER}${query}
+
+    httpproxy="NOPROXY"
+    if [ ! -z "$KUBE_PROXY_PATH" ]; then
+        httpproxy=$KUBE_PROXY_PATH
+    fi
+
+    for ((i=1; i<=$pids; i++))
+    do
+        uuid=$UUID
+        if [ -z "$uuid" ]; then
+            uuid="NOUUID"
+        fi
+        echo "" > "./tmp/.pid${i}.get.res.txt"
+        echo $resp_code $urlbase $num_rics $uuid $start_id $count $pids $i $httpproxy> "./tmp/.pid${i}.get.txt"
+        echo $i
+    done | xargs -n 1 -I{} -P $pids bash -c '{
+        arg=$(echo {})
+        echo " Parallel process $arg started"
+        tmp=$(< "./tmp/.pid${arg}.get.txt")
+        python3 ../common/get_policies_process.py $tmp > ./tmp/.pid${arg}.get.res.txt
+    }'
+    msg=""
+    for ((i=1; i<=$pids; i++))
+    do
+        file="./tmp/.pid${i}.get.res.txt"
+        tmp=$(< $file)
+        if [ -z "$tmp" ]; then
+            echo " Process $i : unknown result (result file empty)"
+            msg="failed"
+        else
+            res=${tmp:0:1}
+            if [ $res == "0" ]; then
+                echo " Process $i : OK - "${tmp:1}
+            else
+                echo " Process $i : failed - "${tmp:1}
+                msg="failed"
+            fi
+        fi
+    done
+    if [ -z $msg ]; then
+        __collect_endpoint_stats "A1PMS" 04 "GET" $A1PMS_API_PREFIX"/v2/policies/{policy_id}" $resp_code $(($count*$num_rics))
+        __log_test_pass " $(($count*$num_rics)) policy request(s) executed"
+        return 0
+    fi
+
+    __log_test_fail_general "One or more processes failed to execute"
+    return 1
+}
+
 # API Test function: GET /policy_ids and V2 GET /v2/policies
 # args: |NORIC |NOSERVICE |NOTYPE ([ $file query="/v2/configuration"
@@ -2343,7 +2454,9 @@ a1pms_api_get_configuration() {
     body=${res:0:${#res}-3}
     targetJson=$(< $2)
-    targetJson="{\"config\":"$targetJson"}"
+    # if [ $RUNMODE == "DOCKER" ]; then    #In kube the file already has a header
+    #     inputJson="{\"config\":"$inputJson"}"
+    # fi
     echo "TARGET JSON: $targetJson" >> $HTTPLOG
     res=$(python3 ../common/compare_json.py "$targetJson" "$body")
@@ -2380,4 +2493,17 @@ a1pms_kube_pvc_reset() {
     __log_test_pass
     return 0
+}
+
+# args: <realm> <client-id> <client-secret>
+a1pms_configure_sec() {
+    export A1PMS_CREDS_GRANT_TYPE="client_credentials"
+    export A1PMS_CREDS_CLIENT_SECRET=$3
+    export A1PMS_CREDS_CLIENT_ID=$2
+    export A1PMS_AUTH_SERVICE_URL=$KEYCLOAK_SERVICE_PATH$KEYCLOAK_TOKEN_URL_PREFIX/$1/protocol/openid-connect/token
+    export A1PMS_SIDECAR_MOUNT="/token-cache"
+    export A1PMS_SIDECAR_JWT_FILE=$A1PMS_SIDECAR_MOUNT"/jwt.txt"
+
+    export AUTHSIDECAR_APP_NAME
+    export AUTHSIDECAR_DISPLAY_NAME
+}
\ No newline at end of file
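
For context, a minimal usage sketch of the renamed prepare_a1pms_config function as a test script might call it. The output file name, the ric name prefix and the adapter class are illustrative assumptions only; per the function body, the first argument selects whether the SDNC controller is included in the generated ric entries, and the optional third and fourth arguments assign a customAdapterClass to rics whose names start with the given prefix.

# usage sketch - placeholder file name, ric prefix and adapter class
prepare_a1pms_config NOSDNC ".a1pms_config.json"
prepare_a1pms_config SDNC ".a1pms_config.json" ricsim_g3 anyorg.example.CustomA1AdapterClass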
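
The new a1pms_api_get_policy_parallel function follows the same calling convention as the other *_parallel helpers; the argument order below is read from the assignments in the function body (resp_code, num_rics, start_id, count, pids), and the values are placeholders for illustration.

# usage sketch - placeholder values
# <response-code> <number-of-rics> <policy-start-id> <count-per-ric> <number-of-parallel-processes>
a1pms_api_get_policy_parallel 200 $NUM_RICS 2000 $NUM_POLICIES_PER_RIC 10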
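
a1pms_configure_sec only exports variables, so it takes effect when called before start_a1pms: with A1PMS_SIDECAR_JWT_FILE set, start_a1pms keeps the #A1PMS_JWT_START/#A1PMS_JWT_STOP section in the app yaml and the token sidecar is deployed. A sketch, with a placeholder Keycloak realm, client id and secret:

# usage sketch - placeholder realm and client credentials
a1pms_configure_sec nonrtric-realm a1pms-client "$A1PMS_CLIENT_SECRET"
# ...then start a1pms as usual; the JWT sidecar section is now included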