-// Package docs GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
+// Package api GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
// This file was generated by swaggo/swag
-package docs
+package api
import (
"bytes"
"description": ""
},
"400": {
- "description": "Bad Request",
+ "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
"schema": {
- "type": "string"
+ "$ref": "#/definitions/ErrorInfo"
+ },
+ "headers": {
+ "Content-Type": {
+ "type": "string",
+ "description": "application/problem+json"
+ }
}
}
}
"/health_check": {
"get": {
"description": "Get the status of the producer. Will show if the producer has registered in ICS.",
+ "produces": [
+ "application/json"
+ ],
"tags": [
"Data producer (callbacks)"
],
"summary": "Get status",
"responses": {
"200": {
- "description": ""
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/"
+ }
}
}
}
"in": "body",
"required": true,
"schema": {
- "$ref": "#/definitions/jobs.JobInfo"
+ "$ref": "#/definitions/JobInfo"
}
}
],
"description": ""
},
"400": {
- "description": "Bad Request",
+ "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
"schema": {
- "type": "string"
+ "$ref": "#/definitions/ErrorInfo"
+ },
+ "headers": {
+ "Content-Type": {
+ "type": "string",
+ "description": "application/problem+json"
+ }
}
}
}
}
},
"definitions": {
- "jobs.BufferTimeout": {
+ "": {
+ "type": "object",
+ "properties": {
+ "registeredStatus": {
+ "description": "The registration status of the producer in Information Coordinator Service. Either ` + "`" + `registered` + "`" + ` or ` + "`" + `not registered` + "`" + `",
+ "type": "string",
+ "example": "registered"
+ }
+ }
+ },
+ "BufferTimeout": {
"type": "object",
"properties": {
"maxSize": {
}
}
},
- "jobs.JobInfo": {
+ "ErrorInfo": {
+ "type": "object",
+ "properties": {
+ "detail": {
+ "description": "A human-readable explanation specific to this occurrence of the problem.",
+ "type": "string",
+ "example": "Info job type not found"
+ },
+ "instance": {
+ "description": "A URI reference that identifies the specific occurrence of the problem.",
+ "type": "string"
+ },
+ "status": {
+ "description": "The HTTP status code generated by the origin server for this occurrence of the problem.",
+ "type": "integer",
+ "example": 400
+ },
+ "title": {
+ "description": "A short, human-readable summary of the problem type.",
+ "type": "string"
+ },
+ "type": {
+ "description": "A URI reference that identifies the problem type.",
+ "type": "string"
+ }
+ }
+ },
+ "JobInfo": {
"type": "object",
"properties": {
"info_job_data": {
- "$ref": "#/definitions/jobs.Parameters"
+ "$ref": "#/definitions/Parameters"
},
"info_job_identity": {
"type": "string"
}
}
},
- "jobs.Parameters": {
+ "Parameters": {
"type": "object",
"properties": {
"bufferTimeout": {
- "$ref": "#/definitions/jobs.BufferTimeout"
+ "$ref": "#/definitions/BufferTimeout"
}
}
}
"description": ""
},
"400": {
- "description": "Bad Request",
+ "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
"schema": {
- "type": "string"
+ "$ref": "#/definitions/ErrorInfo"
+ },
+ "headers": {
+ "Content-Type": {
+ "type": "string",
+ "description": "application/problem+json"
+ }
}
}
}
"/health_check": {
"get": {
"description": "Get the status of the producer. Will show if the producer has registered in ICS.",
+ "produces": [
+ "application/json"
+ ],
"tags": [
"Data producer (callbacks)"
],
"summary": "Get status",
"responses": {
"200": {
- "description": ""
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/"
+ }
}
}
}
"in": "body",
"required": true,
"schema": {
- "$ref": "#/definitions/jobs.JobInfo"
+ "$ref": "#/definitions/JobInfo"
}
}
],
"description": ""
},
"400": {
- "description": "Bad Request",
+ "description": "Problem as defined in https://tools.ietf.org/html/rfc7807",
"schema": {
- "type": "string"
+ "$ref": "#/definitions/ErrorInfo"
+ },
+ "headers": {
+ "Content-Type": {
+ "type": "string",
+ "description": "application/problem+json"
+ }
}
}
}
}
},
"definitions": {
- "jobs.BufferTimeout": {
+ "": {
+ "type": "object",
+ "properties": {
+ "registeredStatus": {
+ "description": "The registration status of the producer in Information Coordinator Service. Either `registered` or `not registered`",
+ "type": "string",
+ "example": "registered"
+ }
+ }
+ },
+ "BufferTimeout": {
"type": "object",
"properties": {
"maxSize": {
}
}
},
- "jobs.JobInfo": {
+ "ErrorInfo": {
+ "type": "object",
+ "properties": {
+ "detail": {
+ "description": "A human-readable explanation specific to this occurrence of the problem.",
+ "type": "string",
+ "example": "Info job type not found"
+ },
+ "instance": {
+ "description": "A URI reference that identifies the specific occurrence of the problem.",
+ "type": "string"
+ },
+ "status": {
+ "description": "The HTTP status code generated by the origin server for this occurrence of the problem.",
+ "type": "integer",
+ "example": 400
+ },
+ "title": {
+ "description": "A short, human-readable summary of the problem type.",
+ "type": "string"
+ },
+ "type": {
+ "description": "A URI reference that identifies the problem type.",
+ "type": "string"
+ }
+ }
+ },
+ "JobInfo": {
"type": "object",
"properties": {
"info_job_data": {
- "$ref": "#/definitions/jobs.Parameters"
+ "$ref": "#/definitions/Parameters"
},
"info_job_identity": {
"type": "string"
}
}
},
- "jobs.Parameters": {
+ "Parameters": {
"type": "object",
"properties": {
"bufferTimeout": {
- "$ref": "#/definitions/jobs.BufferTimeout"
+ "$ref": "#/definitions/BufferTimeout"
}
}
}
definitions:
- jobs.BufferTimeout:
+ "":
+ properties:
+ registeredStatus:
+ description: The registration status of the producer in Information Coordinator
+ Service. Either `registered` or `not registered`
+ example: registered
+ type: string
+ type: object
+ BufferTimeout:
properties:
maxSize:
type: integer
maxTimeMiliseconds:
type: integer
type: object
- jobs.JobInfo:
+ ErrorInfo:
+ properties:
+ detail:
+ description: A human-readable explanation specific to this occurrence of the
+ problem.
+ example: Info job type not found
+ type: string
+ instance:
+ description: A URI reference that identifies the specific occurrence of the
+ problem.
+ type: string
+ status:
+ description: The HTTP status code generated by the origin server for this
+ occurrence of the problem.
+ example: 400
+ type: integer
+ title:
+ description: A short, human-readable summary of the problem type.
+ type: string
+ type:
+ description: A URI reference that identifies the problem type.
+ type: string
+ type: object
+ JobInfo:
properties:
info_job_data:
- $ref: '#/definitions/jobs.Parameters'
+ $ref: '#/definitions/Parameters'
info_job_identity:
type: string
info_type_identity:
target_uri:
type: string
type: object
- jobs.Parameters:
+ Parameters:
properties:
bufferTimeout:
- $ref: '#/definitions/jobs.BufferTimeout'
+ $ref: '#/definitions/BufferTimeout'
type: object
info:
contact: {}
"200":
description: ""
"400":
- description: Bad Request
+ description: Problem as defined in https://tools.ietf.org/html/rfc7807
+ headers:
+ Content-Type:
+ description: application/problem+json
+ type: string
schema:
- type: string
+ $ref: '#/definitions/ErrorInfo'
summary: Set log level
tags:
- Admin
get:
description: Get the status of the producer. Will show if the producer has registered
in ICS.
+ produces:
+ - application/json
responses:
"200":
- description: ""
+ description: OK
+ schema:
+ $ref: '#/definitions/'
summary: Get status
tags:
- Data producer (callbacks)
name: user
required: true
schema:
- $ref: '#/definitions/jobs.JobInfo'
+ $ref: '#/definitions/JobInfo'
responses:
"200":
description: ""
"400":
- description: Bad Request
+ description: Problem as defined in https://tools.ietf.org/html/rfc7807
+ headers:
+ Content-Type:
+ description: application/problem+json
+ type: string
schema:
- type: string
+ $ref: '#/definitions/ErrorInfo'
summary: Add info job
tags:
- Data producer (callbacks)
#
##############################################################################
-swag init
\ No newline at end of file
+go get -u github.com/swaggo/swag/cmd/swag
+swag init --output api
+swag fmt
\ No newline at end of file
go 1.17
require (
+ github.com/confluentinc/confluent-kafka-go v1.8.2
github.com/gorilla/mux v1.8.0
github.com/hashicorp/go-retryablehttp v0.7.0
github.com/sirupsen/logrus v1.8.1
github.com/stretchr/testify v1.7.0
+ github.com/swaggo/http-swagger v1.1.2
+ github.com/swaggo/swag v1.7.8
)
require (
github.com/KyleBanks/depth v1.2.1 // indirect
github.com/PuerkitoBio/purell v1.1.1 // indirect
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 // indirect
- github.com/confluentinc/confluent-kafka-go v1.8.2 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
github.com/stretchr/objx v0.1.0 // indirect
github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 // indirect
- github.com/swaggo/http-swagger v1.1.2 // indirect
- github.com/swaggo/swag v1.7.8 // indirect
github.com/urfave/cli/v2 v2.3.0 // indirect
golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect
golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e // indirect
InfoJobData Parameters `json:"info_job_data"`
InfoTypeIdentity string `json:"info_type_identity"`
sourceType sourceType
-}
+} // @name JobInfo
type JobTypesManager interface {
LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition
type Parameters struct {
BufferTimeout BufferTimeout `json:"bufferTimeout"`
-}
+} // @name Parameters
type BufferTimeout struct {
MaxSize int `json:"maxSize"`
MaxTimeMiliseconds int64 `json:"maxTimeMiliseconds"`
-}
+} // @name BufferTimeout
func (j *job) start() {
if j.isJobBuffered() {
const logLevelToken = "level"
const logAdminPath = "/admin/log"
+type ErrorInfo struct {
+ // A URI reference that identifies the problem type.
+ Type string `json:"type" swaggertype:"string"`
+ // A short, human-readable summary of the problem type.
+ Title string `json:"title" swaggertype:"string"`
+ // The HTTP status code generated by the origin server for this occurrence of the problem.
+ Status int `json:"status" swaggertype:"integer" example:"400"`
+ // A human-readable explanation specific to this occurrence of the problem.
+ Detail string `json:"detail" swaggertype:"string" example:"Info job type not found"`
+ // A URI reference that identifies the specific occurrence of the problem.
+ Instance string `json:"instance" swaggertype:"string"`
+} // @name ErrorInfo
+
type ProducerCallbackHandler struct {
jobsManager jobs.JobsManager
}
// @Accept json
// @Param user body jobs.JobInfo true "Info job data"
// @Success 200
-// @Failure 400 {string} Cause of error
+// @Failure 400 {object} ErrorInfo "Problem as defined in https://tools.ietf.org/html/rfc7807"
+// @Header 400 {string} Content-Type "application/problem+json"
// @Router /info_job [post]
func (h *ProducerCallbackHandler) addInfoJobHandler(w http.ResponseWriter, r *http.Request) {
b, readErr := ioutil.ReadAll(r.Body)
if readErr != nil {
- http.Error(w, fmt.Sprintf("Unable to read body due to: %v", readErr), http.StatusBadRequest)
+ returnError(fmt.Sprintf("Unable to read body due to: %v", readErr), w)
return
}
jobInfo := jobs.JobInfo{}
if unmarshalErr := json.Unmarshal(b, &jobInfo); unmarshalErr != nil {
- http.Error(w, fmt.Sprintf("Invalid json body. Cause: %v", unmarshalErr), http.StatusBadRequest)
+ returnError(fmt.Sprintf("Invalid json body. Cause: %v", unmarshalErr), w)
return
}
if err := h.jobsManager.AddJobFromRESTCall(jobInfo); err != nil {
- http.Error(w, fmt.Sprintf("Invalid job info. Cause: %v", err), http.StatusBadRequest)
+ returnError(fmt.Sprintf("Invalid job info. Cause: %v", err), w)
+ return
}
}
// @Tags Admin
// @Param level query string false "string enums" Enums(Error, Warn, Info, Debug)
// @Success 200
-// @Failure 400 {string} Cause of error
+// @Failure 400 {object} ErrorInfo "Problem as defined in https://tools.ietf.org/html/rfc7807"
+// @Header 400 {string} Content-Type "application/problem+json"
// @Router /admin/log [put]
func (h *ProducerCallbackHandler) setLogLevel(w http.ResponseWriter, r *http.Request) {
query := r.URL.Query()
if loglevel, err := log.ParseLevel(logLevelStr); err == nil {
log.SetLevel(loglevel)
} else {
- http.Error(w, fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), http.StatusBadRequest)
+ returnError(fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), w)
return
}
}
func (h *methodNotAllowedHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
http.Error(w, "Method is not supported.", http.StatusMethodNotAllowed)
}
+
+func returnError(msg string, w http.ResponseWriter) {
+ errInfo := ErrorInfo{
+ Status: http.StatusBadRequest,
+ Detail: msg,
+ }
+ w.Header().Add("Content-Type", "application/problem+json")
+ w.WriteHeader(http.StatusBadRequest)
+ json.NewEncoder(w).Encode(errInfo)
+}
mockReturn error
}
tests := []struct {
- name string
- args args
- wantedStatus int
- wantedBody string
+ name string
+ args args
+ wantedStatus int
+ wantedErrorInfo *ErrorInfo
}{
{
name: "AddInfoJobToJobsHandler with correct job, should return OK",
mockReturn: errors.New("error"),
},
wantedStatus: http.StatusBadRequest,
- wantedBody: "Invalid job info. Cause: error",
+ wantedErrorInfo: &ErrorInfo{
+ Status: http.StatusBadRequest,
+ Detail: "Invalid job info. Cause: error",
+ },
},
}
for _, tt := range tests {
handler.ServeHTTP(responseRecorder, r)
assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
- assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
+ if tt.wantedErrorInfo != nil {
+ var actualErrInfo ErrorInfo
+ err := json.Unmarshal(getBody(responseRecorder, t), &actualErrInfo)
+ if err != nil {
+ t.Error("Unable to unmarshal error body", err)
+ t.Fail()
+ }
+ assertions.Equal(*tt.wantedErrorInfo, actualErrInfo, tt.name)
+ assertions.Equal("application/problem+json", responseRecorder.Result().Header.Get("Content-Type"))
+ }
jobsHandlerMock.AssertCalled(t, "AddJobFromRESTCall", tt.args.job)
})
}
logLevel string
}
tests := []struct {
- name string
- args args
- wantedStatus int
- wantedBody string
+ name string
+ args args
+ wantedStatus int
+ wantedErrorInfo *ErrorInfo
}{
{
name: "Set to valid log level, should return OK",
logLevel: "bad",
},
wantedStatus: http.StatusBadRequest,
- wantedBody: "Invalid log level: bad",
+ wantedErrorInfo: &ErrorInfo{
+ Detail: "Invalid log level: bad. Log level will not be changed!",
+ Status: http.StatusBadRequest,
+ },
},
}
for _, tt := range tests {
handler.ServeHTTP(responseRecorder, r)
assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
- assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
+ if tt.wantedErrorInfo != nil {
+ var actualErrInfo ErrorInfo
+ err := json.Unmarshal(getBody(responseRecorder, t), &actualErrInfo)
+ if err != nil {
+ t.Error("Unable to unmarshal error body", err)
+ t.Fail()
+ }
+ assertions.Equal(*tt.wantedErrorInfo, actualErrInfo, tt.name)
+ assertions.Equal("application/problem+json", responseRecorder.Result().Header.Get("Content-Type"))
+ }
})
}
}
return nil
}
}
+
+func getBody(responseRecorder *httptest.ResponseRecorder, t *testing.T) []byte {
+ buf := new(bytes.Buffer)
+ if _, err := buf.ReadFrom(responseRecorder.Body); err != nil {
+ t.Error("Unable to read error body", err)
+ t.Fail()
+ }
+ return buf.Bytes()
+}
import (
"crypto/tls"
+ "encoding/json"
"fmt"
"net/http"
"time"
"github.com/gorilla/mux"
log "github.com/sirupsen/logrus"
- _ "oransc.org/nonrtric/dmaapmediatorproducer/docs"
+ _ "oransc.org/nonrtric/dmaapmediatorproducer/api"
"oransc.org/nonrtric/dmaapmediatorproducer/internal/config"
"oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
"oransc.org/nonrtric/dmaapmediatorproducer/internal/kafkaclient"
configuration = config.New()
}
-// @title DMaaP Mediator Producer
-// @version 1.1.0
+// @title DMaaP Mediator Producer
+// @version 1.1.0
// @license.name Apache 2.0
// @license.url http://www.apache.org/licenses/LICENSE-2.0.html
}
}
-// @Summary Get status
-// @Description Get the status of the producer. Will show if the producer has registered in ICS.
-// @Tags Data producer (callbacks)
-// @Success 200
-// @Router /health_check [get]
+type ProducerStatus struct {
+ // The registration status of the producer in Information Coordinator Service. Either `registered` or `not registered`
+ RegisteredStatus string `json:"registeredStatus" swaggertype:"string" example:"registered"`
+} // @name ProducerStatus
+
+// @Summary Get status
+// @Description Get the status of the producer. Will show if the producer has registered in ICS.
+// @Tags Data producer (callbacks)
+// @Produce json
+// @Success 200 {object} ProducerStatus
+// @Router /health_check [get]
func statusHandler(w http.ResponseWriter, r *http.Request) {
- registeredStatus := "not registered"
+ status := ProducerStatus{
+ RegisteredStatus: "not registered",
+ }
if registered {
- registeredStatus = "registered"
+ status.RegisteredStatus = "registered"
}
- fmt.Fprintf(w, `{"status": "%v"}`, registeredStatus)
+ json.NewEncoder(w).Encode(status)
}
-// @Summary Get Swagger Documentation
-// @Description Get the Swagger API documentation for the producer.
-// @Tags Admin
-// @Success 200
-// @Router /swagger [get]
+// @Summary Get Swagger Documentation
+// @Description Get the Swagger API documentation for the producer.
+// @Tags Admin
+// @Success 200
+// @Router /swagger [get]
func addSwaggerHandler(r *mux.Router) {
r.PathPrefix("/swagger").Handler(httpSwagger.WrapHandler)
}
import (
"bytes"
+ "fmt"
"io/ioutil"
"net/http"
"os/exec"
func TestGenerateSwaggerDocs(t *testing.T) {
cmd := exec.Command("./generate_swagger_docs.sh")
- cmd.Run()
+ err := cmd.Run()
+ if err != nil {
+ fmt.Println("Error generating Swagger:", err)
+ }
}
func TestValidateConfiguration(t *testing.T) {
:header: "API name", "|swagger-icon|", "|yaml-icon|"
:widths: 10,5, 5
- "DMaaP Mediator Producer API", ":download:`link <../dmaap-mediator-producer/docs/swagger.json>`", ":download:`link <../dmaap-mediator-producer/docs/swagger.yaml>`"
+ "DMaaP Mediator Producer API", ":download:`link <../dmaap-mediator-producer/api/swagger.json>`", ":download:`link <../dmaap-mediator-producer/api/swagger.yaml>`"
Non-RT-RIC App Catalogue (Initial)
==================================
{
'name': 'DMaaP Mediator Producer API',
'page': 'dmaap-mediator-producer-api',
- 'spec': '../dmaap-mediator-producer/docs/swagger.json',
+ 'spec': '../dmaap-mediator-producer/api/swagger.json',
'embed': True,
}
]
-Subproject commit 0f8b20544745afaf9c7b38140b9516667d9c4752
+Subproject commit 3d2a09b1bc7d6798c8083bfc3dc04c69a1b709c7
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="NGW CBS CONSUL"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ #Temporarily switch to http/https if dmaap is used. Otherwise it is not possible to push config
+ if [ $__httpx == "HTTPS" ]; then
+ use_agent_rest_https
+ else
+ use_agent_rest_http
+ fi
+ api_put_configuration 200 ".consul_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_https
+ else
+ use_agent_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_http
+ else
+ use_agent_rest_http
+ fi
+ fi
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
mr_equal requests_submitted 0
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_cr 1
if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
+ if [[ "$PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then
+ start_consul_cbs
+ fi
fi
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ consul_config_app ".consul_config.json"
+ fi
fi
api_get_status 200
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_ric_simulators ricsim_g2 1 STD_1.1.3
+
+ sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
+ sim_put_policy_type 201 ricsim_g1_1 2 testdata/OSC/sim_2.json
+
if [ "$PMS_VERSION" == "V2" ]; then
start_ric_simulators ricsim_g3 1 STD_2.0.0
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
fi
start_mr
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
- fi
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ #Temporarily switch to http/https if dmaap is used. Otherwise it is not possible to push config
+ if [ $__httpx == "HTTPS" ]; then
+ use_agent_rest_https
+ else
+ use_agent_rest_http
+ fi
+
+ if [[ $interface != *"DMAAP"* ]]; then
+ echo "{}" > ".consul_config_incorrect.json"
+ api_put_configuration 400 ".consul_config_incorrect.json"
+ fi
+
+ api_put_configuration 200 ".consul_config.json"
+ api_get_configuration 200 ".consul_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_https
+ else
+ use_agent_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_http
+ else
+ use_agent_rest_http
+ fi
+ fi
- sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
- sim_put_policy_type 201 ricsim_g1_1 2 testdata/OSC/sim_2.json
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
+ fi
if [ "$PMS_VERSION" == "V2" ]; then
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS_0_2_0 testdata/STD2/sim_qos.json
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
-
api_equal json:rics 3 300
api_equal json:policy-types 5 120
api_get_status 200
+ api_get_status_root 200
+
echo "############################################"
echo "##### Service registry and supervision #####"
echo "############################################"
else
notificationurl=""
fi
+ if [[ $interface != *"DMAAP"* ]]; then
+ # Badly formatted json is not possible to send via dmaap
+ api_put_policy 400 "unregistered-service" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi_bad_template.json
+ fi
deviation "TR10 - agent allows policy creation on unregistered service (orig problem) - test combo $interface and $__httpx"
#Kept until decison
#api_put_policy 400 "unregistered-service" ricsim_g1_1 1 2000 NOTRANSIENT testdata/OSC/pi1_template.json
api_put_policy 200 "service10" ricsim_g3_1 STD_QOS2_0.1.0 5200 false $notificationurl testdata/STD2/pi_qos2_template.json
fi
+ api_get_policy_status 404 1
+ api_get_policy_status 404 2
VAL='NOT IN EFFECT'
api_get_policy_status 200 5000 OSC "$VAL" "false"
api_get_policy_status 200 5100 STD "UNDEFINED"
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_cr 1
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
set_agent_debug
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=" "
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES=""
+
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
#Supported run modes
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES="ICS PRODSTUB CP CR KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
-KUBE_PRESTARTED_IMAGES=""
+KUBE_PRESTARTED_IMAGES="NGW"
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_policy_agent PROXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
prepare_consul_config NOSDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
start_cr 1
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=" "
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES=""
+
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA"
#Supported run modes
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_cr 1
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
if [ ! -z "$NRT_GATEWAY_APP_NAME" ]; then
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+        #Temporary switch to http/https if DMAAP is used. Otherwise it is not possible to push config
+ if [ $__httpx == "HTTPS" ]; then
+ use_agent_rest_https
+ else
+ use_agent_rest_http
+ fi
+ api_put_configuration 200 ".consul_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_https
+ else
+ use_agent_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_http
+ else
+ use_agent_rest_http
+ fi
+ fi
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
api_get_status 200
sim_equal ricsim_g1_1 num_instances 0
- sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 300
+ if [[ $interface = *"SDNC"* ]]; then
+ deviation "Sync over SDNC seem to be slower from Jakarta version..."
+ sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 2000
+ else
+ sim_equal ricsim_g1_1 num_instances $NUM_POLICIES 300
+ fi
START_ID2=$(($START_ID+$NUM_POLICIES))
sim_post_delete_instances 200 ricsim_g2_1
sim_equal ricsim_g2_1 num_instances 0
-
- sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 300
+ if [[ $interface = *"SDNC"* ]]; then
+ deviation "Sync over SDNC seem to be slower from Jakarta version..."
+ sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 2000
+ else
+ sim_equal ricsim_g2_1 num_instances $NUM_POLICIES 300
+ fi
api_delete_policy 204 $(($START_ID+47))
sim_post_delete_instances 200 ricsim_g1_1
- sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300
+ if [[ $interface = *"SDNC"* ]]; then
+ deviation "Sync over SDNC seem to be slower from Jakarta version..."
+ sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 2000
+ else
+ sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300
+ fi
api_delete_policy 204 $(($START_ID2+37))
sim_post_delete_instances 200 ricsim_g2_1
- sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300
+ if [[ $interface = *"SDNC"* ]]; then
+ deviation "Sync over SDNC seem to be slower from Jakarta version..."
+ sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 2000
+
+ sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 2000
+ else
+ sim_equal ricsim_g1_1 num_instances $(($NUM_POLICIES-2)) 300
- sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 300
+ sim_equal ricsim_g2_1 num_instances $(($NUM_POLICIES-3)) 300
+ fi
api_equal json:policies $(($NUM_POLICIES-2+$NUM_POLICIES-3))
NUM_CR=10 # Number of callback receivers, divide all callbacks to this number of servers - for load sharing
## Note: The number of jobs must be a multiple of the number of CRs in order to calculate the number of expected events in each CR
NUM_JOBS=200 # Mediator and adapter gets same number of jobs for every type
-
if [ $NUM_JOBS -lt $NUM_CR ]; then
__log_conf_fail_general "Number of jobs: $NUM_JOBS must be greater then the number of CRs: $NUM_CR"
fi
set_dmaapadp_trace
-start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ kafkapc_api_create_topic 201 "unauthenticated.dmaapmed_kafka.text" "text/plain"
+
+ kafkapc_api_start_sending 200 "unauthenticated.dmaapmed_kafka.text"
+fi
+
+start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_DATA_FILE
ics_equal json:data-producer/v1/info-producers 2 60
# Check producers
ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+if [[ "$DMAAP_MED_FEATURE_LEVEL" != *"KAFKATYPES"* ]]; then
+ ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
+else
+ ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka Kafka_TestTopic
+fi
# Create jobs for adapter - CR stores data as MD5 hash
cr_index=$(($i%$NUM_CR))
service_mr="CR_SERVICE_MR_PATH_"$cr_index
service_app="CR_SERVICE_APP_PATH_"$cr_index
- ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
+ ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-mediator/job-template.json
done
print_timer
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ # Create jobs for mediator kafka - CR stores data as MD5 hash
+ start_timer "Create mediator (kafka) jobs: $NUM_JOBS"
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ cr_index=$(($i%$NUM_CR))
+ service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-med-kafka-$i Kafka_TestTopic ${!service_text}/job-med-kafka-data$i"?storeas=md5" info-owner-med-kafka-$i ${!service_app}/job_status_info-owner-med-kafka-$i testdata/dmaap-mediator/job-template-1-kafka.json
+ done
+ print_timer
+fi
+
# Check job status
for ((i=1; i<=$NUM_JOBS; i++))
do
ics_api_a1_get_job_status 200 job-med-$i ENABLED 30
ics_api_a1_get_job_status 200 job-adp-$i ENABLED 30
ics_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ ics_api_a1_get_job_status 200 job-med-kafka-$i ENABLED 30
+ fi
done
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
# Check received data callbacks from adapter
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
# Check received data callbacks from adapter kafka
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
# Check received data callbacks from mediator
cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
done
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+    ## Send text file via kafka producer to mediator kafka
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+ kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 1 30
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+ kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 2 30
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+ kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 3 30
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+ kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 4 30
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+ kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 5 30
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+
+    # Check received data callbacks from mediator kafka
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ done
+fi
# Send small json via message-router to adapter
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
print_timer
-# Send small text via message-routere to adapter
+# Send small text via kafka producer to adapter
kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------1'
kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------3'
kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 7 30
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
done
print_timer
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100
done
print_timer
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+    # Send small text via kafka producer to mediator
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------0'
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------2'
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 7 30
+
+    # Wait for data reception, mediator kafka
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+ start_timer "Data delivery mediator kafka, 2 strings per job"
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 60
+ done
+ print_timer
+fi
+
# Check received number of messages for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
cr_index=$(($i%$NUM_CR))
cr_equal $cr_index received_callbacks?id=job-med-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callback_batches?id=job-med-data$i $DATA_DELIV_JOBS
cr_equal $cr_index received_callbacks?id=job-adp-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callback_batches?id=job-adp-data$i $DATA_DELIV_JOBS
cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callback_batches?id=job-adp-kafka-data$i $DATA_DELIV_JOBS
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ cr_equal $cr_index received_callbacks?id=job-med-kafka-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callback_batches?id=job-med-kafka-data$i $DATA_DELIV_JOBS
+ fi
done
# Check received data and order for mediator and adapter callbacks
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-3"}'
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2'
+ fi
done
# Set delay in the callback receiver to slow down callbacks
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100
done
print_timer
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100
done
print_timer
for ((i=0; i<$NUM_CR; i++))
do
cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100
done
print_timer
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+    # Send small text via kafka producer to mediator kafka
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------4'
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text" "text/plain" 'Message-------6'
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text/counters/sent 9 30
+
+    # Wait for data reception, mediator kafka
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+ start_timer "Data delivery mediator kafka with $SEC_DELAY seconds delay in consumer, 2 strings per job"
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+ cr_equal $i received_callback_batches $EXPECTED_DATA_DELIV 100
+ done
+ print_timer
+fi
+
# Check received number of messages for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
cr_index=$(($i%$NUM_CR))
cr_equal $cr_index received_callbacks?id=job-med-data$i 9
+ cr_equal $cr_index received_callback_batches?id=job-med-data$i 9
cr_equal $cr_index received_callbacks?id=job-adp-data$i 9
+ cr_equal $cr_index received_callback_batches?id=job-adp-data$i 9
cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i 9
+ cr_equal $cr_index received_callback_batches?id=job-adp-kafka-data$i 9
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ cr_equal $cr_index received_callbacks?id=job-med-kafka-data$i 9
+ cr_equal $cr_index received_callback_batches?id=job-med-kafka-data$i 9
+ fi
done
# Check received data and order for mediator and adapter callbacks
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-7"}'
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------5'
cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------7'
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------4'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------6'
+ fi
done
#### TEST COMPLETE ####
--- /dev/null
+#!/usr/bin/env bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+TC_ONELINE_DESCR="App test DMAAP Meditor and DMAAP Adapter with 100 jobs,types and topics"
+
+#App names to include in the test when running docker, space separated list
+DOCKER_INCLUDED_IMAGES="ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HTTPPROXY"
+
+#App names to include in the test when running kubernetes, space separated list
+KUBE_INCLUDED_IMAGES=" ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC HTTPPROXY"
+
+#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
+KUBE_PRESTARTED_IMAGES=""
+
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES=""
+
+#Supported test environment profiles
+SUPPORTED_PROFILES="ORAN-E-RELEASE ORAN-F-RELEASE"
+#Supported run modes
+SUPPORTED_RUNMODES="DOCKER KUBE"
+
+. ../common/testcase_common.sh $@
+
+setup_testenvironment
+
+#### TEST BEGIN ####
+
+#Local vars in test script
+##########################
+FLAT_A1_EI="1"
+NUM_CR=1 # Number of callback receivers, max 1
+## Note: The number of jobs must be a multiple of the number of CRs in order to calculate the number of expected events in each CR
+NUM_JOBS=100 # Mediator and adapter gets same number of jobs for every type
+if [ $NUM_CR -gt 1 ]; then
+ __log_conf_fail_general "Max number of callback receivers is one in this test"
+fi
+
+clean_environment
+
+#use_cr_https
+use_cr_http
+use_ics_rest_https
+use_mr_https
+use_dmaapadp_https
+use_dmaapmed_https
+
+start_kube_proxy
+
+start_cr $NUM_CR
+
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
+
+set_ics_trace
+
+start_mr
+
+start_kafkapc
+
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ kafkapc_api_create_topic 201 "unauthenticated.dmaapadp_kafka.text$i" "text/plain"
+
+ kafkapc_api_start_sending 200 "unauthenticated.dmaapadp_kafka.text$i"
+done
+
+adp_med_type_list=""
+adp_config_data='{"types": ['
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ if [ $i -ne 1 ]; then
+ adp_config_data=$adp_config_data','
+ fi
+ adp_config_data=$adp_config_data'{"id": "ADPKafkaType'$i'","kafkaInputTopic": "unauthenticated.dmaapadp_kafka.text'$i'","useHttpProxy": false}'
+ adp_med_type_list="$adp_med_type_list ADPKafkaType$i "
+done
+adp_config_data=$adp_config_data']}'
+echo $adp_config_data > tmp/adp_config_data.json
+
+start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE tmp/adp_config_data.json
+
+set_dmaapadp_trace
+
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ kafkapc_api_create_topic 201 "unauthenticated.dmaapmed_kafka.text$i" "text/plain"
+
+ kafkapc_api_start_sending 200 "unauthenticated.dmaapmed_kafka.text$i"
+ done
+fi
+
+med_config_data='{"types": ['
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ if [ $i -ne 1 ]; then
+ med_config_data=$med_config_data','
+ fi
+ med_config_data=$med_config_data'{"id": "MEDKafkaType'$i'","kafkaInputTopic": "unauthenticated.dmaapmed_kafka.text'$i'"}'
+ adp_med_type_list="$adp_med_type_list MEDKafkaType$i "
+done
+med_config_data=$med_config_data']}'
+echo $med_config_data > tmp/med_config_data.json
+
+start_dmaapmed NOPROXY tmp/med_config_data.json
+
+ics_equal json:data-producer/v1/info-producers 2 60
+
+# Check producers
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+ics_api_idc_get_type_ids 200 $adp_med_type_list
+
+
+# Create jobs for adapter kafka - CR stores data as MD5 hash
+start_timer "Create adapter (kafka) jobs: $NUM_JOBS"
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+    # Max buffer timeout is about 160 sec for adapter jobs
+ adp_timeout=$(($i*1000))
+ adp_config_data='{"filter":"Message*","maxConcurrency": 1,"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$adp_timeout'}}'
+ echo $adp_config_data > tmp/adp_config_data.json
+
+ cr_index=$(($i%$NUM_CR))
+ service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-adp-kafka-$i "ADPKafkaType$i" ${!service_text}/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i ${!service_app}/callbacks-null tmp/adp_config_data.json
+
+done
+print_timer
+
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ # Create jobs for mediator kafka - CR stores data as MD5 hash
+ start_timer "Create mediator (kafka) jobs: $NUM_JOBS"
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ med_timeout=$(($i*5000))
+ med_config_data='{"bufferTimeout": {"maxSize": 100,"maxTimeMiliseconds": '$med_timeout'}}'
+ echo $med_config_data > tmp/med_config_data.json
+ cr_index=$(($i%$NUM_CR))
+ service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-med-kafka-$i "MEDKafkaType$i" ${!service_text}/job-med-kafka-data$i"?storeas=md5" info-owner-med-kafka-$i ${!service_app}/callbacks-null tmp/med_config_data.json
+ done
+ print_timer
+fi
+
+# Check job status
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ ics_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
+ if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+ ics_api_a1_get_job_status 200 job-med-kafka-$i ENABLED 30
+ fi
+done
+
+
+EXPECTED_DATA_DELIV=0 #Total delivered msg per CR
+EXPECTED_BATCHES_DELIV=0 #Total delivered batches per CR
+DATA_DELIV_JOBS=0 #Total delivered msg per job per CR
+
+sleep_wait 60
+
+start_timer "Data delivery adapter kafka, 2 strings per job (short buffer timeouts)"
+# Send small text via kafka producer to adapter
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Message-------1'$i
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Discard-------3'$i #Should be filtered out
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text"$i "text/plain" 'Message-------3'$i
+done
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text$i/counters/sent 3 30
+done
+
+# Wait for data reception, adapter kafka
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_BATCHES_DELIV))
+
+adp_timeout=$(($NUM_JOBS*1*2+60)) #NUM_JOBS*MIN_BUFFERTIMEOUT*2+60_SEC_DELAY
+for ((i=0; i<$NUM_CR; i++))
+do
+ #tmp_receptio
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV $adp_timeout
+ cr_greater_or_equal $i received_callback_batches $EXPECTED_BATCHES_DELIV
+done
+print_timer
+
+# Check received data callbacks from adapter
+for ((i=1; i<=$NUM_JOBS; i++))
+do
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'$i
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'$i
+done
+
+if [[ "$DMAAP_MED_FEATURE_LEVEL" == *"KAFKATYPES"* ]]; then
+
+ PREV_DATA_DELIV=$(cr_read 0 received_callbacks)
+ PREV_BATCHES_DELIV=$(cr_read 0 received_callback_batches)
+ start_timer "Data delivery mediator kafka, 2 strings per job (long buffer timeouts)"
+    # Send small text via kafka producer to mediator
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text$i" "text/plain" 'Message-------0'$i
+ kafkapc_api_post_msg 200 "unauthenticated.dmaapmed_kafka.text$i" "text/plain" 'Message-------2'$i
+ done
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ kafkapc_equal topics/unauthenticated.dmaapmed_kafka.text$i/counters/sent 2 30
+ done
+
+    # Wait for data reception, mediator kafka
+
+ EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$PREV_DATA_DELIV))
+ EXPECTED_BATCHES_DELIV=$(($NUM_JOBS/$NUM_CR+$PREV_BATCHES_DELIV))
+
+ med_timeout=$(($NUM_JOBS*5*2+60)) #NUM_JOBS*MIN_BUFFERTIMEOUT*2+60_SEC_DELAY
+ for ((i=0; i<$NUM_CR; i++))
+ do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV $med_timeout
+ cr_greater_or_equal $i received_callback_batches $EXPECTED_BATCHES_DELIV
+ done
+
+ print_timer
+
+ # Check received data callbacks from mediator
+ for ((i=1; i<=$NUM_JOBS; i++))
+ do
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------0'$i
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-kafka-data$i 'Message-------2'$i
+ done
+fi
+
+#### TEST COMPLETE ####
+
+store_logs END
+
+print_result
+
+auto_clean_environment
TC_ONELINE_DESCR="Resync of RIC via changes in the consul config or pushed config"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM KUBEPROXY"
+
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
if [ "$PMS_VERSION" == "V2" ]; then
TESTED_VARIANTS="CONSUL NOCONSUL"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ TESTED_VARIANTS="NOCONSUL"
+ fi
else
TESTED_VARIANTS="CONSUL"
fi
fi
check_policy_agent_logs
- check_sdnc_logs
store_logs END_$consul_conf
done
TC_ONELINE_DESCR="Change supported policy types and reconfigure rics"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="CP CR MR PA RICSIM SDNC KUBEPROXY"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL"
+
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
#Supported run modes
start_mr
if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
+ if [[ "$PMS_FEATURE_LEVEL" != *"NOCONSUL"* ]]; then
+ start_consul_cbs
+ fi
fi
# Create first config
prepare_consul_config NOSDNC ".consul_config_all.json"
fi
- start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
+ if [ $RUNMODE == "KUBE" ] && [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/application2.yaml
+ else
+ start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
+ fi
set_agent_trace
#Load first config
if [ $RUNMODE == "KUBE" ]; then
- agent_load_config ".consul_config_initial.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ api_put_configuration 200 ".consul_config_initial.json"
+ api_get_configuration 200 ".consul_config_initial.json"
+ else
+ agent_load_config ".consul_config_initial.json"
+ fi
else
- consul_config_app ".consul_config_initial.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config_initial.json"
+ api_get_configuration 200 ".consul_config_initial.json"
+ else
+ consul_config_app ".consul_config_initial.json"
+ fi
fi
for ((i=1; i<=${NUM_RICS}; i++))
#Load config with all rics
if [ $RUNMODE == "KUBE" ]; then
- agent_load_config ".consul_config_all.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ api_put_configuration 200 ".consul_config_all.json"
+ api_get_configuration 200 ".consul_config_all.json"
+ else
+ agent_load_config ".consul_config_all.json"
+ fi
else
- consul_config_app ".consul_config_all.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config_all.json"
+ api_get_configuration 200 ".consul_config_all.json"
+ else
+ consul_config_app ".consul_config_all.json"
+ fi
fi
api_equal json:rics 10 120
# Load config with reduced number of rics
if [ $RUNMODE == "KUBE" ]; then
- agent_load_config ".consul_config_initial.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ api_put_configuration 200 ".consul_config_initial.json"
+ api_get_configuration 200 ".consul_config_initial.json"
+ else
+ agent_load_config ".consul_config_initial.json"
+ fi
else
- consul_config_app ".consul_config_initial.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config_initial.json"
+ api_get_configuration 200 ".consul_config_initial.json"
+ else
+ consul_config_app ".consul_config_initial.json"
+ fi
fi
api_equal json:rics 8 120
# Load config with all rics
if [ $RUNMODE == "KUBE" ]; then
- agent_load_config ".consul_config_all.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"INITIALCONFIGMAP"* ]]; then
+ api_put_configuration 200 ".consul_config_all.json"
+ api_get_configuration 200 ".consul_config_all.json"
+ else
+ agent_load_config ".consul_config_all.json"
+ fi
else
- consul_config_app ".consul_config_all.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config_all.json"
+ api_get_configuration 200 ".consul_config_all.json"
+ else
+ consul_config_app ".consul_config_all.json"
+ fi
fi
api_equal json:rics 10 120
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
mr_equal requests_submitted 0
+ sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
+ if [ "$PMS_VERSION" == "V2" ]; then
+ sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
+ fi
if [[ $interface == "SDNC" ]]; then
start_sdnc
prepare_consul_config SDNC ".consul_config.json"
prepare_consul_config NOSDNC ".consul_config.json"
fi
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
-
api_get_status 200
sim_print ricsim_g1_1 interface
sim_print ricsim_g3_1 interface
fi
- sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
-
if [ "$PMS_VERSION" == "V2" ]; then
- sim_put_policy_type 201 ricsim_g3_1 STD_QOS2_0.1.0 testdata/STD2/sim_qos2.json
-
api_equal json:policy-types 3 300 #Wait for the agent to refresh types from the simulators
else
api_equal json:policy_types 2 300 #Wait for the agent to refresh types from the simulators
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-ISTANBUL ONAP-JAKARTA ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
set_agent_debug
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
start_cr 1
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_policy_agent NORPOXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
- agent_load_config ".consul_config.json"
+ agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
start_sdnc
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
set_agent_debug
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [[ $interface = *"SDNC"* ]]; then
start_sdnc
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
start_mr # Not used, but removes error messages from the agent log
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
use_agent_rest_http
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
prepare_consul_config NOSDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
api_get_status 200
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ICS RC SDNC DMAAPMED DMAAPADP"
+#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
+#the image is not configured in the supplied env_file
+#Used for images not applicable to all supported profile
+CONDITIONALLY_IGNORED_IMAGES=""
+
#Supported test environment profiles
SUPPORTED_PROFILES="ORAN-E-RELEASE"
#Supported run modes
else
ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
ics_api_edp_get_type_2 200 type1
- ics_api_edp_get_type_ids 200 STD_Fault_Messages ExampleInformationTypeKafka ExampleInformationType type1
+
+ ics_api_edp_get_type_ids 200 type1
ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
# Dmaap mediator and adapter
start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_DATA_FILE
-start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
+start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_DATA_FILE
ics_equal json:data-producer/v1/info-producers 3 120
-ics_api_idc_get_type_ids 200 ExampleInformationType ExampleInformationTypeKafka STD_Fault_Messages type-1
+ics_equal json:data-producer/v1/info-types 4 30
+
+ics_api_idc_get_type_ids 200 ExampleInformationType ExampleInformationTypeKafka STD_Fault_Messages type1
ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a DmaapGenericInfoProducer DMaaP_Mediator_Producer
ics_api_a1_get_job_status 200 jobz$i ENABLED 30
done
+sleep_wait 30 # Wait for mediator to listening to kafka
+
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA"
set_agent_trace
- if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
- fi
-
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ #Temporary switch to http/https if dmaap is used. Otherwise it is not possible to push config
+ if [ $__httpx == "HTTPS" ]; then
+ use_agent_rest_https
+ else
+ use_agent_rest_http
+ fi
+ api_put_configuration 200 ".consul_config.json"
+ if [ $__httpx == "HTTPS" ]; then
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_https
+ else
+ use_agent_rest_https
+ fi
+ else
+ if [[ $interface = *"DMAAP"* ]]; then
+ use_agent_dmaap_http
+ else
+ use_agent_rest_http
+ fi
+ fi
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
# Check that all rics are synced in
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-GUILIN ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
set_agent_trace
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
prepare_consul_config SDNC ".consul_config.json"
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
api_get_status 200
#Ignore image in DOCKER_INCLUDED_IMAGES, KUBE_INCLUDED_IMAGES if
#the image is not configured in the supplied env_file
#Used for images not applicable to all supported profile
-CONDITIONALLY_IGNORED_IMAGES="NGW"
+CONDITIONALLY_IGNORED_IMAGES="CBS CONSUL NGW"
#Supported test environment profiles
SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ONAP-JAKARTA ORAN-CHERRY ORAN-D-RELEASE ORAN-E-RELEASE ORAN-F-RELEASE"
start_policy_agent PROXY $SIM_GROUP/$POLICY_AGENT_COMPOSE_DIR/$POLICY_AGENT_CONFIG_FILE
-if [ $RUNMODE == "DOCKER" ]; then
- start_consul_cbs
-fi
-
prepare_consul_config SDNC ".consul_config.json" #Change to NOSDNC if running PMS with proxy
if [ $RUNMODE == "KUBE" ]; then
agent_load_config ".consul_config.json"
else
- consul_config_app ".consul_config.json"
+ if [[ "$PMS_FEATURE_LEVEL" == *"NOCONSUL"* ]]; then
+ api_put_configuration 200 ".consul_config.json"
+ else
+ start_consul_cbs
+ consul_config_app ".consul_config.json"
+ fi
fi
start_cr 1
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+TS_ONELINE_DESCR="Test suite - PMS endpoint aegis image testing. Agent REST, DMAAP and SDNC controller restconf"
+
+. ../common/testsuite_common.sh
+
+suite_setup
+
+############# TEST CASES #################
+
+# Pass the suite's command line arguments through to every test script.
+# "$@" (quoted) preserves arguments that contain spaces.
+./FTC1.sh "$@"
+./FTC10.sh "$@"
+./FTC100.sh "$@"
+./FTC110.sh "$@"
+./FTC300.sh "$@"
+./FTC310.sh "$@"
+./FTC350.sh "$@"
+./FTC800.sh "$@"
+./FTC805.sh "$@"
+./FTC850.sh "$@"
+./FTC2001.sh "$@"
+
+##########################################
+
+suite_complete
--- /dev/null
+#!/bin/bash
+################################################################################
+# Copyright (c) 2021 Nordix Foundation. #
+# #
+# Licensed under the Apache License, Version 2.0 (the "License"); #
+# you may not use this file except in compliance with the License. #
+# You may obtain a copy of the License at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# Unless required by applicable law or agreed to in writing, software #
+# distributed under the License is distributed on an "AS IS" BASIS, #
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
+# See the License for the specific language governing permissions and #
+# limitations under the License. #
+################################################################################
+
+# Override file for running the e-release helm recipe including all components
+
+
+POLICY_AGENT_IMAGE_BASE="aegis-onap-docker-local.artifactory.est.tech/onap/ccsdk-oran-a1policymanagementservice"
RAPP_CAT_EXTERNAL_PORT=9085
RAPP_CAT_EXTERNAL_SECURE_PORT=9086
+
+HELM_MANAGER_APP_NAME="helmmanager"
--- /dev/null
+{
+ "scope": {
+ "ueId": "ueXXX",
+ "qosId": "qosXXX"
+ }
\ No newline at end of file
--- /dev/null
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+ "properties": {
+ "filter": {
+ "type": "string"
+ },
+ "maxConcurrency": {
+ "type": "integer"
+ },
+ "bufferTimeout": {
+ "type": "object",
+ "properties": {
+ "maxSize": {
+ "type": "integer"
+ },
+ "maxTimeMiliseconds": {
+ "type": "integer"
+ }
+ },
+ "required": [
+ "maxSize",
+ "maxTimeMiliseconds"
+ ]
+ }
+ },
+ "required": []
+}
\ No newline at end of file
--- /dev/null
+{
+ "bufferTimeout": {
+ "maxSize": 1,
+ "maxTimeMiliseconds": 0
+ }
+}
\ No newline at end of file
--- /dev/null
+{}
\ No newline at end of file
| `--override <file>` | Override setting from the file supplied by --env-file |
| `--pre-clean` | Clean kube resources when running docker and vice versa |
| `--gen-stats` | Collect container/pod runtime statistics |
+| `--delete-namespaces` | Delete kubernetes namespaces before starting tests - but only those created by the test scripts. Kube mode only. Ignored if running with prestarted apps. |
+| `--delete-containers` | Delete docker containers before starting tests - but only those created by the test scripts. Docker mode only. |
+| `--endpoint-stats` | Collect http endpoint statistics |
| `help` | Print this info along with the test script description and the list of app short names supported |
## Function: setup_testenvironment ##
See the 'cr' dir for more details.
| arg list |
|--|
-| `<variable-name> <target-value> [ <timeout-in-sec> ]` |
+| `<cr-path-id> <variable-name> <target-value> [ <timeout-in-sec> ]` |
| parameter | description |
| --------- | ----------- |
+| `<cr-path-id>` | Variable index to CR |
+| `<variable-name>` | Variable name in the CR |
+| `<target-value>` | Target value for the variable |
+| `<timeout-in-sec>` | Max time to wait for the variable to reach the target value |
+
+## Function: cr_greater_or_equal ##
+Tests if a variable value in the Callback Receiver (CR) simulator is equal to or greater than a target value.
+Without the timeout, the test sets pass or fail immediately depending on if the variable is equal to or greater than the target or not.
+With the timeout, the test waits up to the timeout seconds before setting pass or fail depending on if the variable value becomes equal to or greater than the target value or not.
+See the 'cr' dir for more details.
+| arg list |
+|--|
+| `<cr-path-id> <variable-name> <target-value> [ <timeout-in-sec> ]` |
+
+| parameter | description |
+| --------- | ----------- |
+| `<cr-path-id>` | Variable index to CR |
| `<variable-name>` | Variable name in the CR |
| `<target-value>` | Target value for the variable |
| `<timeout-in-sec>` | Max time to wait for the variable to reach the target value |
| arg list |
|--|
-| `<variable-name> <target-value> [ <timeout-in-sec> ]` |
+| `<cr-path-id> <variable-name> <target-value> [ <timeout-in-sec> ]` |
| parameter | description |
| --------- | ----------- |
+| `<cr-path-id>` | Variable index to CR |
| `<variable-name>` | Variable name in the CR |
| `<target-value>` | Target substring for the variable |
| `<timeout-in-sec>` | Max time to wait for the variable to reach the target value |
See the 'mrstub' dir for more details.
| arg list |
|--|
-| `<variable-name>` |
+| `<cr-path-id> <variable-name>` |
| parameter | description |
| --------- | ----------- |
+| `<cr-path-id>` | Variable index to CR |
| `<variable-name>` | Variable name in the CR |
## Function: cr_delay_callback ##
| arg list |
|--|
-| `<response-code> <id> [ EMPTY \| ( <ric-id> )+ ]` |
+| `<response-code> <cr-path-id> <id> [ EMPTY \| ( <ric-id> )+ ]` |
| parameter | description |
| --------- | ----------- |
| `<response-code>` | Expected http response code |
+| `<cr-path-id>` | Variable index for CR |
| `<id>` | Id of the callback destination |
| `EMPTY` | Indicator for an empty list |
| `<ric-id>` | Id of the ric |
| arg list |
|--|
-| `<response-code> <id> [ EMPTY \| ( <status> )+ ]` |
+| `<response-code> <cr-path-id> <id> [ EMPTY \| ( <status> )+ ]` |
| parameter | description |
| --------- | ----------- |
| `<response-code>` | Expected http response code |
+| `<cr-path-id>` | Variable index for CR |
| `<id>` | Id of the callback destination |
| `EMPTY` | Indicator for an empty list |
| `<status>` | Status string |
| arg list |
|--|
-| `<response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]` |
+| `<response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]` |
| parameter | description |
| --------- | ----------- |
| `<response-code>` | Expected http response code |
+| `<cr-path-id>` | Variable index for CR |
| `<id>` | Id of the callback destination |
| `EMPTY` | Indicator for an empty list |
| `<type-id>` | Id of the data type |
| arg list |
|--|
-| - |
+| `<cr-path-id>` |
+
+| parameter | description |
+| --------- | ----------- |
+| `<cr-path-id>` | Variable index for CR |
## Function: cr_api_check_all_genric_json_events ##
fi
}
+# Tests if a variable value in the CR is equal to or greater than the target value with an optional timeout.
+# Arg: <cr-path-id> <variable-name> <target-value> - This test set pass or fail depending on if the variable is
+# equal to or greater than the target or not.
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
+# before setting pass or fail depending on if the variable value becomes equal to or greater than the target
+# value or not.
+# (Function for test scripts)
+cr_greater_or_equal() {
+	if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+		CR_SERVICE_PATH=$(__cr_get_service_path $1)
+		# Check $? immediately after the command substitution - a later plain
+		# assignment would reset it to 0 and the error path would never trigger.
+		if [ $? -ne 0 ]; then
+			__print_err "<cr-path-id> missing or incorrect" $@
+			return 1
+		fi
+		CR_ADAPTER=$CR_SERVICE_PATH
+		__var_test "CR" "$CR_SERVICE_PATH/counter/" $2 ">=" $3 $4
+	else
+		__print_err "Wrong args to cr_greater_or_equal, needs three or four args: <cr-path-id> <variable-name> <target-value> [ timeout ]" $@
+	fi
+}
+
# Tests if a variable value in the CR contains the target string and an optional timeout
# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable contains
# the target or not.
fi
}
-# Read a variable value from CR sim and send to stdout. Arg: <variable-name>
+# Read a variable value from CR sim and send to stdout. Arg: <cr-path-id> <variable-name>
cr_read() {
CR_SERVICE_PATH=$(__cr_get_service_path $1)
CR_ADAPTER=$CR_SERVICE_PATH
__print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- echo "$(__do_curl $CR_SERVICE_PATH/counter/$1)"
+ echo "$(__do_curl $CR_SERVICE_PATH/counter/$2)"
}
# Function to configure write delay on callbacks
export DMAAP_MED_DATA_MOUNT_PATH
export DMAAP_MED_HOST_MNT_DIR
- export DMAAP_MED_DATA_FILE
+ export DMAAP_MED_CONTR_DATA_FILE
export DMAAP_MED_DATA_CONFIGMAP_NAME=$DMAAP_MED_APP_NAME"-data"
if [ $1 == "PROXY" ]; then
export DMAAP_MED_CONF_SELF_HOST=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f1-2)
export DMAAP_MED_CONF_SELF_PORT=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f3)
export MR_SERVICE_PATH
+ export MR_KAFKA_SERVICE_PATH
+
}
# Start the Dmaap mediator
__dmaapmed_export_vars $1
# Create config map for data
- data_json=$PWD/tmp/$DMAAP_MED_DATA_FILE
+ data_json=$PWD/tmp/$DMAAP_MED_CONTR_DATA_FILE
if [ $# -lt 2 ]; then
#create empty dummy file
echo "{}" > $data_json
__dmaapmed_export_vars $1
- dest_file=$SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_MNT_DIR/$DMAAP_MED_DATA_FILE
+ dest_file=$SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_HOST_MNT_DIR/$DMAAP_MED_CONTR_DATA_FILE
envsubst < $2 > $dest_file
exit 1
else
echo " OK, code: "$status" (Expected)"
- if [[ "$content_type" == *"$resp_content"* ]]; then
+ if [[ "$resp_content" == '*' ]]; then
+ :
+ elif [[ "$content_type" == *"$resp_content"* ]]; then
echo " Content type: "$content_type" (Expected)"
else
echo " Expected content type: "$resp_content
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+# This script format http endpoint stats generated by testscripts
+
+print_usage() {
+    echo "Usage: format_endpoint_stats <log-base-dir> <app-id> <app-description> [tc-id]+ "
+}
+
+SUMMARYFILE=""
+SUMMARYFILE_TMP=""
+
+# Merge one endpoint stats line into the accumulated summary file.
+# Arg: one stats line: "# <method> <endpoint> <pos-ok>/<pos-total> <neg-ok>/<neg-total>"
+# If the method+endpoint pair already exists in the summary, the counters are
+# added to the existing line; otherwise a new line is appended.
+update_summary() {
+
+    input=$@
+    inputarr=(${input// / })
+    inputp=${inputarr[3]}
+    inputn=${inputarr[4]}
+    inputposarr=(${inputp//\// })
+    inputnegarr=(${inputn//\// })
+    > $SUMMARYFILE_TMP
+    found=0
+    while read -r line; do
+        linearr=(${line// / })
+        linep=${linearr[3]}
+        linen=${linearr[4]}
+        lineposarr=(${linep//\// })
+        linenegarr=(${linen//\// })
+        if [[ ${linearr[1]} == ${inputarr[1]} ]] && [[ ${linearr[2]} == ${inputarr[2]} ]]; then
+            let lineposarr[0]=lineposarr[0]+inputposarr[0]
+            let lineposarr[1]=lineposarr[1]+inputposarr[1]
+            let linenegarr[0]=linenegarr[0]+inputnegarr[0]
+            let linenegarr[1]=linenegarr[1]+inputnegarr[1]
+            found=1
+        fi
+        printf '%-2s %-10s %-45s %-16s %-16s' "#" "${linearr[1]}" "${linearr[2]}" "${lineposarr[0]}/${lineposarr[1]}" "${linenegarr[0]}/${linenegarr[1]}" >> $SUMMARYFILE_TMP
+        echo "" >> $SUMMARYFILE_TMP
+    done < $SUMMARYFILE
+    if [ $found -eq 0 ]; then
+        printf '%-2s %-10s %-45s %-16s %-16s' "#" "${inputarr[1]}" "${inputarr[2]}" "${inputposarr[0]}/${inputposarr[1]}" "${inputnegarr[0]}/${inputnegarr[1]}" >> $SUMMARYFILE_TMP
+        echo "" >> $SUMMARYFILE_TMP
+    fi
+    cp $SUMMARYFILE_TMP $SUMMARYFILE
+}
+
+if [ $# -lt 4 ]; then
+    print_usage
+    exit 1
+fi
+BASE_DIR=$1
+if [ ! -d $BASE_DIR ]; then
+    print_usage
+    echo "<log-base-dir> $BASE_DIR does not exist or is not a dir"
+    exit 1
+fi
+SUMMARYFILE=$BASE_DIR/endpoint_summary.log
+rm -f $SUMMARYFILE    # -f: do not fail or warn if the file does not exist yet
+touch $SUMMARYFILE
+SUMMARYFILE_TMP=$BASE_DIR/endpoint_summary_tmp.log
+TC_FAIL=0
+shift
+APP_ID=$1
+shift
+echo ""
+echo "==================================================="
+echo "Functional test cases for $1"
+echo "==================================================="
+echo
+shift
+# Remaining args are the test case ids - one log subdir per test case
+while [ $# -gt 0 ]; do
+    FTC_DIR=$BASE_DIR/$1
+    if [ ! -d $FTC_DIR ]; then
+        echo "Dir $FTC_DIR does not exist"
+        exit 1
+    fi
+    IMAGE_INFO_FILE=$FTC_DIR/imageinfo_$APP_ID".log"
+    if [ -f $IMAGE_INFO_FILE ]; then
+        echo "=== Testscript: $1 ==="
+        echo "Image: "$(cat $IMAGE_INFO_FILE)
+        echo
+        TC_RES_FILE=$FTC_DIR/.result$1.txt
+        if [ -f "$TC_RES_FILE" ]; then
+            TC_RESULT=$(< "$TC_RES_FILE")
+            if [ $TC_RESULT -ne 0 ]; then
+                echo " !!!!! TESTCASE FAILED !!!!!"
+                let TC_FAIL=TC_FAIL+1
+            fi
+        fi
+        echo "=== Results: positive=2XX http status, negative=non 2XX http status - (ok/total)==="
+        echo "Method Endpoint Positive Negative"
+        grep --no-filename "#" $FTC_DIR/endpoint_$APP_ID* | cut -c 4-
+        for filename in $FTC_DIR/endpoint_$APP_ID* ; do
+            filedata=$(< $filename)
+            update_summary $filedata
+        done
+        echo "==============================="
+        echo
+    else
+        echo "=== No stats collected by Testscript $1 ==="
+        echo ""
+    fi
+    shift
+done
+
+echo "Summary of all testscripts"
+if [ $TC_FAIL -ne 0 ]; then
+    echo " !!!!! ONE OR MORE TESTCASE(S) FAILED - CHECK INDIVIDUAL TEST RESULT!!!!!"
+fi
+echo "=== Results: positive=2XX http status, negative=non 2XX http status - (ok/total)==="
+echo "Method Endpoint Positive Negative"
+cat $SUMMARYFILE | cut -c 4-
+
+exit 0
+
# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
# This function is called for prestarted apps not managed by the test script.
__HELMMANAGER_kube_scale_zero_and_wait() {
- __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-helmmanagerservice
+ __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-"$HELM_MANAGER_APP_NAME"
}
# Delete all kube resouces for the app
if [ $retcode_p -eq 0 ]; then
echo -e " Using existing $HELM_MANAGER_APP_NAME deployment and service"
echo " Setting $HELM_MANAGER_APP_NAME replicas=1"
- __kube_scale deployment $HELM_MANAGER_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+ __kube_scale sts $HELM_MANAGER_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
fi
if [ $retcode_i -eq 0 ]; then
#!/bin/bash
# ============LICENSE_START===============================================
-# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
# ========================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
__check_service_start $POLICY_AGENT_APP_NAME $PA_SERVICE_PATH$POLICY_AGENT_ALIVE_URL
fi
+
+ __collect_endpoint_stats_image_info "PMS" $POLICY_AGENT_IMAGE
echo ""
return 0
}
fi
fi
fi
-
+ __collect_endpoint_stats "PMS" 00 "GET" $PMS_API_PREFIX"/v2/policy-instances" $status
__log_test_pass
return 0
fi
fi
+ __collect_endpoint_stats "PMS" 01 "GET" $PMS_API_PREFIX"/v2/policies/{policy_id}" $status
__log_test_pass
return 0
}
__log_test_fail_status_code $1 $status
return 1
fi
-
let pid=$pid+1
let count=$count+1
echo -ne " Executed "$count"("$max")${SAMELINE}"
done
+ __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $status $max
echo ""
__log_test_pass
let count=$count+1
echo -ne " Accepted(batch) "$count"("$max")${SAMELINE}"
done
+ __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $1 $max
echo ""
fi
done
if [ -z $msg ]; then
+ __collect_endpoint_stats "PMS" 02 "PUT" $PMS_API_PREFIX"/v2/policies" $resp_code $(($count*$num_rics))
__log_test_pass " $(($count*$num_rics)) policy request(s) executed"
return 0
fi
__log_test_fail_status_code $1 $status
return 1
fi
+
let pid=$pid+1
let count=$count+1
echo -ne " Executed "$count"("$max")${SAMELINE}"
done
+ __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $status $max
echo ""
__log_test_pass
let count=$count+1
echo -ne " Deleted(batch) "$count"("$max")${SAMELINE}"
done
+ __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $1 $max
echo ""
fi
done
if [ -z $msg ]; then
+ __collect_endpoint_stats "PMS" 03 "DELETE" $PMS_API_PREFIX"/v2/policies/{policy_id}" $resp_code $(($count*$num_rics))
__log_test_pass " $(($count*$num_rics)) policy request(s) executed"
return 0
fi
fi
fi
+ __collect_endpoint_stats "PMS" 04 "GET" $PMS_API_PREFIX"/v2/policies" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 05 "GET" $PMS_API_PREFIX"/v2/policy-types/{policyTypeId}" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 06 "GET" $PMS_API_PREFIX"/v2/policy_schema" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 07 "GET" $PMS_API_PREFIX"/v2/policy-schemas" $status
__log_test_pass
return 0
}
# API Test function: GET /policy_status and V2 GET /policies/{policy_id}/status
-# arg: <response-code> <policy-id> (STD|STD2 <enforce-status>|EMPTY [<reason>|EMPTY])|(OSC <instance-status> <has-been-deleted>)
+# arg: <response-code> <policy-id> [ (STD|STD2 <enforce-status>|EMPTY [<reason>|EMPTY])|(OSC <instance-status> <has-been-deleted>) ]
# (Function for test scripts)
api_get_policy_status() {
__log_test_start $@
- if [ $# -lt 4 ] || [ $# -gt 5 ]; then
- __print_err "<response-code> <policy-id> (STD <enforce-status>|EMPTY [<reason>|EMPTY])|(OSC <instance-status> <has-been-deleted>)" $@
+ if [ $# -lt 2 ] || [ $# -gt 5 ]; then
+ __print_err "<response-code> <policy-id> [(STD <enforce-status>|EMPTY [<reason>|EMPTY])|(OSC <instance-status> <has-been-deleted>)]" $@
return 1
fi
targetJson=""
-
- if [ $3 == "STD" ]; then
+ if [ $# -eq 2 ]; then
+ :
+ elif [ "$3" == "STD" ]; then
targetJson="{\"enforceStatus\":\"$4\""
if [ $# -eq 5 ]; then
targetJson=$targetJson",\"reason\":\"$5\""
fi
targetJson=$targetJson"}"
- elif [ $3 == "STD2" ]; then
+ elif [ "$3" == "STD2" ]; then
if [ $4 == "EMPTY" ]; then
targetJson="{\"enforceStatus\":\"\""
else
fi
fi
targetJson=$targetJson"}"
- elif [ $3 == "OSC" ]; then
+ elif [ "$3" == "OSC" ]; then
targetJson="{\"instance_status\":\"$4\""
if [ $# -eq 5 ]; then
targetJson=$targetJson",\"has_been_deleted\":\"$5\""
__log_test_fail_status_code $1 $status
return 1
fi
+ if [ $# -gt 2 ]; then
+ echo "TARGET JSON: $targetJson" >> $HTTPLOG
+ body=${res:0:${#res}-3}
+ res=$(python3 ../common/compare_json.py "$targetJson" "$body")
- echo "TARGET JSON: $targetJson" >> $HTTPLOG
- body=${res:0:${#res}-3}
- res=$(python3 ../common/compare_json.py "$targetJson" "$body")
-
- if [ $res -ne 0 ]; then
- __log_test_fail_body
- return 1
+ if [ $res -ne 0 ]; then
+ __log_test_fail_body
+ return 1
+ fi
fi
-
+ __collect_endpoint_stats "PMS" 08 "GET" $PMS_API_PREFIX"/v2/policies/{policy_id}/status" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 09 "GET" $PMS_API_PREFIX"/v2/policy-types" $status
__log_test_pass
return 0
}
return 1
fi
+ __collect_endpoint_stats "PMS" 10 "GET" $PMS_API_PREFIX"/v2/status" $status
+ __log_test_pass
+ return 0
+}
+
+# API Test function: GET /status (root) without api prefix
+# args: <response-code>
+# (Function for test scripts)
+api_get_status_root() {
+	__log_test_start $@
+	if [ $# -ne 1 ]; then
+		__print_err "<response-code>" $@
+		return 1
+	fi
+	query="/status"
+	# Temporarily clear the global api prefix so the request hits the root
+	# /status endpoint, then restore it so later calls are unaffected.
+	TMP_PREFIX=$PMS_API_PREFIX
+	PMS_API_PREFIX=""
+	res="$(__do_curl_to_api PA GET $query)"
+	PMS_API_PREFIX=$TMP_PREFIX
+	# The curl helper appends the http status code as the last 3 chars of res
+	status=${res:${#res}-3}
+
+	if [ $status -ne $1 ]; then
+		__log_test_fail_status_code $1 $status
+		return 1
+	fi
+
+	__collect_endpoint_stats "PMS" 19 "GET" "/status" $status
+	__log_test_pass
+	return 0
+}
fi
fi
fi
+
+ __collect_endpoint_stats "PMS" 11 "GET" $PMS_API_PREFIX"/v2/rics/ric" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 12 "GET" $PMS_API_PREFIX"/v2/rics" $status
__log_test_pass
return 0
}
return 1
fi
+ __collect_endpoint_stats "PMS" 13 "PUT" $PMS_API_PREFIX"/v2/service" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 14 "GET" $PMS_API_PREFIX"/v2/services" $status
__log_test_pass
return 0
}
return 1
fi
+ __collect_endpoint_stats "PMS" 14 "GET" $PMS_API_PREFIX"/v2/services" $status
__log_test_pass
return 0
}
return 1
fi
+ __collect_endpoint_stats "PMS" 15 "DELETE" $PMS_API_PREFIX"/v2/services/{serviceId}" $status
__log_test_pass
return 0
}
return 1
fi
+ __collect_endpoint_stats "PMS" 16 "PUT" $PMS_API_PREFIX"/v2/services/{service_id}/keepalive" $status
__log_test_pass
return 0
}
return 1
fi
inputJson=$(< $2)
- inputJson="{\"config\":"$inputJson"}"
+ if [ $RUNMODE == "DOCKER" ]; then #In kube the file already has a header
+ inputJson="{\"config\":"$inputJson"}"
+ fi
file="./tmp/.config.json"
echo $inputJson > $file
query="/v2/configuration"
return 1
fi
+ __collect_endpoint_stats "PMS" 17 "PUT" $PMS_API_PREFIX"/v2/configuration" $status
__log_test_pass
return 0
}
fi
fi
+ __collect_endpoint_stats "PMS" 18 "GET" $PMS_API_PREFIX"/v2/configuration" $status
__log_test_pass
return 0
}
export DOCKER_SIM_NWNAME
export RIC_SIM_DISPLAY_NAME
- docker_args="--no-recreate --scale $RICSIM_COMPOSE_SERVICE_NAME=$2"
+ docker_args=" --scale $RICSIM_COMPOSE_SERVICE_NAME=$2"
#Create a list of contsiner names
#Will be <ricsim-prefix>_<service-name>_<index>
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
RIC_SIM_IMAGE_BASE="o-ran-sc/a1-simulator"
RIC_SIM_IMAGE_TAG_REMOTE_RELEASE_ORAN="2.2.0"
-
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
#MR stub image and tag
MRSTUB_IMAGE_BASE="mrstub"
MRSTUB_IMAGE_TAG_LOCAL="latest"
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
PROD_STUB_ALIVE_URL="/" # Base path for alive check
PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
# Note, a prefix is added to each container name by the .env file in the 'ric' dir
SDNC_USER="admin" # SDNC username
SDNC_PWD="admin" # SNDC PWD
SDNC_PWD="Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U" # SNDC PWD
-#SDNC_API_URL="/rests/operations/A1-ADAPTER-API:" # Base url path for SNDC API (for upgraded sdnc)
-SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SNDC API
+SDNC_API_URL="/rests/operations/A1-ADAPTER-API:" # Base url path for SNDC API (for upgraded sdnc)
+#SDNC_API_URL="/restconf/operations/A1-ADAPTER-API:" # Base url path for SNDC API
SDNC_ALIVE_URL="/apidoc/explorer/" # Base url path for SNDC API docs (for alive check)
SDNC_COMPOSE_DIR="sdnc"
SDNC_COMPOSE_FILE="docker-compose-2.yml"
SDNC_KUBE_APP_FILE="app2.yaml"
SDNC_KARAF_LOG="/opt/opendaylight/data/log/karaf.log" # Path to karaf log
-#SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
-SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
+SDNC_RESPONSE_JSON_KEY="A1-ADAPTER-API:output" # Key name for output json in replies from sdnc (for upgraded sdnc)
+#SDNC_RESPONSE_JSON_KEY="output" # Key name for output json in replies from sdnc
SDNC_FEATURE_LEVEL="TRANS_RESP_CODE" # Space separated list of features
# TRANS_RESP_CODE: SDNC return southbound response code
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose
#MAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration
DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file
-DMAAP_MED_DATA_FILE="type_config.json" # Container data file name
+DMAAP_MED_HOST_DATA_FILE="type_config.json" # Host data file name
+DMAAP_MED_CONTR_DATA_FILE="type_config.json" # Container data file name
+DMAAP_MED_FEATURE_LEVEL="" # Space separated list of features
KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon
-KAFKAPC_DISPLAY_NAME="Kafaka Producer/Consumer"
+KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer"
KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container)
KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container)
KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container)
# Policy Agent base image and tags
POLICY_AGENT_IMAGE_BASE="o-ran-sc/nonrtric-a1-policy-management-service"
-POLICY_AGENT_IMAGE_TAG_LOCAL="2.3.0-SNAPSHOT"
-POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.3.0-SNAPSHOT"
-POLICY_AGENT_IMAGE_TAG_REMOTE="2.3.0"
-POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.3.0"
+POLICY_AGENT_IMAGE_TAG_LOCAL="2.4.0-SNAPSHOT"
+POLICY_AGENT_IMAGE_TAG_REMOTE_SNAPSHOT="2.4.0-SNAPSHOT"
+POLICY_AGENT_IMAGE_TAG_REMOTE="2.4.0"
+POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.4.0"
# ICS image and tags
ICS_IMAGE_BASE="o-ran-sc/nonrtric-information-coordinator-service"
# DMAAP Mediator Service
DMAAP_MED_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-mediator-producer"
-DMAAP_MED_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT"
-DMAAP_MED_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT"
-DMAAP_MED_IMAGE_TAG_REMOTE="1.0.0"
-DMAAP_MED_IMAGE_TAG_REMOTE_RELEASE="1.0.0"
+DMAAP_MED_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
+DMAAP_MED_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
+DMAAP_MED_IMAGE_TAG_REMOTE="1.1.0"
+DMAAP_MED_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
# DMAAP Adapter Service
DMAAP_ADP_IMAGE_BASE="o-ran-sc/nonrtric-dmaap-adaptor"
-DMAAP_ADP_IMAGE_TAG_LOCAL="1.0.0-SNAPSHOT"
-DMAAP_ADP_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.0-SNAPSHOT"
-DMAAP_ADP_IMAGE_TAG_REMOTE="1.0.0"
-DMAAP_ADP_IMAGE_TAG_REMOTE_RELEASE="1.0.0"
+DMAAP_ADP_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
+DMAAP_ADP_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
+DMAAP_ADP_IMAGE_TAG_REMOTE="1.1.0"
+DMAAP_ADP_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
# Helm Manager
HELM_MANAGER_IMAGE_BASE="o-ran-sc/nonrtric-helm-manager"
HELM_MANAGER_IMAGE_TAG_REMOTE="1.2.0"
HELM_MANAGER_IMAGE_TAG_REMOTE_RELEASE="1.2.0"
-#Consul remote image and tag
-CONSUL_IMAGE_BASE="consul"
-CONSUL_IMAGE_TAG_REMOTE_PROXY="1.7.2"
-#No local image for Consul, remote image always used
-
-
-#CBS remote image and tag
-CBS_IMAGE_BASE="onap/org.onap.dcaegen2.platform.configbinding.app-app"
-CBS_IMAGE_TAG_REMOTE_RELEASE_ONAP="2.3.0"
-#No local image for CBS, remote image always used
-
-
#MR stub image and tag
MRSTUB_IMAGE_BASE="mrstub"
MRSTUB_IMAGE_TAG_LOCAL="latest"
ORAN_IMAGES_APP_NAMES="" # Not used
# List of app short names which images pulled from ONAP
-ONAP_IMAGES_APP_NAMES="CBS DMAAPMR SDNC" # SDNC added as ONAP image
+ONAP_IMAGES_APP_NAMES="DMAAPMR SDNC" # SDNC added as ONAP image
########################################
POLICY_AGENT_CONFIG_FILE="application.yaml" # Container config file name
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
+PMS_FEATURE_LEVEL="NOCONSUL INITIALCONFIGMAP" # Space separated list of features
ICS_APP_NAME="informationservice" # Name for ICS container
ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
PROD_STUB_ALIVE_URL="/" # Base path for alive check
PROD_STUB_COMPOSE_DIR="prodstub" # Dir in simulator_group for docker-compose
-CONSUL_HOST="consul-server" # Host name of consul
-CONSUL_DISPLAY_NAME="Consul"
-CONSUL_EXTERNAL_PORT=8500 # Consul container external port (host -> container)
-CONSUL_INTERNAL_PORT=8500 # Consul container internal port (container -> container)
-CONSUL_APP_NAME="polman-consul" # Name for consul container
-CONSUL_ALIVE_URL="/ui/dc1/kv" # Base path for alive check
-CONSUL_CBS_COMPOSE_DIR="consul_cbs" # Dir in simulator group for docker compose
-
-CBS_APP_NAME="polman-cbs" # Name for CBS container
-CBS_DISPLAY_NAME="Config Binding Service"
-CBS_EXTERNAL_PORT=10000 # CBS container external port (host -> container)
-CBS_INTERNAL_PORT=10000 # CBS container internal port (container -> container)
-CONFIG_BINDING_SERVICE="config-binding-service" # Host name of CBS
-CBS_ALIVE_URL="/healthcheck" # Base path for alive check
-
RIC_SIM_DISPLAY_NAME="Near-RT RIC A1 Simulator"
RIC_SIM_BASE="g" # Base name of the RIC Simulator container, shall be the group code
# Note, a prefix is added to each container name by the .env file in the 'ric' dir
#MAAP_ADP_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container
#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
#DMAAP_MED_CERT_MOUNT_DIR="./cert"
-DMAAP_MED_ALIVE_URL="/status" # Base path for alive check
+DMAAP_MED_ALIVE_URL="/health_check" # Base path for alive check
DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose
#MAAP_MED_CONFIG_MOUNT_PATH="/app" # Internal container path for configuration
DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file
-DMAAP_MED_DATA_FILE="type_config.json" # Container data file name
+DMAAP_MED_HOST_DATA_FILE="type_config_1.json" # Host data file name
+DMAAP_MED_CONTR_DATA_FILE="type_config.json" # Container data file name
+DMAAP_MED_FEATURE_LEVEL="KAFKATYPES" # Space separated list of features
KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon
-KAFKAPC_DISPLAY_NAME="Kafaka Producer/Consumer"
+KAFKAPC_DISPLAY_NAME="Kafka Producer/Consumer"
KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container)
KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container)
KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container)
echo " [--ricsim-prefix <prefix> ] [--use-local-image <app-nam>+] [--use-snapshot-image <app-nam>+]"
echo " [--use-staging-image <app-nam>+] [--use-release-image <app-nam>+] [--image-repo <repo-address]"
echo " [--repo-policy local|remote] [--cluster-timeout <timeout-in seconds>] [--print-stats]"
- echo " [--override <override-environment-filename> --pre-clean --gen-stats]"
+ echo " [--override <override-environment-filename>] [--pre-clean] [--gen-stats] [--delete-namespaces]"
+ echo " [--delete-containers] [--endpoint-stats]"
}
if [ $# -eq 1 ] && [ "$1" == "help" ]; then
echo "--override <file> - Override setting from the file supplied by --env-file"
echo "--pre-clean - Will clean kube resouces when running docker and vice versa"
echo "--gen-stats - Collect container/pod runtime statistics"
-
+ echo "--delete-namespaces - Delete kubernetes namespaces before starting tests - but only those created by the test scripts. Kube mode only. Ignored if running with prestarted apps."
+ echo "--delete-containers - Delete docker containers before starting tests - but only those created by the test scripts. Docker mode only."
+ echo "--endpoint-stats - Collect endpoint statistics"
echo ""
echo "List of app short names supported: "$APP_SHORT_NAMES
exit 0
exit 1
fi
+
# Create a http message log for this testcase
HTTPLOG=$PWD"/.httplog_"$ATC".txt"
echo "" > $HTTPLOG
rm $TESTLOGS/$ATC/*.txt &> /dev/null
rm $TESTLOGS/$ATC/*.json &> /dev/null
+#Create result file in the log dir
+echo "1" > "$TESTLOGS/$ATC/.result$ATC.txt"
+
# Log all output from the test case to a TC log
TCLOG=$TESTLOGS/$ATC/TC.log
exec &> >(tee ${TCLOG})
COLLECT_RUNTIME_STATS=0
COLLECT_RUNTIME_STATS_PID=0
+#Var to control if endpoint statistics shall be collected
+COLLECT_ENDPOINT_STATS=0
+
+#Var to control if namespaces shall be deleted before test setup
+DELETE_KUBE_NAMESPACES=0
+
+#Var to control if containers shall be deleted before test setup
+DELETE_CONTAINERS=0
+
#File to keep deviation messages
DEVIATION_FILE=".tmp_deviations"
rm $DEVIATION_FILE &> /dev/null
__print_current_stats
}
+# Function to collect pass/fail stats per endpoint, accumulated in a per-testcase log file
+# args: <app-id> <end-point-no> <http-operation> <end-point-url> <http-status> [<count>]
+__collect_endpoint_stats() {
+	if [ $COLLECT_ENDPOINT_STATS -eq 0 ]; then
+		return  # Endpoint statistics collection not enabled (--endpoint-stats)
+	fi
+	ENDPOINT_COUNT=1
+	if [ $# -gt 5 ]; then
+		ENDPOINT_COUNT=$6
+	fi
+	ENDPOINT_STAT_FILE=$TESTLOGS/$ATC/endpoint_${ATC}_$1_$2".log"  # ${ATC} braced: plain $ATC_ expands the undefined variable 'ATC_'
+	ENDPOINT_POS=0
+	ENDPOINT_NEG=0
+	if [ -f $ENDPOINT_STAT_FILE ]; then
+		ENDPOINT_VAL=$(< $ENDPOINT_STAT_FILE)
+		ENDPOINT_POS=$(echo $ENDPOINT_VAL | cut -f4 -d ' ' | cut -f1 -d '/')
+		ENDPOINT_NEG=$(echo $ENDPOINT_VAL | cut -f5 -d ' ' | cut -f1 -d '/')
+	fi
+
+	if [ $5 -ge 200 ] && [ $5 -lt 300 ]; then
+		let ENDPOINT_POS=ENDPOINT_POS+$ENDPOINT_COUNT
+	else
+		let ENDPOINT_NEG=ENDPOINT_NEG+$ENDPOINT_COUNT
+	fi
+
+	printf '%-2s %-10s %-45s %-16s %-16s' "#" "$3" "$4" "$ENDPOINT_POS/$ENDPOINT_POS" "$ENDPOINT_NEG/$ENDPOINT_NEG" > $ENDPOINT_STAT_FILE  # NOTE(review): POS/POS and NEG/NEG look intentional (reader takes the first field before '/') - confirm
+}
+
+# Function to record the image info used by an app in a per-testcase log file
+# args: <app-id> <image-info>
+__collect_endpoint_stats_image_info() {
+	if [ $COLLECT_ENDPOINT_STATS -eq 0 ]; then
+		return  # Endpoint statistics collection not enabled (--endpoint-stats)
+	fi
+	ENDPOINT_STAT_FILE=$TESTLOGS/$ATC/imageinfo_${ATC}_$1".log"  # ${ATC} braced: plain $ATC_ expands the undefined variable 'ATC_'
+	echo $POLICY_AGENT_IMAGE > $ENDPOINT_STAT_FILE  # NOTE(review): the <image-info> arg ($2) is unused; PMS image is always written - confirm intent
+}
+
#Var for measuring execution time
TCTEST_START=$SECONDS
TIMER_MEASUREMENTS=".timer_measurement.txt"
echo -e "Activity \t Duration \t Info" > $TIMER_MEASUREMENTS
-# If this is set, some images (control by the parameter repo-polcy) will be re-tagged and pushed to this repo before any
+# If this is set, some images (controlled by the parameter repo-policy) will be re-tagged and pushed to this repo before any
IMAGE_REPO_ADR=""
IMAGE_REPO_POLICY="local"
CLUSTER_TIME_OUT=0
foundparm=0
fi
fi
+	if [ $paramerror -eq 0 ]; then
+		if [ "$1" == "--delete-namespaces" ]; then
+			if [ $RUNMODE == "DOCKER" ]; then
+				DELETE_KUBE_NAMESPACES=0
+				echo "Option ignored - Delete namespaces (ignored when running docker)"
+			else
+				if [ -n "$KUBE_PRESTARTED_IMAGES" ]; then  # fix: was [ -z "KUBE_PRESTARTED_IMAGES" ] - a literal string is never empty, so this branch was unreachable and namespaces were deleted even with prestarted apps
+					DELETE_KUBE_NAMESPACES=0
+					echo "Option ignored - Delete namespaces (ignored when using prestarted apps)"
+				else
+					DELETE_KUBE_NAMESPACES=1
+					echo "Option set - Delete namespaces"
+				fi
+			fi
+			shift;
+			foundparm=0
+		fi
+	fi
+ if [ $paramerror -eq 0 ]; then
+ if [ "$1" == "--delete-containers" ]; then
+ if [ $RUNMODE == "DOCKER" ]; then
+ DELETE_CONTAINERS=1
+ echo "Option set - Delete containers started by previous test(s)"
+ else
+ echo "Option ignored - Delete containers (ignored when running kube)"
+ fi
+ shift;
+ foundparm=0
+ fi
+ fi
+ if [ $paramerror -eq 0 ]; then
+ if [ "$1" == "--endpoint-stats" ]; then
+ COLLECT_ENDPOINT_STATS=1
+ echo "Option set - Collect endpoint statistics"
+ shift;
+ foundparm=0
+ fi
+ fi
done
echo ""
exit 1
fi
- echo " Node(s) and container container runtime config"
+ echo " Node(s) and container runtime config"
kubectl get nodes -o wide | indent2
fi
fi
#Temp var to check for image pull errors
IMAGE_ERR=0
- # The following sequence pull the configured images
+ # Delete namespaces
+ echo -e $BOLD"Deleting namespaces"$EBOLD
+
+ if [ "$DELETE_KUBE_NAMESPACES" -eq 1 ]; then
+ test_env_namespaces=$(kubectl get ns --no-headers -o custom-columns=":metadata.name" -l autotest=engine) #Get list of ns created by the test env
+ if [ $? -ne 0 ]; then
+ echo " Cannot get list of namespaces...ignoring delete"
+ else
+ for test_env_ns in $test_env_namespaces; do
+ __kube_delete_namespace $test_env_ns
+ done
+ fi
+ else
+ echo " Namespace delete option not set"
+ fi
+ echo ""
+
+ # Delete containers
+ echo -e $BOLD"Deleting containers"$EBOLD
+
+ if [ "$DELETE_CONTAINERS" -eq 1 ]; then
+ echo " Stopping containers label 'nrttest_app'..."
+ docker stop $(docker ps -qa --filter "label=nrttest_app") 2> /dev/null
+ echo " Removing stopped containers..."
+ docker rm $(docker ps -qa --filter "label=nrttest_app") 2> /dev/null
+ else
+ echo " Contatiner delete option not set"
+ fi
+ echo ""
+ # The following sequence pull the configured images
echo -e $BOLD"Pulling configured images, if needed"$EBOLD
if [ ! -z "$IMAGE_REPO_ADR" ] && [ $IMAGE_REPO_POLICY == "local" ]; then
echo -e $YELLOW" Excluding all remote image check/pull when running with image repo: $IMAGE_REPO_ADR and image policy $IMAGE_REPO_POLICY"$EYELLOW
fi
#Create file with OK exit code
echo "0" > "$AUTOTEST_HOME/.result$ATC.txt"
+ echo "0" > "$TESTLOGS/$ATC/.result$ATC.txt"
else
echo -e "One or more tests with status \033[31m\033[1mFAIL\033[0m "
echo -e "\033[31m\033[1m ___ _ ___ _ \033[0m"
if [ $STOP_AT_ERROR -eq 1 ]; then
echo -e $RED"Test script configured to stop at first FAIL, taking all logs and stops"$ERED
store_logs "STOP_AT_ERROR"
+
+ # Update test suite counter
+ if [ -f .tmp_tcsuite_fail_ctr ]; then
+ tmpval=$(< .tmp_tcsuite_fail_ctr)
+ ((tmpval++))
+ echo $tmpval > .tmp_tcsuite_fail_ctr
+ fi
+ if [ -f .tmp_tcsuite_fail ]; then
+ echo " - "$ATC " -- "$TC_ONELINE_DESCR" Execution stopped due to error" >> .tmp_tcsuite_fail
+ fi
exit 1
fi
return 0
echo " Message: $(<./tmp/kubeerr)"
return 1
else
+ kubectl label ns $1 autotest=engine
echo -e " Creating namespace $1 $GREEN$BOLD OK $EBOLD$EGREEN"
fi
else
echo -e $RED" Got: "${FUNCNAME[1]} ${@:2} $ERED
fi
((RES_CONF_FAIL++))
+ __check_stop_at_error
}
# Function to create the docker network for the test
__check_stop_at_error
return
fi
- elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then
+ elif [ "$4" == "=" ] && [ "$result" -eq $5 ]; then
((RES_PASS++))
echo -e " Result=${result} after ${duration} seconds${SAMELINE}"
echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds"
__print_current_stats
return
- elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then
+ elif [ "$4" == ">" ] && [ "$result" -gt $5 ]; then
((RES_PASS++))
echo -e " Result=${result} after ${duration} seconds${SAMELINE}"
echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds"
__print_current_stats
return
- elif [ $4 = "<" ] && [ "$result" -lt $5 ]; then
+ elif [ "$4" == "<" ] && [ "$result" -lt $5 ]; then
((RES_PASS++))
echo -e " Result=${result} after ${duration} seconds${SAMELINE}"
echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds"
__print_current_stats
return
- elif [ $4 = "contain_str" ] && [[ $result =~ $5 ]]; then
+ elif [ "$4" == ">=" ] && [ "$result" -ge $5 ]; then
+ ((RES_PASS++))
+ echo -e " Result=${result} after ${duration} seconds${SAMELINE}"
+ echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds"
+ __print_current_stats
+ return
+ elif [ "$4" == "contain_str" ] && [[ $result =~ $5 ]]; then
((RES_PASS++))
echo -e " Result=${result} after ${duration} seconds${SAMELINE}"
echo -e $GREEN" PASS${EGREEN} - Result=${result} after ${duration} seconds"
echo -e $RED" FAIL ${ERED}- ${3} ${4} ${5} not reached, result = ${result}"
__print_current_stats
__check_stop_at_error
- elif [ $4 = "=" ] && [ "$result" -eq $5 ]; then
+ elif [ "$4" == "=" ] && [ "$result" -eq $5 ]; then
+ ((RES_PASS++))
+ echo -e $GREEN" PASS${EGREEN} - Result=${result}"
+ __print_current_stats
+ elif [ "$4" == ">" ] && [ "$result" -gt $5 ]; then
((RES_PASS++))
echo -e $GREEN" PASS${EGREEN} - Result=${result}"
__print_current_stats
- elif [ $4 = ">" ] && [ "$result" -gt $5 ]; then
+ elif [ "$4" == "<" ] && [ "$result" -lt $5 ]; then
((RES_PASS++))
echo -e $GREEN" PASS${EGREEN} - Result=${result}"
__print_current_stats
- elif [ $4 = "<" ] && [ "$result" -lt $5 ]; then
+ elif [ "$4" == ">=" ] && [ "$result" -ge $5 ]; then
((RES_PASS++))
echo -e $GREEN" PASS${EGREEN} - Result=${result}"
__print_current_stats
- elif [ $4 = "contain_str" ] && [[ $result =~ $5 ]]; then
+ elif [ "$4" == "contain_str" ] && [[ $result =~ $5 ]]; then
((RES_PASS++))
echo -e $GREEN" PASS${EGREEN} - Result=${result}"
__print_current_stats
# Metrics vars
cntr_msg_callbacks=0
+cntr_batch_callbacks=0
cntr_msg_fetched=0
cntr_callbacks={}
hosts_set=set()
APP_READ_URL="/get-event/<string:id>"
APP_READ_ALL_URL="/get-all-events/<string:id>"
DUMP_ALL_URL="/db"
+NULL_URL="/callbacks-null" # Url for ignored callback. Callbacks are not checked, counted or stored
MIME_TEXT="text/plain"
MIME_JSON="application/json"
if (id in cntr_callbacks.keys()):
cntr_callbacks[id][0] += 1
+ cntr_callbacks[id][2] += 1
else:
cntr_callbacks[id]=[]
cntr_callbacks[id].append(1)
cntr_callbacks[id].append(0)
+ cntr_callbacks[id].append(0)
except Exception as e:
print(CAUGHT_EXCEPTION+str(e))
def events_write_mr(id):
global msg_callbacks
global cntr_msg_callbacks
+ global cntr_batch_callbacks
storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
#Large payloads will otherwise overload the server
if (storeas is None):
print("raw data: str(request.data): "+str(request.data))
do_delay()
+ list_data=False
try:
#if (request.content_type == MIME_JSON):
if (MIME_JSON in request.content_type):
msg_list = json.loads(data)
if (storeas is None):
print("Payload(json): "+str(msg_list))
+ list_data=True
else:
msg_list=[]
print("Payload(content-type="+request.content_type+"). Setting empty json as payload")
with lock:
remote_host_logging(request)
+ if (list_data):
+ cntr_batch_callbacks += 1
for msg in msg_list:
if (storeas is None):
msg=json.loads(msg)
cntr_callbacks[id]=[]
cntr_callbacks[id].append(1)
cntr_callbacks[id].append(0)
+ cntr_callbacks[id].append(0)
+ if (id in msg_callbacks.keys() and list_data):
+ cntr_callbacks[id][2] += 1
except Exception as e:
print(CAUGHT_EXCEPTION+str(e))
def events_write_text(id):
global msg_callbacks
global cntr_msg_callbacks
+ global cntr_batch_callbacks
storeas=request.args.get('storeas') #If set, store payload as a md5 hascode and dont log the payload
#Large payloads will otherwise overload the server
try:
msg_list=None
+ list_data=False
if (MIME_JSON in request.content_type): #Json array of strings
msg_list=json.loads(request.data)
+ list_data=True
else:
data=request.data.decode("utf-8") #Assuming string
msg_list=[]
msg_list.append(data)
+ with lock:
+ cntr_batch_callbacks += 1
+ for msg in msg_list:
+ if (storeas == "md5"):
+ md5msg={}
+ print("msg: "+str(msg))
+ print("msg (endcode str): "+str(msg.encode('utf-8')))
+ md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
+ msg=md5msg
+ print("msg (data converted to md5 hash): "+str(msg["md5"]))
+
+ if (isinstance(msg, dict)):
+ msg[TIME_STAMP]=str(datetime.now())
- for msg in msg_list:
- if (storeas == "md5"):
- md5msg={}
- print("msg: "+str(msg))
- print("msg (endcode str): "+str(msg.encode('utf-8')))
- md5msg["md5"]=md5(msg.encode('utf-8')).hexdigest()
- msg=md5msg
- print("msg (data converted to md5 hash): "+str(msg["md5"]))
-
- if (isinstance(msg, dict)):
- msg[TIME_STAMP]=str(datetime.now())
-
- with lock:
cntr_msg_callbacks += 1
if (id in msg_callbacks.keys()):
msg_callbacks[id].append(msg)
cntr_callbacks[id]=[]
cntr_callbacks[id].append(1)
cntr_callbacks[id].append(0)
+ cntr_callbacks[id].append(0)
+ if (id in cntr_callbacks.keys() and list_data):
+ cntr_callbacks[id][2] += 1
except Exception as e:
print(CAUGHT_EXCEPTION+str(e))
traceback.print_exc()
return 'OK',200
-### Functions for test ###
+# Receive a callback message but ignore contents and return 200
+# URI and payload, (PUT or POST): /callbacks-text/<id> <text message>
+# response: OK 200
+@app.route(NULL_URL,
+ methods=['PUT','POST'])
+def null_url(id):
+ return 'OK',200
# Dump the whole db of current callbacks
# URI and parameter, (GET): /db
else:
return Response(str("0"), status=200, mimetype=MIME_TEXT)
+@app.route('/counter/received_callback_batches',
+ methods=['GET'])
+def batches_submitted():
+ req_id = request.args.get('id')
+ if (req_id is None):
+ return Response(str(cntr_batch_callbacks), status=200, mimetype=MIME_TEXT)
+
+ if (req_id in cntr_callbacks.keys()):
+ return Response(str(cntr_callbacks[req_id][2]), status=200, mimetype=MIME_TEXT)
+ else:
+ return Response(str("0"), status=200, mimetype=MIME_TEXT)
+
@app.route('/counter/fetched_callbacks',
methods=['GET'])
def requests_fetched():
global msg_callbacks
global cntr_msg_fetched
global cntr_msg_callbacks
+ global cntr_batch_callbacks
global cntr_callbacks
global forced_settings
msg_callbacks={}
cntr_msg_fetched=0
cntr_msg_callbacks=0
+ cntr_batch_callbacks=0
cntr_callbacks={}
forced_settings['delay']=None
# source function to do curl and check result
. ../common/do_curl_function.sh
+RESP_CONTENT='*' #Dont check resp content type
+
echo "=== CR hello world ==="
RESULT="OK"
do_curl GET / 200
RESULT="0"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
RESULT="2"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
RESULT="2"
do_curl GET /counter/received_callbacks?id=test 200
+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches?id=test 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=test 200
RESULT="0"
do_curl GET /counter/received_callbacks?id=dummy 200
+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches?id=dummy 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=dummy 200
RESULT="2"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks 200
RESULT="2"
do_curl GET /counter/received_callbacks?id=test 200
+echo "=== Get counter - callback batches ==="
+RESULT="2"
+do_curl GET /counter/received_callback_batches?id=test 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks?id=test 200
RESULT="5"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="5"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="2"
do_curl GET /counter/fetched_callbacks 200
RESULT="1"
do_curl GET /counter/received_callbacks?id=test1 200
+echo "=== Get counter - callback batches ==="
+RESULT="1"
+do_curl GET /counter/received_callback_batches?id=test1 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks?id=test1 200
RESULT="5"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="5"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="4"
do_curl GET /counter/fetched_callbacks 200
RESULT="1"
do_curl GET /counter/current_messages 200
+echo "=== Send a request ==="
+RESULT="*"
+#create payload
+echo "[{\"DATA-MSG\":\"msg\"},{\"DATA-MSG\":\"msg\"}]" > .tmp.json
+do_curl POST '/callbacks-text/test' 200 .tmp.json
+
+echo "=== Get counter - callbacks ==="
+RESULT="7"
+do_curl GET /counter/received_callbacks 200
+
+echo "=== Get counter - callback batches ==="
+RESULT="6"
+do_curl GET /counter/received_callback_batches 200
+
+echo "=== Get counter - fetched events ==="
+RESULT="4"
+do_curl GET /counter/fetched_callbacks 200
+
+echo "=== Get counter - current events ==="
+RESULT="3"
+do_curl GET /counter/current_messages 200
+
+
echo "=== CR reset ==="
RESULT="OK"
do_curl GET /reset 200
RESULT="0"
do_curl GET /counter/received_callbacks 200
+echo "=== Get counter - callback batches ==="
+RESULT="0"
+do_curl GET /counter/received_callback_batches 200
+
echo "=== Get counter - fetched events ==="
RESULT="0"
do_curl GET /counter/fetched_callbacks 200
- name: https
containerPort: $DMAAP_MED_INTERNAL_SECURE_PORT
volumeMounts:
- - mountPath: $DMAAP_MED_DATA_MOUNT_PATH/$DMAAP_MED_DATA_FILE
- subPath: $DMAAP_MED_DATA_FILE
+ - mountPath: $DMAAP_MED_DATA_MOUNT_PATH/$DMAAP_MED_CONTR_DATA_FILE
+ subPath: $DMAAP_MED_CONTR_DATA_FILE
name: dmaapadp-data-name
env:
- name: INFO_PRODUCER_HOST
value: "$MR_SERVICE_PATH"
- name: LOG_LEVEL
value: Debug
+ - name: KAFKA_BOOTSTRAP_SERVERS
+ value: "$MR_KAFKA_SERVICE_PATH"
volumes:
- configMap:
defaultMode: 420
- INFO_COORD_ADDR=${ICS_SERVICE_PATH}
- DMAAP_MR_ADDR=${MR_SERVICE_PATH}
- LOG_LEVEL=Debug
+ - KAFKA_BOOTSTRAP_SERVERS=${MR_KAFKA_SERVICE_PATH}
volumes:
- - ${DMAAP_MED_HOST_MNT_DIR}/$DMAAP_MED_DATA_FILE:${DMAAP_MED_DATA_MOUNT_PATH}/$DMAAP_MED_DATA_FILE
+ - ${DMAAP_MED_HOST_MNT_DIR}/${DMAAP_MED_CONTR_DATA_FILE}:${DMAAP_MED_DATA_MOUNT_PATH}/${DMAAP_MED_CONTR_DATA_FILE}
labels:
- "nrttest_app=DMAAPMED"
- "nrttest_dp=${DMAAP_MED_DISPLAY_NAME}"
--- /dev/null
+{
+ "types":
+ [
+ {
+ "id": "STD_Fault_Messages",
+ "dmaapTopicUrl": "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=15000&limit=100"
+ },
+ {
+ "id": "Kafka_TestTopic",
+ "kafkaInputTopic": "unauthenticated.dmaapmed_kafka.text"
+ }
+ ]
+ }
\ No newline at end of file
--- /dev/null
+################################################################################
+# Copyright (c) 2020 Nordix Foundation. #
+# #
+# Licensed under the Apache License, Version 2.0 (the \"License\"); #
+# you may not use this file except in compliance with the License. #
+# You may obtain a copy of the License at #
+# #
+# http://www.apache.org/licenses/LICENSE-2.0 #
+# #
+# Unless required by applicable law or agreed to in writing, software #
+# distributed under the License is distributed on an \"AS IS\" BASIS, #
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
+# See the License for the specific language governing permissions and #
+# limitations under the License. #
+################################################################################
+
+spring:
+ profiles:
+ active: prod
+ main:
+ allow-bean-definition-overriding: true
+ aop:
+ auto: false
+management:
+ endpoints:
+ web:
+ exposure:
+ # Enabling of springboot actuator features. See springboot documentation.
+ include: "loggers,logfile,health,info,metrics,threaddump,heapdump"
+
+logging:
+ # Configuration of logging
+ level:
+ ROOT: ERROR
+ org.springframework: ERROR
+ org.springframework.data: ERROR
+ org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
+ ${POLICY_AGENT_PKG_NAME}: INFO
+ file: /var/log/policy-agent/application.log
+
+server:
+  # Configuration of the HTTP/REST server. The parameters are defined and handled by the springboot framework.
+ # See springboot documentation.
+ port : 8433
+ http-port: 8081
+ ssl:
+ key-store-type: JKS
+ key-store-password: policy_agent
+ key-store: /opt/app/policy-agent/etc/cert/keystore.jks
+ key-password: policy_agent
+ key-alias: policy_agent
+app:
+ # Location of the component configuration file. The file will only be used if the Consul database is not used;
+ # configuration from the Consul will override the file.
+ filepath: /var/policy-management-service/application_configuration.json
+ # path where the service can store data
+ vardata-directory: /var/policy-management-service
+ # path to json schema for config validation
+ config-file-schema-path: /application_configuration_schema.json
+ webclient:
+ # Configuration of the trust store used for the HTTP client (outgoing requests)
+ # The file location and the password for the truststore is only relevant if trust-store-used == true
+ # Note that the same keystore as for the server is used.
+ trust-store-used: false
+ trust-store-password: policy_agent
+ trust-store: /opt/app/policy-agent/etc/cert/truststore.jks
+ # Configuration of usage of HTTP Proxy for the southbound accesses.
+ # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
+ http.proxy-host: $AGENT_HTTP_PROXY_CONFIG_HOST_NAME
+ http.proxy-port: $AGENT_HTTP_PROXY_CONFIG_PORT
+ http.proxy-type: HTTP
"taskParameters": [
{
"key": "ORU-ODU-Map",
- "value": "{\"ERICSSON-O-RU-11220\": \"HCL-O-DU-1122\",
- \"ERICSSON-O-RU-11221\": \"HCL-O-DU-1122\",
- \"ERICSSON-O-RU-11222\": \"HCL-O-DU-1122\",
- \"ERICSSON-O-RU-11223\": \"HCL-O-DU-1122\",
- \"ERICSSON-O-RU-11224\": \"HCL-O-DU-1123\",
- \"ERICSSON-O-RU-11225\": \"HCL-O-DU-1123\",
- \"ERICSSON-O-RU-11226\": \"HCL-O-DU-1123\",
- \"ERICSSON-O-RU-11227\": \"HCL-O-DU-1124\",
- \"ERICSSON-O-RU-11228\": \"HCL-O-DU-1125\",
- \"ERICSSON-O-RU-11229\": \"HCL-O-DU-1125\"}"
+ "value": "{\"ERICSSON-O-RU-11220\": \"O-DU-1122\",
+ \"ERICSSON-O-RU-11221\": \"O-DU-1122\",
+ \"ERICSSON-O-RU-11222\": \"O-DU-1122\",
+ \"ERICSSON-O-RU-11223\": \"O-DU-1122\",
+ \"ERICSSON-O-RU-11224\": \"O-DU-1123\",
+ \"ERICSSON-O-RU-11225\": \"O-DU-1123\",
+ \"ERICSSON-O-RU-11226\": \"O-DU-1123\",
+ \"ERICSSON-O-RU-11227\": \"O-DU-1124\",
+ \"ERICSSON-O-RU-11228\": \"O-DU-1125\",
+ \"ERICSSON-O-RU-11229\": \"O-DU-1125\"}"
}
]
}
"carrierTechnology": "RESTCLIENT",
"parameterClassName": "org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters",
"parameters": {
- "url": "http://sdnr-sim:9990/rests/data/network-topology:network-topology/topology=topology-netconf/node={OduId}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={OruId}",
+ "url": "http://sdnr-sim:9990/rests/data/network-topology:network-topology/topology=topology-netconf/node={OduId}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions={OduId}/radio-resource-management-policy-ratio=rrm-pol-1",
"httpMethod" : "PUT",
"httpHeaders" : [
["Authorization", "Basic YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="]
-{"tosca_definitions_version":"tosca_simple_yaml_1_1_0","topology_template":{"policies":[{"onap.policies.native.apex.LinkMonitor":{"type":"onap.policies.native.Apex","type_version":"1.0.0","name":"onap.policies.native.apex.LinkMonitor","version":"1.0.0","properties":{"engineServiceParameters":{"name":"LinkMonitorApexEngine","version":"0.0.1","id":101,"instanceCount":1,"deploymentPort":12345,"engineParameters":{"executorParameters":{"JAVASCRIPT":{"parameterClassName":"org.onap.policy.apex.plugins.executor.javascript.JavascriptExecutorParameters"}},"contextParameters":{"parameterClassName":"org.onap.policy.apex.context.parameters.ContextParameters","schemaParameters":{"Avro":{"parameterClassName":"org.onap.policy.apex.plugins.context.schema.avro.AvroSchemaHelperParameters"}}},"taskParameters":[{"key":"ORU-ODU-Map","value":"{\"ERICSSON-O-RU-11220\": \"HCL-O-DU-1122\",\n \"ERICSSON-O-RU-11221\": \"HCL-O-DU-1122\",\n \"ERICSSON-O-RU-11222\": \"HCL-O-DU-1122\",\n \"ERICSSON-O-RU-11223\": \"HCL-O-DU-1122\",\n \"ERICSSON-O-RU-11224\": \"HCL-O-DU-1123\",\n \"ERICSSON-O-RU-11225\": \"HCL-O-DU-1123\",\n \"ERICSSON-O-RU-11226\": \"HCL-O-DU-1123\",\n \"ERICSSON-O-RU-11227\": \"HCL-O-DU-1124\",\n \"ERICSSON-O-RU-11228\": \"HCL-O-DU-1125\",\n \"ERICSSON-O-RU-11229\": \"HCL-O-DU-1125\"}"}]},"policy_type_impl":{"apexPolicyModel":{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"keyInformation":{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"keyInfoMap":{"entry":[{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"value":{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"UUID":"cca47d74-7754-4a61-b163-ca31f66b157b","description":"Generated description for concept referred to by key \"ApexMessageOutputEvent:0.0.1\""}},{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"UUID":"a295d6a3-1b73-387e-abba-b41e9b608802","description":"Generated description for 
concept referred to by key \"CreateLinkClearedOutfieldsEvent:0.0.1\""}},{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"UUID":"fd594e88-411d-4a94-b2be-697b3a0d7adf","description":"This task creates the output fields when link failure is cleared."}},{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"UUID":"02be2b5d-45b7-3c54-ae54-97f2b5c30125","description":"Generated description for concept referred to by key \"CreateLinkFailureOutfieldsEvent:0.0.1\""}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"UUID":"ac3d9842-80af-4a98-951c-bd79a431c613","description":"This task the output fields when link failure is detected."}},{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"name":"LinkClearedTask","version":"0.0.1"},"UUID":"eecfde90-896c-4343-8f9c-2603ced94e2d","description":"This task sends a message to the output when link failure is cleared."}},{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"UUID":"c4500941-3f98-4080-a9cc-5b9753ed050b","description":"Generated description for concept referred to by key \"LinkFailureInputEvent:0.0.1\""}},{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"UUID":"3b3974fc-3012-3b02-9f33-c9d8eefe4dc1","description":"Generated description for concept referred to by key \"LinkFailureInputSchema:0.0.1\""}},{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"UUID":"4f04aa98-e917-4f4a-882a-c75ba5a99374","description":"Generated description for concept referred to by key 
\"LinkFailureOutputEvent:0.0.1\""}},{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"UUID":"2d1a7f6e-eb9a-3984-be1f-283d98111b84","description":"Generated description for concept referred to by key \"LinkFailureOutputSchema:0.0.1\""}},{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"name":"LinkFailureTask","version":"0.0.1"},"UUID":"3351b0f4-cf06-4fa2-8823-edf67bd30223","description":"This task updates the config for O-RU when link failure is detected."}},{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"UUID":"540226fb-55ee-4f0e-a444-983a0494818e","description":"This is the Apex Policy Model for link monitoring."}},{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"UUID":"27ad3e7e-fe3b-3bd6-9081-718705c2bcea","description":"Generated description for concept referred to by key \"LinkMonitorModel_Events:0.0.1\""}},{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"UUID":"ea0b5f58-eefd-358a-9660-840c640bf981","description":"Generated description for concept referred to by key \"LinkMonitorModel_KeyInfo:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"UUID":"ee9e0b0f-2b7d-3ab7-9a98-c5ec05ed823d","description":"Generated description for concept referred to by key \"LinkMonitorModel_Policies:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"UUID":"fa5f9b8f-796c-3c70-84e9-5140c958c4bb","description":"Generated description for concept referred to by key 
\"LinkMonitorModel_Schemas:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"UUID":"eec592f7-69d5-39a9-981a-e552f787ed01","description":"Generated description for concept referred to by key \"LinkMonitorModel_Tasks:0.0.1\""}},{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"UUID":"6c5e410f-489a-46ff-964e-982ce6e8b6d0","description":"Generated description for concept referred to by key \"LinkMonitorPolicy:0.0.1\""}},{"key":{"name":"MessageSchema","version":"0.0.1"},"value":{"key":{"name":"MessageSchema","version":"0.0.1"},"UUID":"ac4b34ac-39d6-3393-a267-8d5b84854018","description":"A schema for messages from apex"}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"UUID":"d48b619e-d00d-4008-b884-02d76ea4350b","description":"This task sends a message to the output when an event is received for which no policy has been defined."}},{"key":{"name":"OduIdSchema","version":"0.0.1"},"value":{"key":{"name":"OduIdSchema","version":"0.0.1"},"UUID":"50662174-a88b-3cbd-91bd-8e91b40b2660","description":"A schema for O-DU-ID"}},{"key":{"name":"OruIdSchema","version":"0.0.1"},"value":{"key":{"name":"OruIdSchema","version":"0.0.1"},"UUID":"54daf32b-015f-39cd-8530-a1175c5553e9","description":"A schema for 
O-RU-ID"}}]}},"policies":{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"policyMap":{"entry":[{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"value":{"policyKey":{"name":"LinkMonitorPolicy","version":"0.0.1"},"template":"Freestyle","state":{"entry":[{"key":"LinkClearedState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkClearedState"},"trigger":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"LinkClearedLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedLogic_Output_Direct"},"outgoingEvent":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"NULL","logicFlavour":"UNDEFINED","logic":""},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"LinkClearedTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedLogic_Output_Direct"}}}]}}},{"key":"LinkFailureOrClearedState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureOrClearedState"},"trigger":{"name":"LinkFailureInputEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"CreateLinkClearedOutfieldsLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkC
learedOutfieldsLogic_Output_Direct"},"outgoingEvent":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"nextState":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkClearedState"}}},{"key":"CreateLinkFailureOutfieldsLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsLogic_Output_Direct"},"outgoingEvent":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"nextState":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureState"}}},{"key":"NoPolicyDefinedLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedLogic_Output_Direct"},"outgoingEvent":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"TaskSelectionLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n 
*\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Selection Execution: '\"+executor.subject.id+\n \"'. InputFields: '\"+executor.inFields+\"'\");\n\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar commonEventHeader = linkFailureInput.get(\"event\").get(\"commonEventHeader\");\nvar domain = commonEventHeader.get(\"domain\");\n\ntaskFailure = executor.subject.getTaskKey(\"CreateLinkFailureOutfieldsTask\");\ntaskCleared = executor.subject.getTaskKey(\"CreateLinkClearedOutfieldsTask\");\ntaskDefault = executor.subject.getDefaultTaskKey();\n\nif (domain == \"fault\") {\n var faultFields = linkFailureInput.get(\"event\").get(\"faultFields\");\n var alarmCondition = faultFields.get(\"alarmCondition\");\n var eventSeverity = faultFields.get(\"eventSeverity\");\n if (alarmCondition == \"28\" && eventSeverity != \"NORMAL\") {\n taskFailure.copyTo(executor.selectedTask);\n } else if (alarmCondition == \"28\" && eventSeverity == \"NORMAL\") {\n taskCleared.copyTo(executor.selectedTask);\n } else {\n taskDefault.copyTo(executor.selectedTask);\n }\n} else {\n 
taskDefault.copyTo(executor.selectedTask);\n}\n\ntrue;"},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkClearedOutfieldsTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkClearedOutfieldsLogic_Output_Direct"}}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsLogic_Output_Direct"}}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedLogic_Output_Direct"}}}]}}},{"key":"LinkFailureState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureState"},"trigger":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"LinkFailureLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureLogic_Output_Direct"},"outgoingEvent":{"name":"LinkFailureOutputEvent","ver
sion":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"NULL","logicFlavour":"UNDEFINED","logic":""},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"LinkFailureTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureLogic_Output_Direct"}}}]}}}]},"firstState":"LinkFailureOrClearedState"}}]}},"tasks":{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"taskMap":{"entry":[{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under 
the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar oruId = linkFailureInput.get(\"event\").get(\"commonEventHeader\").get(\"sourceName\");\n\nexecutor.outFields.put(\"OruId\", oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar returnValue = true;\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar oruId = linkFailureInput.get(\"event\").get(\"commonEventHeader\").get(\"sourceName\");\nvar oruOduMap = JSON.parse(executor.parameters.get(\"ORU-ODU-Map\"));\n\nif (oruId in oruOduMap) {\n var oduId = oruOduMap[oruId];\n executor.outFields.put(\"OruId\", oruId);\n executor.outFields.put(\"OduId\", oduId);\n executor.logger.info(executor.outFields);\n} else {\n executor.message = \"No O-RU found in the config with this ID: \" + oruId;\n returnValue = false;\n}\n\nreturnValue;"}}},{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"name":"LinkClearedTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar oruId = executor.inFields.get(\"OruId\");\n\nexecutor.outFields.put(\"message\", \"CLEARED link failure for O-RU: \" + oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"name":"LinkFailureTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"LinkFailureOutput","value":{"key":"LinkFailureOutput","fieldSchemaKey":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in 
writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar linkFailureOutput = executor.subject.getOutFieldSchemaHelper(\"LinkFailureOutput\").createNewInstance();\n\nvar oruId = executor.inFields.get(\"OruId\");\nvar oduId = executor.inFields.get(\"OduId\");\n\nvar unlockMessageArray = new java.util.ArrayList();\nfor (var i = 0; i < 1; i++) {\n unlockMessageArray.add({\n \"name\" : oruId,\n \"administrative_DasH_state\" : \"UNLOCKED\"\n });\n}\n\nlinkFailureOutput.put(\"o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_du_DasH_to_DasH_ru_DasH_connection\", unlockMessageArray);\nexecutor.outFields.put(\"LinkFailureOutput\", linkFailureOutput.toString());\n\nexecutor.getExecutionProperties().setProperty(\"OduId\", oduId);\nexecutor.getExecutionProperties().setProperty(\"OruId\", oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright 
(C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nexecutor.outFields.put(\"message\", \"No policy defined for this event\");\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}}]}},"events":{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"eventMap":{"entry":[{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"value":{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsEve
nt","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"DMAAP","target":"APEX","parameter":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"OAM","parameter":{"entry":[{"key":"LinkFailureOutput","value":{"key":"LinkFailureOutput","fieldSchemaKey":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"optional":false}}]}}}]}},"schemas":{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"schemas":{"entry":[{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"schemaFlavour":"Avro","schemaDefinition":"{\n \"type\": \"record\",\n \"name\": \"Link_Failure_Input\",\n \"fields\": [\n {\n \"name\": \"event\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Event_Type\",\n \"fields\": [\n {\n \"name\": \"commonEventHeader\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Common_Event_Header_Type\",\n \"fields\": [\n {\n \"name\": \"domain\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sequence\",\n \"type\": \"int\"\n },\n 
{\n \"name\": \"priority\",\n \"type\": \"string\"\n },\n {\n \"name\": \"reportingEntityId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"reportingEntityName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sourceId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sourceName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"startEpochMicrosec\",\n \"type\": \"string\"\n },\n {\n \"name\": \"lastEpochMicrosec\",\n \"type\": \"string\"\n },\n {\n \"name\": \"nfNamingCode\",\n \"type\": \"string\"\n },\n {\n \"name\": \"nfVendorName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"timeZoneOffset\",\n \"type\": \"string\"\n },\n {\n \"name\": \"version\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vesEventListenerVersion\",\n \"type\": \"string\"\n }\n ]\n }\n },\n {\n \"name\": \"faultFields\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Fault_Fields_Type\",\n \"fields\": [\n {\n \"name\": \"faultFieldsVersion\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmCondition\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmInterfaceA\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventSourceType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"specificProblem\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventSeverity\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vfStatus\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmAdditionalInformation\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Alarm_Additional_Information_Type\",\n \"fields\": [\n {\n \"name\": \"eventTime\",\n \"type\": \"string\"\n },\n {\n \"name\": \"equipType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vendor\",\n \"type\": \"string\"\n },\n {\n \"name\": \"model\",\n \"type\": \"string\"\n }\n ]\n }\n }\n ]\n }\n }\n ]\n }\n }\n ]\n}"}},{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"schemaFlavour":"Avro","schemaDefinition":"{\n \"type\": \"record\",\n \"name\": 
\"Link_Failure_Output\",\n \"fields\": [\n {\n \"name\": \"o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_du_DasH_to_DasH_ru_DasH_connection\",\n \"type\": {\n \t\"type\": \"array\",\n \t\"items\": {\n\t\t \"name\": \"Config_Change_Message\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\": \"name\",\n \"type\": \"string\"\n },\n\t\t\t{\n \"name\": \"administrative_DasH_state\",\n \"type\": \"string\"\n }\n ]\n }\n\t }\n }\n ]\n}"}},{"key":{"name":"MessageSchema","version":"0.0.1"},"value":{"key":{"name":"MessageSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}},{"key":{"name":"OduIdSchema","version":"0.0.1"},"value":{"key":{"name":"OduIdSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}},{"key":{"name":"OruIdSchema","version":"0.0.1"},"value":{"key":{"name":"OruIdSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}}]}}}}},"eventOutputParameters":{"RestProducer":{"carrierTechnologyParameters":{"carrierTechnology":"RESTCLIENT","parameterClassName":"org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters","parameters":{"url":"http://sdnr-sim:9990/rests/data/network-topology:network-topology/topology=topology-netconf/node={OduId}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={OruId}","httpMethod":"PUT","httpHeaders":[["Authorization","Basic 
YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="]]}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"pojoField":"LinkFailureOutput"}},"eventNameFilter":"LinkFailureOutputEvent"},"StdOutProducer":{"carrierTechnologyParameters":{"carrierTechnology":"FILE","parameters":{"standardIo":true}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"pojoField":"message"}},"eventNameFilter":"ApexMessageOutputEvent"}},"eventInputParameters":{"DMaaPConsumer":{"carrierTechnologyParameters":{"carrierTechnology":"RESTCLIENT","parameterClassName":"org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters","parameters":{"url":"http://onap-dmaap:3904/events/unauthenticated.SEC_FAULT_OUTPUT/users/link-monitor-nonrtric?timeout=15000&limit=100"}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"versionAlias":"version","pojoField":"LinkFailureInput"}},"eventName":"LinkFailureInputEvent"}}}}}]}}
\ No newline at end of file
+{"tosca_definitions_version":"tosca_simple_yaml_1_1_0","topology_template":{"policies":[{"onap.policies.native.apex.LinkMonitor":{"type":"onap.policies.native.Apex","type_version":"1.0.0","name":"onap.policies.native.apex.LinkMonitor","version":"1.0.0","properties":{"engineServiceParameters":{"name":"LinkMonitorApexEngine","version":"0.0.1","id":101,"instanceCount":1,"deploymentPort":12345,"engineParameters":{"executorParameters":{"JAVASCRIPT":{"parameterClassName":"org.onap.policy.apex.plugins.executor.javascript.JavascriptExecutorParameters"}},"contextParameters":{"parameterClassName":"org.onap.policy.apex.context.parameters.ContextParameters","schemaParameters":{"Avro":{"parameterClassName":"org.onap.policy.apex.plugins.context.schema.avro.AvroSchemaHelperParameters"}}},"taskParameters":[{"key":"ORU-ODU-Map","value":"{\"ERICSSON-O-RU-11220\": \"O-DU-1122\",\r\n \"ERICSSON-O-RU-11221\": \"O-DU-1122\",\r\n \"ERICSSON-O-RU-11222\": \"O-DU-1122\",\r\n \"ERICSSON-O-RU-11223\": \"O-DU-1122\",\r\n \"ERICSSON-O-RU-11224\": \"O-DU-1123\",\r\n \"ERICSSON-O-RU-11225\": \"O-DU-1123\",\r\n \"ERICSSON-O-RU-11226\": \"O-DU-1123\",\r\n \"ERICSSON-O-RU-11227\": \"O-DU-1124\",\r\n \"ERICSSON-O-RU-11228\": \"O-DU-1125\",\r\n \"ERICSSON-O-RU-11229\": \"O-DU-1125\"}"}]},"policy_type_impl":{"apexPolicyModel":{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"keyInformation":{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"keyInfoMap":{"entry":[{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"value":{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"UUID":"cca47d74-7754-4a61-b163-ca31f66b157b","description":"Generated description for concept referred to by key \"ApexMessageOutputEvent:0.0.1\""}},{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"UUID":"a295d6a3-1b73-387e-abba-b41e9b608802","description":"Generated description for concept referred to by key 
\"CreateLinkClearedOutfieldsEvent:0.0.1\""}},{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"UUID":"fd594e88-411d-4a94-b2be-697b3a0d7adf","description":"This task creates the output fields when link failure is cleared."}},{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"UUID":"02be2b5d-45b7-3c54-ae54-97f2b5c30125","description":"Generated description for concept referred to by key \"CreateLinkFailureOutfieldsEvent:0.0.1\""}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"UUID":"ac3d9842-80af-4a98-951c-bd79a431c613","description":"This task the output fields when link failure is detected."}},{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"name":"LinkClearedTask","version":"0.0.1"},"UUID":"eecfde90-896c-4343-8f9c-2603ced94e2d","description":"This task sends a message to the output when link failure is cleared."}},{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"UUID":"c4500941-3f98-4080-a9cc-5b9753ed050b","description":"Generated description for concept referred to by key \"LinkFailureInputEvent:0.0.1\""}},{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"UUID":"3b3974fc-3012-3b02-9f33-c9d8eefe4dc1","description":"Generated description for concept referred to by key \"LinkFailureInputSchema:0.0.1\""}},{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"UUID":"4f04aa98-e917-4f4a-882a-c75ba5a99374","description":"Generated description for concept referred to by key 
\"LinkFailureOutputEvent:0.0.1\""}},{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"UUID":"2d1a7f6e-eb9a-3984-be1f-283d98111b84","description":"Generated description for concept referred to by key \"LinkFailureOutputSchema:0.0.1\""}},{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"name":"LinkFailureTask","version":"0.0.1"},"UUID":"3351b0f4-cf06-4fa2-8823-edf67bd30223","description":"This task updates the config for O-RU when link failure is detected."}},{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel","version":"0.0.1"},"UUID":"540226fb-55ee-4f0e-a444-983a0494818e","description":"This is the Apex Policy Model for link monitoring."}},{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"UUID":"27ad3e7e-fe3b-3bd6-9081-718705c2bcea","description":"Generated description for concept referred to by key \"LinkMonitorModel_Events:0.0.1\""}},{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_KeyInfo","version":"0.0.1"},"UUID":"ea0b5f58-eefd-358a-9660-840c640bf981","description":"Generated description for concept referred to by key \"LinkMonitorModel_KeyInfo:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"UUID":"ee9e0b0f-2b7d-3ab7-9a98-c5ec05ed823d","description":"Generated description for concept referred to by key \"LinkMonitorModel_Policies:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"UUID":"fa5f9b8f-796c-3c70-84e9-5140c958c4bb","description":"Generated description for concept referred to by key 
\"LinkMonitorModel_Schemas:0.0.1\""}},{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"UUID":"eec592f7-69d5-39a9-981a-e552f787ed01","description":"Generated description for concept referred to by key \"LinkMonitorModel_Tasks:0.0.1\""}},{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"value":{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"UUID":"6c5e410f-489a-46ff-964e-982ce6e8b6d0","description":"Generated description for concept referred to by key \"LinkMonitorPolicy:0.0.1\""}},{"key":{"name":"MessageSchema","version":"0.0.1"},"value":{"key":{"name":"MessageSchema","version":"0.0.1"},"UUID":"ac4b34ac-39d6-3393-a267-8d5b84854018","description":"A schema for messages from apex"}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"UUID":"d48b619e-d00d-4008-b884-02d76ea4350b","description":"This task sends a message to the output when an event is received for which no policy has been defined."}},{"key":{"name":"OduIdSchema","version":"0.0.1"},"value":{"key":{"name":"OduIdSchema","version":"0.0.1"},"UUID":"50662174-a88b-3cbd-91bd-8e91b40b2660","description":"A schema for O-DU-ID"}},{"key":{"name":"OruIdSchema","version":"0.0.1"},"value":{"key":{"name":"OruIdSchema","version":"0.0.1"},"UUID":"54daf32b-015f-39cd-8530-a1175c5553e9","description":"A schema for 
O-RU-ID"}}]}},"policies":{"key":{"name":"LinkMonitorModel_Policies","version":"0.0.1"},"policyMap":{"entry":[{"key":{"name":"LinkMonitorPolicy","version":"0.0.1"},"value":{"policyKey":{"name":"LinkMonitorPolicy","version":"0.0.1"},"template":"Freestyle","state":{"entry":[{"key":"LinkClearedState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkClearedState"},"trigger":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"LinkClearedLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedLogic_Output_Direct"},"outgoingEvent":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"NULL","logicFlavour":"UNDEFINED","logic":""},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"LinkClearedTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkClearedState","localName":"LinkClearedLogic_Output_Direct"}}}]}}},{"key":"LinkFailureOrClearedState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureOrClearedState"},"trigger":{"name":"LinkFailureInputEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"CreateLinkClearedOutfieldsLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkC
learedOutfieldsLogic_Output_Direct"},"outgoingEvent":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"nextState":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkClearedState"}}},{"key":"CreateLinkFailureOutfieldsLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsLogic_Output_Direct"},"outgoingEvent":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"nextState":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureState"}}},{"key":"NoPolicyDefinedLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedLogic_Output_Direct"},"outgoingEvent":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"TaskSelectionLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n 
*\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Selection Execution: '\"+executor.subject.id+\n \"'. InputFields: '\"+executor.inFields+\"'\");\n\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar commonEventHeader = linkFailureInput.get(\"event\").get(\"commonEventHeader\");\nvar domain = commonEventHeader.get(\"domain\");\n\ntaskFailure = executor.subject.getTaskKey(\"CreateLinkFailureOutfieldsTask\");\ntaskCleared = executor.subject.getTaskKey(\"CreateLinkClearedOutfieldsTask\");\ntaskDefault = executor.subject.getDefaultTaskKey();\n\nif (domain == \"fault\") {\n var faultFields = linkFailureInput.get(\"event\").get(\"faultFields\");\n var alarmCondition = faultFields.get(\"alarmCondition\");\n var eventSeverity = faultFields.get(\"eventSeverity\");\n if (alarmCondition == \"28\" && eventSeverity != \"NORMAL\") {\n taskFailure.copyTo(executor.selectedTask);\n } else if (alarmCondition == \"28\" && eventSeverity == \"NORMAL\") {\n taskCleared.copyTo(executor.selectedTask);\n } else {\n taskDefault.copyTo(executor.selectedTask);\n }\n} else {\n 
taskDefault.copyTo(executor.selectedTask);\n}\n\ntrue;"},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkClearedOutfieldsTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkClearedOutfieldsLogic_Output_Direct"}}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"CreateLinkFailureOutfieldsLogic_Output_Direct"}}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureOrClearedState","localName":"NoPolicyDefinedLogic_Output_Direct"}}}]}}},{"key":"LinkFailureState","value":{"stateKey":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"NULL","localName":"LinkFailureState"},"trigger":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"stateOutputs":{"entry":[{"key":"LinkFailureLogic_Output_Direct","value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureLogic_Output_Direct"},"outgoingEvent":{"name":"LinkFailureOutputEvent","ver
sion":"0.0.1"},"nextState":{"parentKeyName":"NULL","parentKeyVersion":"0.0.0","parentLocalName":"NULL","localName":"NULL"}}}]},"contextAlbumReference":[],"taskSelectionLogic":{"key":"NULL","logicFlavour":"UNDEFINED","logic":""},"stateFinalizerLogicMap":{"entry":[]},"defaultTask":{"name":"LinkFailureTask","version":"0.0.1"},"taskReferences":{"entry":[{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureTask"},"outputType":"DIRECT","output":{"parentKeyName":"LinkMonitorPolicy","parentKeyVersion":"0.0.1","parentLocalName":"LinkFailureState","localName":"LinkFailureLogic_Output_Direct"}}}]}}}]},"firstState":"LinkFailureOrClearedState"}}]}},"tasks":{"key":{"name":"LinkMonitorModel_Tasks","version":"0.0.1"},"taskMap":{"entry":[{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under 
the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar oruId = linkFailureInput.get(\"event\").get(\"commonEventHeader\").get(\"sourceName\");\n\nexecutor.outFields.put(\"OruId\", oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar returnValue = true;\nvar linkFailureInput = executor.inFields.get(\"LinkFailureInput\");\nvar oruId = linkFailureInput.get(\"event\").get(\"commonEventHeader\").get(\"sourceName\");\nvar oruOduMap = JSON.parse(executor.parameters.get(\"ORU-ODU-Map\"));\n\nif (oruId in oruOduMap) {\n var oduId = oruOduMap[oruId];\n executor.outFields.put(\"OruId\", oruId);\n executor.outFields.put(\"OduId\", oduId);\n executor.logger.info(executor.outFields);\n} else {\n executor.message = \"No O-RU found in the config with this ID: \" + oruId;\n returnValue = false;\n}\n\nreturnValue;"}}},{"key":{"name":"LinkClearedTask","version":"0.0.1"},"value":{"key":{"name":"LinkClearedTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * 
http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar oruId = executor.inFields.get(\"OruId\");\n\nexecutor.outFields.put(\"message\", \"CLEARED link failure for O-RU: \" + oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"LinkFailureTask","version":"0.0.1"},"value":{"key":{"name":"LinkFailureTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"LinkFailureOutput","value":{"key":"LinkFailureOutput","fieldSchemaKey":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in 
writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. Input Fields: '\"+executor.inFields+\"'\");\n\nvar linkFailureOutput = executor.subject.getOutFieldSchemaHelper(\"LinkFailureOutput\").createNewInstance();\n\nvar oruId = executor.inFields.get(\"OruId\");\nvar oduId = executor.inFields.get(\"OduId\");\n\nvar unlockMessageArray = new java.util.ArrayList();\nfor (var i = 0; i < 1; i++) {\n unlockMessageArray.add({\n \"id\":\"rrm-pol-1\",\n \"radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio\":25,\n \"radio_DasH_resource_DasH_management_DasH_policy_DasH_members\":\n [\n {\n \"mobile_DasH_country_DasH_code\":\"310\",\n \"mobile_DasH_network_DasH_code\":\"150\",\n \"slice_DasH_differentiator\":1,\n \"slice_DasH_service_DasH_type\":1\n }\n ],\n \"radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio\":15,\n \"user_DasH_label\":\"rrm-pol-1\",\n \"resource_DasH_type\":\"prb\",\n \"radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio\":20,\n \"administrative_DasH_state\":\"unlocked\"\n });\n}\n\nlinkFailureOutput.put(\"o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio\", unlockMessageArray);\nexecutor.outFields.put(\"LinkFailureOutput\", linkFailureOutput.toString());\n\nexecutor.getExecutionProperties().setProperty(\"OduId\", oduId);\nexecutor.getExecutionProperties().setProperty(\"OruId\", 
oruId);\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}},{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"value":{"key":{"name":"NoPolicyDefinedTask","version":"0.0.1"},"inputFields":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]},"outputFields":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]},"taskParameters":{"entry":[]},"contextAlbumReference":[],"taskLogic":{"key":"TaskLogic","logicFlavour":"JAVASCRIPT","logic":"/*\n * ============LICENSE_START=======================================================\n * Copyright (C) 2021 Nordix Foundation.\n * ================================================================================\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n *\n * SPDX-License-Identifier: Apache-2.0\n * ============LICENSE_END=========================================================\n */\n\nexecutor.logger.info(\"Task Execution: '\"+executor.subject.id+\"'. 
Input Fields: '\"+executor.inFields+\"'\");\n\nexecutor.outFields.put(\"message\", \"No policy defined for this event\");\n\nexecutor.logger.info(executor.outFields);\n\ntrue;"}}}]}},"events":{"key":{"name":"LinkMonitorModel_Events","version":"0.0.1"},"eventMap":{"entry":[{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"value":{"key":{"name":"ApexMessageOutputEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"message","value":{"key":"message","fieldSchemaKey":{"name":"MessageSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkClearedOutfieldsEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"value":{"key":{"name":"CreateLinkFailureOutfieldsEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"APEX","target":"APEX","parameter":{"entry":[{"key":"OduId","value":{"key":"OduId","fieldSchemaKey":{"name":"OduIdSchema","version":"0.0.1"},"optional":false}},{"key":"OruId","value":{"key":"OruId","fieldSchemaKey":{"name":"OruIdSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputEvent","version":"0.0.1"},"nameSpace":"org.onap.policy.apex.auth.clieditor","source":"DMAAP","target":"APEX","parameter":{"entry":[{"key":"LinkFailureInput","value":{"key":"LinkFailureInput","fieldSchemaKey":{"name":"LinkFailureInputSchema","version":"0.0.1"},"optional":false}}]}}},{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputEvent","version":"0.0.1"},"nameSpace":"org
.onap.policy.apex.auth.clieditor","source":"APEX","target":"OAM","parameter":{"entry":[{"key":"LinkFailureOutput","value":{"key":"LinkFailureOutput","fieldSchemaKey":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"optional":false}}]}}}]}},"schemas":{"key":{"name":"LinkMonitorModel_Schemas","version":"0.0.1"},"schemas":{"entry":[{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureInputSchema","version":"0.0.1"},"schemaFlavour":"Avro","schemaDefinition":"{\n \"type\": \"record\",\n \"name\": \"Link_Failure_Input\",\n \"fields\": [\n {\n \"name\": \"event\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Event_Type\",\n \"fields\": [\n {\n \"name\": \"commonEventHeader\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Common_Event_Header_Type\",\n \"fields\": [\n {\n \"name\": \"domain\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sequence\",\n \"type\": \"int\"\n },\n {\n \"name\": \"priority\",\n \"type\": \"string\"\n },\n {\n \"name\": \"reportingEntityId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"reportingEntityName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sourceId\",\n \"type\": \"string\"\n },\n {\n \"name\": \"sourceName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"startEpochMicrosec\",\n \"type\": \"string\"\n },\n {\n \"name\": \"lastEpochMicrosec\",\n \"type\": \"string\"\n },\n {\n \"name\": \"nfNamingCode\",\n \"type\": \"string\"\n },\n {\n \"name\": \"nfVendorName\",\n \"type\": \"string\"\n },\n {\n \"name\": \"timeZoneOffset\",\n \"type\": \"string\"\n },\n {\n \"name\": \"version\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vesEventListenerVersion\",\n \"type\": \"string\"\n }\n ]\n }\n },\n {\n \"name\": \"faultFields\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Fault_Fields_Type\",\n 
\"fields\": [\n {\n \"name\": \"faultFieldsVersion\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmCondition\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmInterfaceA\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventSourceType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"specificProblem\",\n \"type\": \"string\"\n },\n {\n \"name\": \"eventSeverity\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vfStatus\",\n \"type\": \"string\"\n },\n {\n \"name\": \"alarmAdditionalInformation\",\n \"type\": {\n \"type\": \"record\",\n \"name\": \"Alarm_Additional_Information_Type\",\n \"fields\": [\n {\n \"name\": \"eventTime\",\n \"type\": \"string\"\n },\n {\n \"name\": \"equipType\",\n \"type\": \"string\"\n },\n {\n \"name\": \"vendor\",\n \"type\": \"string\"\n },\n {\n \"name\": \"model\",\n \"type\": \"string\"\n }\n ]\n }\n }\n ]\n }\n }\n ]\n }\n }\n ]\n}"}},{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"value":{"key":{"name":"LinkFailureOutputSchema","version":"0.0.1"},"schemaFlavour":"Avro","schemaDefinition":"{\n \"name\": \"Link_Failure_Output\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\": \"o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio\",\n \"type\": {\n \"type\": \"array\",\n \"items\": {\n \"name\": \"o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio_record\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\": \"id\",\n \"type\": \"string\"\n },\n {\n \"name\": \"radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio\",\n \"type\": \"int\"\n },\n {\n \"name\": \"radio_DasH_resource_DasH_management_DasH_policy_DasH_members\",\n \"type\": {\n \"type\": \"array\",\n \"items\": {\n \"name\": \"radio_DasH_resource_DasH_management_DasH_policy_DasH_members_record\",\n \"type\": \"record\",\n \"fields\": [\n {\n \"name\": \"mobile_DasH_country_DasH_code\",\n \"type\": 
\"string\"\n },\n {\n \"name\": \"mobile_DasH_network_DasH_code\",\n \"type\": \"string\"\n },\n {\n \"name\": \"slice_DasH_differentiator\",\n \"type\": \"int\"\n },\n {\n \"name\": \"slice_DasH_service_DasH_type\",\n \"type\": \"int\"\n }\n ]\n }\n }\n },\n {\n \"name\": \"radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio\",\n \"type\": \"int\"\n },\n {\n \"name\": \"user_DasH_label\",\n \"type\": \"string\"\n },\n {\n \"name\": \"resource_DasH_type\",\n \"type\": \"string\"\n },\n {\n \"name\": \"radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio\",\n \"type\": \"int\"\n },\n {\n \"name\": \"administrative_DasH_state\",\n \"type\": \"string\"\n }\n ]\n }\n }\n }\n ]\n}"}},{"key":{"name":"MessageSchema","version":"0.0.1"},"value":{"key":{"name":"MessageSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}},{"key":{"name":"OduIdSchema","version":"0.0.1"},"value":{"key":{"name":"OduIdSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}},{"key":{"name":"OruIdSchema","version":"0.0.1"},"value":{"key":{"name":"OruIdSchema","version":"0.0.1"},"schemaFlavour":"Java","schemaDefinition":"java.lang.String"}}]}}}}},"eventOutputParameters":{"RestProducer":{"carrierTechnologyParameters":{"carrierTechnology":"RESTCLIENT","parameterClassName":"org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters","parameters":{"url":"http://sdnr-sim:9990/rests/data/network-topology:network-topology/topology=topology-netconf/node={OduId}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions={OduId}/radio-resource-management-policy-ratio=rrm-pol-1","httpMethod":"PUT","httpHeaders":[["Authorization","Basic 
YWRtaW46S3A4Yko0U1hzek0wV1hsaGFrM2VIbGNzZTJnQXc4NHZhb0dHbUp2VXkyVQ=="]]}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"pojoField":"LinkFailureOutput"}},"eventNameFilter":"LinkFailureOutputEvent"},"StdOutProducer":{"carrierTechnologyParameters":{"carrierTechnology":"FILE","parameters":{"standardIo":true}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"pojoField":"message"}},"eventNameFilter":"ApexMessageOutputEvent"}},"eventInputParameters":{"DMaaPConsumer":{"carrierTechnologyParameters":{"carrierTechnology":"RESTCLIENT","parameterClassName":"org.onap.policy.apex.plugins.event.carrier.restclient.RestClientCarrierTechnologyParameters","parameters":{"url":"http://onap-dmaap:3904/events/unauthenticated.SEC_FAULT_OUTPUT/users/link-monitor-nonrtric?timeout=15000&limit=100"}},"eventProtocolParameters":{"eventProtocol":"JSON","parameters":{"versionAlias":"version","pojoField":"LinkFailureInput"}},"eventName":"LinkFailureInputEvent"}}}}}]}}
\ No newline at end of file
var unlockMessageArray = new java.util.ArrayList();
for (var i = 0; i < 1; i++) {
unlockMessageArray.add({
- "name" : oruId,
- "administrative_DasH_state" : "UNLOCKED"
- });
+ "id":"rrm-pol-1",
+ "radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio":25,
+ "radio_DasH_resource_DasH_management_DasH_policy_DasH_members":
+ [
+ {
+ "mobile_DasH_country_DasH_code":"310",
+ "mobile_DasH_network_DasH_code":"150",
+ "slice_DasH_differentiator":1,
+ "slice_DasH_service_DasH_type":1
+ }
+ ],
+ "radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio":15,
+ "user_DasH_label":"rrm-pol-1",
+ "resource_DasH_type":"prb",
+ "radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio":20,
+ "administrative_DasH_state":"unlocked"
+ });
}
-linkFailureOutput.put("o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_du_DasH_to_DasH_ru_DasH_connection", unlockMessageArray);
+linkFailureOutput.put("o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio", unlockMessageArray);
executor.outFields.put("LinkFailureOutput", linkFailureOutput.toString());
executor.getExecutionProperties().setProperty("OduId", oduId);
{
- "type": "record",
- "name": "Link_Failure_Output",
- "fields": [
- {
- "name": "o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_du_DasH_to_DasH_ru_DasH_connection",
- "type": {
- "type": "array",
- "items": {
- "name": "Config_Change_Message",
- "type": "record",
- "fields": [
- {
- "name": "name",
- "type": "string"
- },
- {
- "name": "administrative_DasH_state",
- "type": "string"
- }
- ]
+ "name": "Link_Failure_Output",
+ "type": "record",
+ "fields": [
+ {
+ "name": "o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "o_DasH_ran_DasH_sc_DasH_du_DasH_hello_DasH_world_ColoN_radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio_record",
+ "type": "record",
+ "fields": [
+ {
+ "name": "id",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio",
+ "type": "int"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_members",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_members_record",
+ "type": "record",
+ "fields": [
+ {
+ "name": "mobile_DasH_country_DasH_code",
+ "type": "string"
+ },
+ {
+ "name": "mobile_DasH_network_DasH_code",
+ "type": "string"
+ },
+ {
+ "name": "slice_DasH_differentiator",
+ "type": "int"
+ },
+ {
+ "name": "slice_DasH_service_DasH_type",
+ "type": "int"
+ }
+ ]
}
- }
+ }
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio",
+ "type": "int"
+ },
+ {
+ "name": "user_DasH_label",
+ "type": "string"
+ },
+ {
+ "name": "resource_DasH_type",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio",
+ "type": "int"
+ },
+ {
+ "name": "administrative_DasH_state",
+ "type": "string"
+ }
+ ]
}
- ]
-}
+ }
+ }
+ ]
+}
\ No newline at end of file
SDNRPassword string
}
-const rawSdnrPath = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=[O-RU-ID]"
-const unlockMessage = `{"o-ran-sc-du-hello-world:du-to-ru-connection": [{"name":"[O-RU-ID]","administrative-state":"UNLOCKED"}]}`
+const rawSdnrPath = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=[O-DU-ID]/radio-resource-management-policy-ratio=rrm-pol-1"
+const unlockMessage = `{"o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":[{"id":"rrm-pol-1","radio-resource-management-policy-max-ratio":25,"radio-resource-management-policy-members":[{"mobile-country-code":"310","mobile-network-code":"150","slice-differentiator":1,"slice-service-type":1}],"radio-resource-management-policy-min-ratio":15,"user-label":"rrm-pol-1","resource-type":"prb","radio-resource-management-policy-dedicated-ratio":20,"administrative-state":"unlocked"}]}`
type LinkFailureHandler struct {
lookupService repository.LookupService
func (lfh LinkFailureHandler) sendUnlockMessage(oRuId string) {
if oDuId, err := lfh.lookupService.GetODuID(oRuId); err == nil {
- sdnrPath := getSdnrPath(oRuId, oDuId)
- unlockMessage := lfh.getUnlockMessage(oRuId)
+ sdnrPath := getSdnrPath(oDuId)
if error := restclient.Put(lfh.config.SDNRAddress+sdnrPath, unlockMessage, lfh.client, lfh.config.SDNRUser, lfh.config.SDNRPassword); error == nil {
log.Debugf("Sent unlock message for O-RU: %v to O-DU: %v.", oRuId, oDuId)
} else {
}
-func getSdnrPath(oRuId string, oDuId string) string {
- sdnrPath := strings.Replace(rawSdnrPath, "[O-DU-ID]", oDuId, 1)
- sdnrPath = strings.Replace(sdnrPath, "[O-RU-ID]", oRuId, 1)
+func getSdnrPath(oDuId string) string {
+ sdnrPath := strings.Replace(rawSdnrPath, "[O-DU-ID]", oDuId, -1)
return sdnrPath
}
-
-func (lfh LinkFailureHandler) getUnlockMessage(oRuId string) string {
- return strings.Replace(unlockMessage, "[O-RU-ID]", oRuId, 1)
-}
lookupServiceMock := mocks.LookupService{}
- lookupServiceMock.On("GetODuID", mock.Anything).Return("HCL-O-DU-1122", nil)
+ lookupServiceMock.On("GetODuID", mock.Anything).Return("O-DU-1122", nil)
handlerUnderTest := NewLinkFailureHandler(&lookupServiceMock, Configuration{
SDNRAddress: "http://localhost:9990",
assertions.Equal(http.MethodPut, actualRequest.Method)
assertions.Equal("http", actualRequest.URL.Scheme)
assertions.Equal("localhost:9990", actualRequest.URL.Host)
- expectedSdnrPath := "/rests/data/network-topology:network-topology/topology=topology-netconf/node=HCL-O-DU-1122/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=ERICSSON-O-RU-11220"
+ expectedSdnrPath := "/rests/data/network-topology:network-topology/topology=topology-netconf/node=O-DU-1122/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=O-DU-1122/radio-resource-management-policy-ratio=rrm-pol-1"
assertions.Equal(expectedSdnrPath, actualRequest.URL.Path)
assertions.Equal("application/json; charset=utf-8", actualRequest.Header.Get("Content-Type"))
tempRequest, _ := http.NewRequest("", "", nil)
tempRequest.SetBasicAuth("admin", "pwd")
assertions.Equal(tempRequest.Header.Get("Authorization"), actualRequest.Header.Get("Authorization"))
body, _ := ioutil.ReadAll(actualRequest.Body)
- expectedBody := []byte(`{"o-ran-sc-du-hello-world:du-to-ru-connection": [{"name":"ERICSSON-O-RU-11220","administrative-state":"UNLOCKED"}]}`)
+ expectedBody := []byte(`{"o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":[{"id":"rrm-pol-1","radio-resource-management-policy-max-ratio":25,"radio-resource-management-policy-members":[{"mobile-country-code":"310","mobile-network-code":"150","slice-differentiator":1,"slice-service-type":1}],"radio-resource-management-policy-min-ratio":15,"user-label":"rrm-pol-1","resource-type":"prb","radio-resource-management-policy-dedicated-ratio":20,"administrative-state":"unlocked"}]}`)
assertions.Equal(expectedBody, body)
clientMock.AssertNumberOfCalls(t, "Do", 1)
logString := buf.String()
assertions.Contains(logString, "Sent unlock message")
assertions.Contains(logString, "O-RU: ERICSSON-O-RU-11220")
- assertions.Contains(logString, "O-DU: HCL-O-DU-1122")
+ assertions.Contains(logString, "O-DU: O-DU-1122")
}
func newRequest(method string, url string, bodyAsBytes []byte, t *testing.T) *http.Request {
lookupServiceMock := mocks.LookupService{}
- lookupServiceMock.On("GetODuID", mock.Anything).Return("HCL-O-DU-1122", nil)
+ lookupServiceMock.On("GetODuID", mock.Anything).Return("O-DU-1122", nil)
handlerUnderTest := NewLinkFailureHandler(&lookupServiceMock, Configuration{}, nil)
-ERICSSON-O-RU-11220,HCL-O-DU-1122
-ERICSSON-O-RU-11221,HCL-O-DU-1122
-ERICSSON-O-RU-11222,HCL-O-DU-1122
-ERICSSON-O-RU-11223,HCL-O-DU-1122
-ERICSSON-O-RU-11223,HCL-O-DU-1122
-ERICSSON-O-RU-11224,HCL-O-DU-1123
-ERICSSON-O-RU-11225,HCL-O-DU-1123
-ERICSSON-O-RU-11226,HCL-O-DU-1123
-ERICSSON-O-RU-11227,HCL-O-DU-1124
-ERICSSON-O-RU-11228,HCL-O-DU-1125
-ERICSSON-O-RU-11229,HCL-O-DU-1125
\ No newline at end of file
+ERICSSON-O-RU-11220,O-DU-1122
+ERICSSON-O-RU-11221,O-DU-1122
+ERICSSON-O-RU-11222,O-DU-1122
+ERICSSON-O-RU-11223,O-DU-1122
+ERICSSON-O-RU-11223,O-DU-1122
+ERICSSON-O-RU-11224,O-DU-1123
+ERICSSON-O-RU-11225,O-DU-1123
+ERICSSON-O-RU-11226,O-DU-1123
+ERICSSON-O-RU-11227,O-DU-1124
+ERICSSON-O-RU-11228,O-DU-1125
+ERICSSON-O-RU-11229,O-DU-1125
\ No newline at end of file
started = true
fmt.Println("Start pushing messages for job: ", id)
- startPushingMessages()
+ go startPushingMessages()
}
func deleteJobHandler(w http.ResponseWriter, r *http.Request) {
flag.Parse()
r := mux.NewRouter()
- r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={O-RU-ID}", handleData)
+ r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions={O-DU-ID}/radio-resource-management-policy-ratio=rrm-pol-1", handleData)
fmt.Println("Starting SDNR on port: ", *port)
fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), r))
import time
MR_PATH = "/events/[TOPIC]/users/test/"
-SDNR_PATH = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=[O-RU-ID]"
+SDNR_PATH = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=[O-DU-ID]/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=[O-DU-ID]/radio-resource-management-policy-ratio=rrm-pol-1"
FAUILT_ID = "28"
UNLOCK_MESSAGE = {
- "o-ran-sc-du-hello-world:du-to-ru-connection": [
+ "o-ran-sc-du-hello-world:radio-resource-management-policy-ratio":
+ [
{
- "name":"",
- "administrative-state":"UNLOCKED"
+ "id":"rrm-pol-1",
+ "radio-resource-management-policy-max-ratio":25,
+ "radio-resource-management-policy-members":
+ [
+ {
+ "mobile-country-code":"310",
+ "mobile-network-code":"150",
+ "slice-differentiator":1,
+ "slice-service-type":1
+ }
+ ],
+ "radio-resource-management-policy-min-ratio":15,
+ "user-label":"rrm-pol-1",
+ "resource-type":"prb",
+ "radio-resource-management-policy-dedicated-ratio":20,
+ "administrative-state":"unlocked"
}
]
}
o_du_id = o_ru_to_o_du_map[o_ru_id]
verboseprint("O-DU ID: " + o_du_id)
unlock_msg = json.loads(json.dumps(UNLOCK_MESSAGE))
- unlock_msg["o-ran-sc-du-hello-world:du-to-ru-connection"][0]["name"] = o_ru_id
- send_path = SDNR_PATH.replace("[O-DU-ID]", o_du_id).replace("[O-RU-ID]", o_ru_id)
+ send_path = SDNR_PATH.replace("[O-DU-ID]", o_du_id)
requests.put(sdnr_address + send_path, auth=(sdnr_user, sdnr_pwd), json=unlock_msg)
else:
print("ERROR: No mapping for O-RU ID: " + o_ru_id)
{
- "ERICSSON-O-RU-11220": "HCL-O-DU-1122",
- "ERICSSON-O-RU-11221": "HCL-O-DU-1122",
- "ERICSSON-O-RU-11222": "HCL-O-DU-1122",
- "ERICSSON-O-RU-11223": "HCL-O-DU-1122",
- "ERICSSON-O-RU-11223": "HCL-O-DU-1122",
- "ERICSSON-O-RU-11224": "HCL-O-DU-1123",
- "ERICSSON-O-RU-11225": "HCL-O-DU-1123",
- "ERICSSON-O-RU-11226": "HCL-O-DU-1123",
- "ERICSSON-O-RU-11227": "HCL-O-DU-1124",
- "ERICSSON-O-RU-11228": "HCL-O-DU-1125",
- "ERICSSON-O-RU-11229": "HCL-O-DU-1125",
+ "ERICSSON-O-RU-11220": "O-DU-1122",
+ "ERICSSON-O-RU-11221": "O-DU-1122",
+ "ERICSSON-O-RU-11222": "O-DU-1122",
+ "ERICSSON-O-RU-11223": "O-DU-1122",
+ "ERICSSON-O-RU-11223": "O-DU-1122",
+ "ERICSSON-O-RU-11224": "O-DU-1123",
+ "ERICSSON-O-RU-11225": "O-DU-1123",
+ "ERICSSON-O-RU-11226": "O-DU-1123",
+ "ERICSSON-O-RU-11227": "O-DU-1124",
+ "ERICSSON-O-RU-11228": "O-DU-1125",
+ "ERICSSON-O-RU-11229": "O-DU-1125",
}
\ No newline at end of file
# Server info
HOST_IP = "::"
HOST_PORT = 9990
-APP_URL = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=<string:o_du_id>/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection=<string:o_ru_id>"
+APP_URL = "/rests/data/network-topology:network-topology/topology=topology-netconf/node=<string:o_du_id>/yang-ext:mount/o-ran-sc-du-hello-world:network-function/distributed-unit-functions=<string:o_du_id2>/radio-resource-management-policy-ratio=rrm-pol-1"
USERNAME = "admin"
PASSWORD = "Kp8bJ4SXszM0WXlhak3eHlcse2gAw84vaoGGmJvUy2U"
class AlarmClearThread (threading.Thread):
- def __init__(self, sleep_time, o_ru_id):
+ def __init__(self, sleep_time, o_du_id):
threading.Thread.__init__(self)
self.sleep_time = sleep_time
- self.o_ru_id = o_ru_id
+ self.o_du_id = o_du_id
def run(self):
- print(f'Sleeping: {self.sleep_time} before clearing O-DU: {self.o_ru_id}')
+ print(f'Sleeping: {self.sleep_time} before clearing O-DU: {self.o_du_id}')
time.sleep(self.sleep_time)
msg_as_json = json.loads(json.dumps(linkFailureMessage))
- msg_as_json["event"]["commonEventHeader"]["sourceName"] = self.o_ru_id
- print("Sedning alarm clear for O-RU: " + self.o_ru_id)
+ msg_as_json["event"]["commonEventHeader"]["sourceName"] = self.o_du_id
+    print("Sending alarm clear for O-DU: " + self.o_du_id)
requests.post(mr_host + ":" + mr_port + MR_PATH, json=msg_as_json);
@app.route(APP_URL,
methods=['PUT'])
@auth.login_required
-def sendrequest(o_du_id, o_ru_id):
- print("Got request with O-DU ID: " + o_du_id + " and O-RU ID: " + o_ru_id)
+def sendrequest(o_du_id, o_du_id2):
+ print("Got request with O-DU ID: " + o_du_id)
random_time = int(10 * random.random())
- alarm_clear_thread = AlarmClearThread(random_time, o_ru_id)
+ alarm_clear_thread = AlarmClearThread(random_time, o_du_id)
alarm_clear_thread.start()
return Response(status=200)