ADD /config/keystore.jks /opt/app/policy-agent/etc/cert/keystore.jks
ADD /config/truststore.jks /opt/app/policy-agent/etc/cert/truststore.jks
-RUN chmod -R 777 /opt/app/policy-agent/config/
-RUN chmod -R 777 /opt/app/policy-agent/data/
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/policy-agent
+RUN chown -R appuser:appuser /var/log/policy-agent
+USER appuser
ADD target/${JAR} /opt/app/policy-agent/policy-agent.jar
CMD ["java", "-jar", "/opt/app/policy-agent/policy-agent.jar"]
RUN mkdir -p /var/log/dmaap-adaptor-service
RUN mkdir -p /opt/app/dmaap-adaptor-service/etc/cert/
RUN mkdir -p /var/dmaap-adaptor-service
-RUN chmod -R 777 /var/dmaap-adaptor-service
ADD /config/application.yaml /opt/app/dmaap-adaptor-service/config/application.yaml
ADD /config/application_configuration.json /opt/app/dmaap-adaptor-service/data/application_configuration.json_example
ADD /config/keystore.jks /opt/app/dmaap-adaptor-service/etc/cert/keystore.jks
ADD /config/truststore.jks /opt/app/dmaap-adaptor-service/etc/cert/truststore.jks
-RUN chmod -R 777 /opt/app/dmaap-adaptor-service/config/
+
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /var/dmaap-adaptor-service/
+RUN chown -R appuser:appuser /opt/app/dmaap-adaptor-service/
+USER appuser
ADD target/${JAR} /opt/app/dmaap-adaptor-service/dmaap-adaptor.jar
CMD ["java", "-jar", "/opt/app/dmaap-adaptor-service/dmaap-adaptor.jar"]
# O-RAN-SC Non-RealTime RIC DMaaP Information Producer
-This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP. Its main tasks is to register information types and itself as a producer using the ICS Data Producer API.
+This product is a generic information producer (as defined by the Information Coordinator Service (ICS)). It can produce any information that can be retrieved from DMaaP or Kafka. Its main task is to register information types and itself as a producer using the ICS Data Producer API.
A data consumer may create information jobs through the ICS Data Producer API.
-This service will retrieve data from the DMaaP Message Router (MR) and distribute it further to the data consumers (information job owners).
+This service will retrieve data from the DMaaP Message Router (MR) or from the Kafka streaming platform and will distribute it further to the data consumers (information job owners).
The component is a springboot service and is configured as any springboot service through the file `config/application.yaml`. The component log can be retrieved and logging can be controled by means of REST call. See the API documentation (api/api.yaml).
"types":
[
{
- "id": "STD_Fault_Messages",
- "dmaapTopicUrl": events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+ "id": "ExampleInformationType1_1.0.0",
+ "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/STD-Fault-Messages_1.0.0",
+ "useHttpProxy": true
+ },
+ {
+ "id": "ExampleInformationType2_2.0.0",
+ "kafkaInputTopic": "KafkaInputTopic",
"useHttpProxy": false
}
]
}
```
-Each information has the following properties:
+Each information type has the following properties:
- id the information type identity as exposed in the Information Coordination Service data consumer API
- dmaapTopicUrl the URL to for fetching information from DMaaP
+ - kafkaInputTopic a Kafka topic to get input from
- useHttpProxy if true, the received information will be delivered using a HTTP proxy (provided that one is setup in the application.yaml file). This might for instance be needed if the data consumer is in the RAN or outside the cluster.
-The service producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+The service producer will poll MR and/or listen to Kafka topics for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If a consumer is unavailable for distribution, the messages will be discarded for that consumer.
+
+When an Information Job is created in the Information Coordinator Service Consumer API, it is possible to define a number of job specific properties. For an Information type that has a Kafka topic defined, the following Json schema defines the properties that can be used:
+
+
+```sh
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+ "properties": {
+ "filter": {
+ "type": "string"
+ },
+ "maxConcurrency": {
+ "type": "integer"
+ },
+ "bufferTimeout": {
+ "type": "object",
+ "properties": {
+ "maxSize": {
+ "type": "integer"
+ },
+ "maxTimeMiliseconds": {
+ "type": "integer"
+ }
+ },
+ "additionalProperties": false,
+ "required": [
+ "maxSize",
+ "maxTimeMiliseconds"
+ ]
+ }
+ },
+ "additionalProperties": false
+}
+```
+-filter is a regular expression. Only strings that match the expression will be pushed further to the consumer.
+-maxConcurrency the maximum number of concurrent REST sessions for the data delivery to the consumer.
+ The default is 1 and that is the number that must be used to guarantee that the object sequence is maintained.
+ A higher number will give higher throughput.
+-bufferTimeout, can be used to reduce the number of REST calls to the consumer. If defined, a number of objects will be
+ buffered and sent in one REST call to the consumer.
+ The buffered objects will be put in a Json array and quoted. Example:
+ Object1 and Object2 may be posted in one call --> ["Object1", "Object2"]
+ The bufferTimeout is a Json object and the parameters in the object are:
+ - maxSize the maximum number of buffered objects before posting
+ - maxTimeMiliseconds the maximum delay time to buffer before posting
+ If no bufferTimeout is specified, each object will be posted as received in separate calls (not quoted and put in a Json array).
+
+
+For an information type that only has a DMaaP topic, the following Json schema defines the possible parameters to use when creating an information job:
+
+```sh
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+ "properties": {
+ "filter": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
+```
+-filter is a regular expression. Only strings that match the expression will be pushed further to the consumer. This
+ has a similar meaning as in jobs that receive data from Kafka.
## License
}},
"openapi": "3.0.1",
"paths": {
- "/dmaap_dataproducer/info_job": {
- "post": {
- "summary": "Callback for Information Job creation/modification",
- "requestBody": {
- "content": {"application/json": {"schema": {"type": "string"}}},
- "required": true
- },
- "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
- "operationId": "jobCreatedCallback",
- "responses": {
- "200": {
- "description": "OK",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
- },
- "404": {
- "description": "Information type is not found",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
- }
- },
- "tags": ["Producer job control API"]
- },
- "get": {
- "summary": "Get all jobs",
- "description": "Returns all info jobs, can be used for trouble shooting",
- "operationId": "getJobs",
- "responses": {"200": {
- "description": "Information jobs",
- "content": {"application/json": {"schema": {
- "type": "array",
- "items": {"$ref": "#/components/schemas/producer_info_job_request"}
- }}}
- }},
- "tags": ["Producer job control API"]
- }
- },
- "/dmaap_dataproducer/health_check": {"get": {
- "summary": "Producer supervision",
- "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
- "operationId": "producerSupervision",
- "responses": {"200": {
- "description": "The producer is OK",
- "content": {"application/json": {"schema": {"type": "string"}}}
- }},
- "tags": ["Producer job control API"]
- }},
"/actuator/threaddump": {"get": {
"summary": "Actuator web endpoint 'threaddump'",
"operationId": "handle_2_1_3",
}],
"tags": ["Information Coordinator Service Simulator (exists only in test)"]
}},
+ "/generic_dataproducer/health_check": {"get": {
+ "summary": "Producer supervision",
+ "description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
+ "operationId": "producerSupervision",
+ "responses": {"200": {
+ "description": "The producer is OK",
+ "content": {"application/json": {"schema": {"type": "string"}}}
+ }},
+ "tags": ["Producer job control API"]
+ }},
+ "/generic_dataproducer/info_job": {
+ "post": {
+ "summary": "Callback for Information Job creation/modification",
+ "requestBody": {
+ "content": {"application/json": {"schema": {"type": "string"}}},
+ "required": true
+ },
+ "description": "The call is invoked to activate or to modify a data subscription. The endpoint is provided by the Information Producer.",
+ "operationId": "jobCreatedCallback",
+ "responses": {
+ "200": {
+ "description": "OK",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+ },
+ "400": {
+ "description": "Other error in the request",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+ },
+ "404": {
+ "description": "Information type is not found",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/error_information"}}}
+ }
+ },
+ "tags": ["Producer job control API"]
+ },
+ "get": {
+ "summary": "Get all jobs",
+ "description": "Returns all info jobs, can be used for trouble shooting",
+ "operationId": "getJobs",
+ "responses": {"200": {
+ "description": "Information jobs",
+ "content": {"application/json": {"schema": {
+ "type": "array",
+ "items": {"$ref": "#/components/schemas/producer_info_job_request"}
+ }}}
+ }},
+ "tags": ["Producer job control API"]
+ }
+ },
"/actuator/loggers": {"get": {
"summary": "Actuator web endpoint 'loggers'",
"operationId": "handle_6",
"tags": ["Information Coordinator Service Simulator (exists only in test)"]
}
},
+ "/generic_dataproducer/info_job/{infoJobId}": {"delete": {
+ "summary": "Callback for Information Job deletion",
+ "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
+ "operationId": "jobDeletedCallback",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
+ }},
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "infoJobId",
+ "required": true
+ }],
+ "tags": ["Producer job control API"]
+ }},
"/actuator/metrics/{requiredMetricName}": {"get": {
"summary": "Actuator web endpoint 'metrics-requiredMetricName'",
"operationId": "handle_5",
"tags": ["Actuator"]
}
},
- "/dmaap_dataproducer/info_job/{infoJobId}": {"delete": {
- "summary": "Callback for Information Job deletion",
- "description": "The call is invoked to terminate a data subscription. The endpoint is provided by the Information Producer.",
- "operationId": "jobDeletedCallback",
- "responses": {"200": {
- "description": "OK",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/void"}}}
- }},
- "parameters": [{
- "schema": {"type": "string"},
- "in": "path",
- "name": "infoJobId",
- "required": true
- }],
- "tags": ["Producer job control API"]
- }},
"/actuator/health": {"get": {
"summary": "Actuator web endpoint 'health'",
"operationId": "handle_11",
"name": "Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.",
"url": "http://www.apache.org/licenses/LICENSE-2.0"
},
- "description": "Reads data from DMAAP and sends it further to information consumers",
- "title": "Generic Dmaap Information Producer",
+ "description": "Reads data from DMaaP and Kafka and posts it further to information consumers",
+ "title": "Generic Dmaap and Kafka Information Producer",
"version": "1.0"
},
"tags": [{
openapi: 3.0.1
info:
- title: Generic Dmaap Information Producer
- description: Reads data from DMAAP and sends it further to information consumers
+ title: Generic Dmaap and Kafka Information Producer
+ description: Reads data from DMaaP and Kafka and posts it further to information
+ consumers
license:
name: Copyright (C) 2021 Nordix Foundation. Licensed under the Apache License.
url: http://www.apache.org/licenses/LICENSE-2.0
description: Spring Boot Actuator Web API Documentation
url: https://docs.spring.io/spring-boot/docs/current/actuator-api/html/
paths:
- /dmaap_dataproducer/info_job:
- get:
- tags:
- - Producer job control API
- summary: Get all jobs
- description: Returns all info jobs, can be used for trouble shooting
- operationId: getJobs
- responses:
- 200:
- description: Information jobs
- content:
- application/json:
- schema:
- type: array
- items:
- $ref: '#/components/schemas/producer_info_job_request'
- post:
- tags:
- - Producer job control API
- summary: Callback for Information Job creation/modification
- description: The call is invoked to activate or to modify a data subscription.
- The endpoint is provided by the Information Producer.
- operationId: jobCreatedCallback
- requestBody:
- content:
- application/json:
- schema:
- type: string
- required: true
- responses:
- 200:
- description: OK
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/void'
- 404:
- description: Information type is not found
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/error_information'
- /dmaap_dataproducer/health_check:
- get:
- tags:
- - Producer job control API
- summary: Producer supervision
- description: The endpoint is provided by the Information Producer and is used
- for supervision of the producer.
- operationId: producerSupervision
- responses:
- 200:
- description: The producer is OK
- content:
- application/json:
- schema:
- type: string
/actuator/threaddump:
get:
tags:
application/json:
schema:
type: object
+ /generic_dataproducer/health_check:
+ get:
+ tags:
+ - Producer job control API
+ summary: Producer supervision
+ description: The endpoint is provided by the Information Producer and is used
+ for supervision of the producer.
+ operationId: producerSupervision
+ responses:
+ 200:
+ description: The producer is OK
+ content:
+ application/json:
+ schema:
+ type: string
+ /generic_dataproducer/info_job:
+ get:
+ tags:
+ - Producer job control API
+ summary: Get all jobs
+ description: Returns all info jobs, can be used for trouble shooting
+ operationId: getJobs
+ responses:
+ 200:
+ description: Information jobs
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ $ref: '#/components/schemas/producer_info_job_request'
+ post:
+ tags:
+ - Producer job control API
+ summary: Callback for Information Job creation/modification
+ description: The call is invoked to activate or to modify a data subscription.
+ The endpoint is provided by the Information Producer.
+ operationId: jobCreatedCallback
+ requestBody:
+ content:
+ application/json:
+ schema:
+ type: string
+ required: true
+ responses:
+ 200:
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/void'
+ 400:
+ description: Other error in the request
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/error_information'
+ 404:
+ description: Information type is not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/error_information'
/actuator/loggers:
get:
tags:
application/json:
schema:
type: object
+ /generic_dataproducer/info_job/{infoJobId}:
+ delete:
+ tags:
+ - Producer job control API
+ summary: Callback for Information Job deletion
+ description: The call is invoked to terminate a data subscription. The endpoint
+ is provided by the Information Producer.
+ operationId: jobDeletedCallback
+ parameters:
+ - name: infoJobId
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/void'
/actuator/metrics/{requiredMetricName}:
get:
tags:
'*/*':
schema:
type: object
- /dmaap_dataproducer/info_job/{infoJobId}:
- delete:
- tags:
- - Producer job control API
- summary: Callback for Information Job deletion
- description: The call is invoked to terminate a data subscription. The endpoint
- is provided by the Information Producer.
- operationId: jobDeletedCallback
- parameters:
- - name: infoJobId
- in: path
- required: true
- style: simple
- explode: false
- schema:
- type: string
- responses:
- 200:
- description: OK
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/void'
/actuator/health:
get:
tags:
# The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
http.proxy-host:
http.proxy-port: 0
- ecs-base-url: https://localhost:8434
+ ics-base-url: https://localhost:8434
# Location of the component configuration file. The file will only be used if the Consul database is not used;
# configuration from the Consul will override the file.
configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
dmaap-base-url: http://dradmin:dradmin@localhost:2222
# The url used to adress this component. This is used as a callback url sent to other components.
dmaap-adapter-base-url: https://localhost:8435
- # KAFKA boostrap server. This is only needed if there are Information Types that uses a kafkaInputTopic
+ # KAFKA bootstrap servers. This is only needed if there are Information Types that use a kafkaInputTopic
+ # several redundant bootstrap servers can be specified, separated by a comma ','.
kafka:
bootstrap-servers: localhost:9092
{
"types": [
{
- "id": "ExampleInformationType",
+ "id": "ExampleInformationType1",
"dmaapTopicUrl": "/events/unauthenticated.VES_NOTIFICATION_OUTPUT/OpenDcae-c12/C12",
"useHttpProxy": true
+ },
+ {
+ "id": "ExampleInformationType2",
+ "kafkaInputTopic": "TutorialTopic",
+ "useHttpProxy": false
}
+
]
-}
\ No newline at end of file
+}
package org.oran.dmaapadapter;
+import java.io.File;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.scheduling.annotation.EnableScheduling;
+import org.springframework.scheduling.annotation.Scheduled;
@SpringBootApplication
+@EnableConfigurationProperties
+@EnableScheduling
public class Application {
+ private static final Logger logger = LoggerFactory.getLogger(Application.class);
+
+ @Value("${app.configuration-filepath}")
+ private String localConfigurationFilePath;
+
+ private long configFileLastModification = 0;
+ private static ConfigurableApplicationContext applicationContext;
+
public static void main(String[] args) {
- SpringApplication.run(Application.class);
+ applicationContext = SpringApplication.run(Application.class);
}
+ @Scheduled(fixedRate = 10 * 1000)
+ public void checkConfigFileChanges() {
+ long timestamp = new File(localConfigurationFilePath).lastModified();
+ if (configFileLastModification != 0 && timestamp != configFileLastModification) {
+ logger.info("Restarting due to change in the file {}", localConfigurationFilePath);
+ restartApplication();
+ }
+ configFileLastModification = timestamp;
+ }
+
+ private static void restartApplication() {
+ if (applicationContext == null) {
+ logger.info("Cannot restart in unittest");
+ return;
+ }
+ ApplicationArguments args = applicationContext.getBean(ApplicationArguments.class);
+
+ Thread thread = new Thread(() -> {
+ applicationContext.close();
+ applicationContext = SpringApplication.run(Application.class, args.getSourceArgs());
+ });
+
+ thread.setDaemon(false);
+ thread.start();
+ }
}
public class SwaggerConfig {
private SwaggerConfig() {}
- static final String API_TITLE = "Generic Dmaap Information Producer";
- static final String DESCRIPTION = "Reads data from DMAAP and sends it further to information consumers";
+ static final String API_TITLE = "Generic Dmaap and Kafka Information Producer";
+ static final String DESCRIPTION = "Reads data from DMaaP and Kafka and posts it further to information consumers";
}
/**
* Generic reactive REST client.
*/
+@SuppressWarnings("java:S4449") // @Add Nullable to third party api
public class AsyncRestClient {
private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
}
public Mono<String> postWithAuthHeader(String uri, String body, String username, String password,
- MediaType mediaType) {
+ @Nullable MediaType mediaType) {
Object traceTag = createTraceTag();
logger.debug("{} POST (auth) uri = '{}{}''", traceTag, baseUrl, uri);
logger.trace("{} POST body: {}", traceTag, body);
return retrieve(traceTag, request);
}
- public Mono<ResponseEntity<String>> putForEntity(String uri) {
- Object traceTag = createTraceTag();
- logger.debug("{} PUT uri = '{}{}''", traceTag, baseUrl, uri);
- logger.trace("{} PUT body: <empty>", traceTag);
- RequestHeadersSpec<?> request = getWebClient() //
- .put() //
- .uri(uri);
- return retrieve(traceTag, request);
- }
-
public Mono<String> put(String uri, String body) {
return putForEntity(uri, body) //
.map(this::toBody);
import java.util.Collections;
import lombok.Getter;
+import lombok.Setter;
import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
import org.oran.dmaapadapter.repository.InfoType;
private int httpProxyPort = 0;
@Getter
+ @Setter
@Value("${server.port}")
private int localServerHttpPort;
@Getter
- @Value("${app.ecs-base-url}")
- private String ecsBaseUrl;
+ @Value("${app.ics-base-url}")
+ private String icsBaseUrl;
@Getter
@Value("${app.dmaap-adapter-base-url}")
import java.util.ArrayList;
import java.util.Collection;
+import org.oran.dmaapadapter.exceptions.ServiceException;
import org.oran.dmaapadapter.r1.ProducerJobInfo;
import org.oran.dmaapadapter.repository.InfoTypes;
import org.oran.dmaapadapter.repository.Job;
public static final String API_NAME = "Producer job control API";
public static final String API_DESCRIPTION = "";
- public static final String JOB_URL = "/dmaap_dataproducer/info_job";
- public static final String SUPERVISION_URL = "/dmaap_dataproducer/health_check";
+ public static final String JOB_URL = "/generic_dataproducer/info_job";
+ public static final String SUPERVISION_URL = "/generic_dataproducer/health_check";
private static Gson gson = new GsonBuilder().create();
private final Jobs jobs;
private final InfoTypes types;
content = @Content(schema = @Schema(implementation = VoidResponse.class))), //
@ApiResponse(responseCode = "404", description = "Information type is not found", //
content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))), //
+ @ApiResponse(responseCode = "400", description = "Other error in the request", //
+ content = @Content(schema = @Schema(implementation = ErrorResponse.ErrorInfo.class))) //
})
public ResponseEntity<Object> jobCreatedCallback( //
@RequestBody String body) {
this.jobs.addJob(request.id, request.targetUri, types.getType(request.typeId), request.owner,
request.lastUpdated, toJobParameters(request.jobData));
return new ResponseEntity<>(HttpStatus.OK);
+ } catch (ServiceException e) {
+ logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+ return ErrorResponse.create(e, e.getHttpStatus());
} catch (Exception e) {
- return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
+ logger.warn("jobCreatedCallback failed: {}", e.getMessage());
+ return ErrorResponse.create(e, HttpStatus.BAD_REQUEST);
}
}
@Getter
private final HttpStatus httpStatus;
- public ServiceException(String message) {
- super(message);
- httpStatus = null;
- }
-
- public ServiceException(String message, Exception originalException) {
- super(message, originalException);
- httpStatus = null;
- }
-
public ServiceException(String message, HttpStatus httpStatus) {
super(message);
this.httpStatus = httpStatus;
import org.immutables.gson.Gson;
@Gson.TypeAdapters
-@Schema(name = "consumer_job", description = "Information for an Enrichment Information Job")
+@Schema(name = "consumer_job", description = "Information for an Information Job")
public class ConsumerJobInfo {
@Schema(name = "info_type_id", description = "Information type Idenitifier of the subscription job",
import org.oran.dmaapadapter.exceptions.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
public class InfoTypes {
private static final Logger logger = LoggerFactory.getLogger(InfoTypes.class);
public synchronized InfoType getType(String id) throws ServiceException {
InfoType type = allTypes.get(id);
if (type == null) {
- throw new ServiceException("Could not find type: " + id);
+ throw new ServiceException("Could not find type: " + id, HttpStatus.NOT_FOUND);
}
return type;
}
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
@Component
public synchronized Job getJob(String id) throws ServiceException {
Job job = allJobs.get(id);
if (job == null) {
- throw new ServiceException("Could not find job: " + id);
+ throw new ServiceException("Could not find job: " + id, HttpStatus.NOT_FOUND);
}
return job;
}
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Set;
import java.util.Vector;
/**
return null;
}
+ public T get(String key1, String key2) {
+ Map<String, T> innerMap = this.map.get(key1);
+ if (innerMap == null) {
+ return null;
+ }
+ return innerMap.get(key2);
+ }
+
public Collection<T> get(String key) {
Map<String, T> innerMap = this.map.get(key);
if (innerMap == null) {
return new Vector<>(innerMap.values());
}
+ public Set<String> keySet() {
+ return this.map.keySet();
+ }
+
public void clear() {
this.map.clear();
}
import org.springframework.http.MediaType;
import reactor.core.publisher.Flux;
-import reactor.core.publisher.FluxSink;
import reactor.core.publisher.Mono;
/**
private static final Logger logger = LoggerFactory.getLogger(DmaapTopicConsumer.class);
private final AsyncRestClient dmaapRestClient;
- private final InfiniteFlux infiniteSubmitter = new InfiniteFlux();
protected final ApplicationConfig applicationConfig;
protected final InfoType type;
protected final Jobs jobs;
- /** Submits new elements until stopped */
- private static class InfiniteFlux {
- private FluxSink<Integer> sink;
- private int counter = 0;
-
- public synchronized Flux<Integer> start() {
- stop();
- return Flux.create(this::next).doOnRequest(this::onRequest);
- }
-
- public synchronized void stop() {
- if (this.sink != null) {
- this.sink.complete();
- this.sink = null;
- }
- }
-
- void onRequest(long no) {
- logger.debug("InfiniteFlux.onRequest {}", no);
- for (long i = 0; i < no; ++i) {
- sink.next(counter++);
- }
- }
-
- void next(FluxSink<Integer> sink) {
- logger.debug("InfiniteFlux.next");
- this.sink = sink;
- sink.next(counter++);
- }
- }
-
public DmaapTopicConsumer(ApplicationConfig applicationConfig, InfoType type, Jobs jobs) {
AsyncRestClientFactory restclientFactory = new AsyncRestClientFactory(applicationConfig.getWebClientConfig());
this.dmaapRestClient = restclientFactory.createRestClientNoHttpProxy("");
}
public void start() {
- infiniteSubmitter.start() //
+ Flux.range(0, Integer.MAX_VALUE) //
.flatMap(notUsed -> getFromMessageRouter(getDmaapUrl()), 1) //
.flatMap(this::pushDataToConsumers) //
.subscribe(//
null, //
throwable -> logger.error("DmaapMessageConsumer error: {}", throwable.getMessage()), //
- () -> logger.warn("DmaapMessageConsumer stopped {}", type.getId())); //
+ this::onComplete); //
+ }
+ private void onComplete() {
+ logger.warn("DmaapMessageConsumer completed {}", type.getId());
+ start();
}
private String getDmaapUrl() {
// Distibute the body to all jobs for this type
return Flux.fromIterable(this.jobs.getJobsForType(this.type)) //
+ .filter(job -> job.isFilterMatch(body)) //
.doOnNext(job -> logger.debug("Sending to consumer {}", job.getCallbackUrl())) //
.flatMap(job -> job.getConsumerRestClient().post("", body, MediaType.APPLICATION_JSON), CONCURRENCY) //
.onErrorResume(this::handleConsumerErrorResponse);
import reactor.core.Disposable;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
-import reactor.core.publisher.Sinks.Many;
/**
* The class streams data from a multi cast sink and sends the data to the Job
this.job = job;
}
- public synchronized void start(Many<String> input) {
+ public synchronized void start(Flux<String> input) {
stop();
this.errorStats.resetKafkaErrors();
this.subscription = getMessagesFromKafka(input, job) //
.flatMap(this::postToClient, job.getParameters().getMaxConcurrency()) //
.onErrorResume(this::handleError) //
.subscribe(this::handleConsumerSentOk, //
- t -> stop(), //
+ this::handleExceptionInStream, //
() -> logger.warn("KafkaMessageConsumer stopped jobId: {}", job.getId()));
}
+ private void handleExceptionInStream(Throwable t) {
+ logger.warn("KafkaMessageConsumer exception: {}, jobId: {}", t.getMessage(), job.getId());
+ stop();
+ }
+
private Mono<String> postToClient(String body) {
logger.debug("Sending to consumer {} {} {}", job.getId(), job.getCallbackUrl(), body);
MediaType contentType = this.job.isBuffered() ? MediaType.APPLICATION_JSON : null;
public synchronized void stop() {
if (this.subscription != null) {
- subscription.dispose();
- subscription = null;
+ this.subscription.dispose();
+ this.subscription = null;
}
}
return this.subscription != null;
}
- private Flux<String> getMessagesFromKafka(Many<String> input, Job job) {
- Flux<String> result = input.asFlux() //
- .filter(job::isFilterMatch);
+ private Flux<String> getMessagesFromKafka(Flux<String> input, Job job) {
+ Flux<String> result = input.filter(job::isFilterMatch);
if (job.isBuffered()) {
result = result.map(this::quote) //
import org.oran.dmaapadapter.repository.InfoTypes;
import org.oran.dmaapadapter.repository.Job;
import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.repository.MultiMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
private final Map<String, KafkaTopicListener> topicListeners = new HashMap<>(); // Key is typeId
@Getter
- private final Map<String, KafkaJobDataConsumer> consumers = new HashMap<>(); // Key is jobId
+ private final MultiMap<KafkaJobDataConsumer> consumers = new MultiMap<>(); // Key is typeId, jobId
private static final int CONSUMER_SUPERVISION_INTERVAL_MS = 1000 * 60 * 3;
public void onJobRemoved(Job job) {
removeJob(job);
}
-
});
}
public synchronized void addJob(Job job) {
- if (this.consumers.get(job.getId()) == null && job.getType().isKafkaTopicDefined()) {
+ if (job.getType().isKafkaTopicDefined()) {
+ removeJob(job);
logger.debug("Kafka job added {}", job.getId());
KafkaTopicListener topicConsumer = topicListeners.get(job.getType().getId());
+ if (consumers.get(job.getType().getId()).isEmpty()) {
+ topicConsumer.start();
+ }
KafkaJobDataConsumer subscription = new KafkaJobDataConsumer(job);
- subscription.start(topicConsumer.getOutput());
- consumers.put(job.getId(), subscription);
+ subscription.start(topicConsumer.getOutput().asFlux());
+ consumers.put(job.getType().getId(), job.getId(), subscription);
}
}
public synchronized void removeJob(Job job) {
- KafkaJobDataConsumer d = consumers.remove(job.getId());
+ KafkaJobDataConsumer d = consumers.remove(job.getType().getId(), job.getId());
if (d != null) {
logger.debug("Kafka job removed {}", job.getId());
d.stop();
}
@Scheduled(fixedRate = CONSUMER_SUPERVISION_INTERVAL_MS)
- public synchronized void restartNonRunningTasks() {
-
- for (KafkaJobDataConsumer consumer : consumers.values()) {
- if (!consumer.isRunning()) {
+ public synchronized void restartNonRunningTopics() {
+ for (String typeId : this.consumers.keySet()) {
+ for (KafkaJobDataConsumer consumer : this.consumers.get(typeId)) {
restartTopic(consumer);
}
}
}
private void restartConsumersOfType(KafkaTopicListener topic, InfoType type) {
- this.consumers.forEach((jobId, consumer) -> {
- if (consumer.getJob().getType().getId().equals(type.getId())) {
- consumer.start(topic.getOutput());
- }
- });
+ this.consumers.get(type.getId()).forEach(consumer -> consumer.start(topic.getOutput().asFlux()));
}
}
public KafkaTopicListener(ApplicationConfig applicationConfig, InfoType type) {
this.applicationConfig = applicationConfig;
this.type = type;
- start();
}
public Many<String> getOutput() {
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.http.HttpStatus;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Mono;
/**
- * Registers the types and this producer in ECS. This is done when needed.
+ * Registers the types and this producer in Information Coordinator Service.
+ * This is done when needed.
*/
@Component
@EnableScheduling
private static final String PRODUCER_ID = "DmaapGenericInfoProducer";
@Getter
- private boolean isRegisteredInEcs = false;
+ private boolean isRegisteredInIcs = false;
private static final int REGISTRATION_SUPERVISION_INTERVAL_MS = 1000 * 5;
public ProducerRegstrationTask(@Autowired ApplicationConfig applicationConfig, @Autowired InfoTypes types) {
}
@Scheduled(fixedRate = REGISTRATION_SUPERVISION_INTERVAL_MS)
- public void supervisionTask() {
- checkRegistration() //
- .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInEcs) //
- .flatMap(isRegisterred -> registerTypesAndProducer()) //
- .subscribe( //
- null, //
- this::handleRegistrationFailure, //
- this::handleRegistrationCompleted);
+ public void runSupervisionTask() {
+ supervisionTask().subscribe( //
+ null, //
+ this::handleRegistrationFailure, //
+ this::handleRegistrationCompleted);
+ }
+
+ public Mono<String> supervisionTask() {
+ return checkRegistration() //
+ .filter(isRegistrationOk -> !isRegistrationOk || !this.isRegisteredInIcs) //
+ .flatMap(isRegisterred -> registerTypesAndProducer());
}
private void handleRegistrationCompleted() {
- isRegisteredInEcs = true;
+ isRegisteredInIcs = true;
}
private void handleRegistrationFailure(Throwable t) {
// Returns TRUE if registration is correct
private Mono<Boolean> checkRegistration() {
- final String url = applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+ final String url = applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
return restClient.get(url) //
.flatMap(this::isRegisterredInfoCorrect) //
.onErrorResume(t -> Mono.just(Boolean.FALSE));
private Mono<Boolean> isRegisterredInfoCorrect(String registerredInfoStr) {
ProducerRegistrationInfo registerredInfo = gson.fromJson(registerredInfoStr, ProducerRegistrationInfo.class);
if (isEqual(producerRegistrationInfo(), registerredInfo)) {
- logger.trace("Already registered in ECS");
+ logger.trace("Already registered in ICS");
return Mono.just(Boolean.TRUE);
} else {
return Mono.just(Boolean.FALSE);
}
private String registerTypeUrl(InfoType type) {
- return applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
+ return applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-types/" + type.getId();
}
private Mono<String> registerTypesAndProducer() {
final int CONCURRENCY = 20;
final String producerUrl =
- applicationConfig.getEcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
+ applicationConfig.getIcsBaseUrl() + "/data-producer/v1/info-producers/" + PRODUCER_ID;
return Flux.fromIterable(this.types.getAll()) //
.doOnNext(type -> logger.info("Registering type {}", type.getId())) //
}
private Object jsonSchemaObject(InfoType type) throws IOException, ServiceException {
-
- if (type.isKafkaTopicDefined()) {
- String schemaStrKafka = readSchemaFile("/typeSchemaKafka.json");
- return jsonObject(schemaStrKafka);
- } else {
- // An object with no properties
- String schemaStr = "{" //
- + "\"type\": \"object\"," //
- + "\"properties\": {}," //
- + "\"additionalProperties\": false" //
- + "}"; //
-
- return jsonObject(schemaStr);
- }
+ String schemaFile = type.isKafkaTopicDefined() ? "/typeSchemaKafka.json" : "/typeSchemaDmaap.json";
+ return jsonObject(readSchemaFile(schemaFile));
}
private String readSchemaFile(String filePath) throws IOException, ServiceException {
InputStream in = getClass().getResourceAsStream(filePath);
logger.debug("Reading application schema file from: {} with: {}", filePath, in);
if (in == null) {
- throw new ServiceException("Could not readfile: " + filePath);
+ throw new ServiceException("Could not readfile: " + filePath, HttpStatus.INTERNAL_SERVER_ERROR);
}
return CharStreams.toString(new InputStreamReader(in, StandardCharsets.UTF_8));
}
+ @SuppressWarnings("java:S2139") // Log exception
private Object jsonObject(String json) {
try {
return JsonParser.parseString(json).getAsJsonObject();
} catch (Exception e) {
- logger.error("Bug, error in JSON: {}", json);
- throw new NullPointerException(e.toString());
+ logger.error("Bug, error in JSON: {} {}", json, e.getMessage());
+ throw new NullPointerException(e.getMessage());
}
}
}
private ProducerRegistrationInfo producerRegistrationInfo() {
-
return ProducerRegistrationInfo.builder() //
.jobCallbackUrl(baseUrl() + ProducerCallbacksController.JOB_URL) //
.producerSupervisionCallbackUrl(baseUrl() + ProducerCallbacksController.SUPERVISION_URL) //
--- /dev/null
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "type": "object",
+ "properties": {
+ "filter": {
+ "type": "string"
+ }
+ },
+ "additionalProperties": false
+}
"type": "integer"
}
},
+ "additionalProperties": false,
"required": [
"maxSize",
"maxTimeMiliseconds"
]
}
},
- "required": []
+ "additionalProperties": false
}
\ No newline at end of file
import org.json.JSONObject;
import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.oran.dmaapadapter.clients.AsyncRestClient;
import org.oran.dmaapadapter.controllers.ProducerCallbacksController;
import org.oran.dmaapadapter.r1.ConsumerJobInfo;
import org.oran.dmaapadapter.r1.ProducerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
import org.oran.dmaapadapter.repository.Jobs;
+import org.oran.dmaapadapter.tasks.KafkaJobDataConsumer;
+import org.oran.dmaapadapter.tasks.KafkaTopicConsumers;
+import org.oran.dmaapadapter.tasks.ProducerRegstrationTask;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.web.reactive.function.client.WebClientResponseException;
+import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.test.StepVerifier;
@ExtendWith(SpringExtension.class)
-@SpringBootTest(webEnvironment = WebEnvironment.DEFINED_PORT)
+@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
@TestPropertySource(properties = { //
"server.ssl.key-store=./config/keystore.jks", //
"app.webclient.trust-store=./config/truststore.jks", //
private ConsumerController consumerController;
@Autowired
- private EcsSimulatorController ecsSimulatorController;
+ private IcsSimulatorController icsSimulatorController;
+
+ @Autowired
+ KafkaTopicConsumers kafkaTopicConsumers;
+
+ @Autowired
+ ProducerRegstrationTask producerRegistrationTask;
private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
static class TestApplicationConfig extends ApplicationConfig {
@Override
- public String getEcsBaseUrl() {
+ public String getIcsBaseUrl() {
return thisProcessUrl();
}
}
}
+ @BeforeEach
+ void setPort() {
+ this.applicationConfig.setLocalServerHttpPort(this.localServerHttpPort);
+ }
+
@AfterEach
void reset() {
this.consumerController.testResults.reset();
- this.ecsSimulatorController.testResults.reset();
+ this.icsSimulatorController.testResults.reset();
this.jobs.clear();
}
}
private ConsumerJobInfo consumerJobInfo() {
- InfoType type = this.types.getAll().iterator().next();
- return consumerJobInfo(type.getId(), "EI_JOB_ID");
+ return consumerJobInfo("DmaapInformationType", "EI_JOB_ID");
}
private Object jsonObject() {
}
@Test
- void testWholeChain() throws Exception {
+ void testReceiveAndPostDataFromKafka() {
+ final String JOB_ID = "ID";
+ final String TYPE_ID = "KafkaInformationType";
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+
+ // Create a job
+ Job.Parameters param = new Job.Parameters("", new Job.BufferTimeout(123, 456), 1);
+ String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+ ConsumerJobInfo kafkaJobInfo =
+ new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", targetUri, "");
+
+ this.icsSimulatorController.addJob(kafkaJobInfo, JOB_ID, restClient());
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+ KafkaJobDataConsumer kafkaConsumer = this.kafkaTopicConsumers.getConsumers().get(TYPE_ID, JOB_ID);
+
+ // Handle received data from Kafka, check that it has been posted to the
+ // consumer
+ kafkaConsumer.start(Flux.just("data"));
+
+ ConsumerController.TestResults consumer = this.consumerController.testResults;
+ await().untilAsserted(() -> assertThat(consumer.receivedBodies.size()).isEqualTo(1));
+ assertThat(consumer.receivedBodies.get(0)).isEqualTo("[\"data\"]");
+
+ // Test send an exception
+ kafkaConsumer.start(Flux.error(new NullPointerException()));
+
+ // Test regular restart of stopped
+ kafkaConsumer.stop();
+ this.kafkaTopicConsumers.restartNonRunningTopics();
+ await().untilAsserted(() -> assertThat(kafkaConsumer.isRunning()).isTrue());
+
+ // Delete the job
+ this.icsSimulatorController.deleteJob(JOB_ID, restClient());
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+ }
+
+ @Test
+ void testReceiveAndPostDataFromDmaap() throws Exception {
final String JOB_ID = "ID";
// Register producer, Register types
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
+ assertThat(producerRegistrationTask.isRegisteredInIcs()).isTrue();
+ producerRegistrationTask.supervisionTask().block();
// Create a job
- this.ecsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo(), JOB_ID, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
// Return two messages from DMAAP and verify that these are sent to the owner of
String jobUrl = baseUrl() + ProducerCallbacksController.JOB_URL;
String jobs = restClient().get(jobUrl).block();
- assertThat(jobs).contains("ExampleInformationType");
+ assertThat(jobs).contains(JOB_ID);
// Delete the job
- this.ecsSimulatorController.deleteJob(JOB_ID, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
-
}
@Test
void testReRegister() throws Exception {
// Wait for register types and producer
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
// Clear the registration, should trigger a re-register
- ecsSimulatorController.testResults.reset();
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ icsSimulatorController.testResults.reset();
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
// Just clear the registerred types, should trigger a re-register
- ecsSimulatorController.testResults.types.clear();
+ icsSimulatorController.testResults.types.clear();
await().untilAsserted(
- () -> assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1));
-
+ () -> assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(2));
}
private void testErrorCode(Mono<?> request, HttpStatus expStatus, String responseContains) {
}
return true;
}
-
}
@RestController("IcsSimulatorController")
@Tag(name = "Information Coordinator Service Simulator (exists only in test)")
-public class EcsSimulatorController {
+public class IcsSimulatorController {
private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final static Gson gson = new GsonBuilder().create();
ProducerJobInfo request =
new ProducerJobInfo(job.jobDefinition, jobId, job.infoTypeId, job.jobResultUri, job.owner, "TIMESTAMP");
String body = gson.toJson(request);
- logger.info("ECS Simulator PUT job: {}", body);
+ logger.info("ICS Simulator PUT job: {}", body);
restClient.post(url, body, MediaType.APPLICATION_JSON).block();
}
public void deleteJob(String jobId, AsyncRestClient restClient) {
String url = this.testResults.registrationInfo.jobCallbackUrl + "/" + jobId;
- logger.info("ECS Simulator DELETE job: {}", url);
+ logger.info("ICS Simulator DELETE job: {}", url);
restClient.delete(url).block();
}
import org.oran.dmaapadapter.configuration.WebClientConfig;
import org.oran.dmaapadapter.configuration.WebClientConfig.HttpProxyConfig;
import org.oran.dmaapadapter.r1.ConsumerJobInfo;
-import org.oran.dmaapadapter.repository.InfoType;
import org.oran.dmaapadapter.repository.InfoTypes;
+import org.oran.dmaapadapter.repository.Job;
import org.oran.dmaapadapter.repository.Jobs;
import org.oran.dmaapadapter.tasks.ProducerRegstrationTask;
import org.springframework.beans.factory.annotation.Autowired;
"server.ssl.key-store=./config/keystore.jks", //
"app.webclient.trust-store=./config/truststore.jks", //
"app.configuration-filepath=./src/test/resources/test_application_configuration.json", //
- "app.ecs-base-url=https://localhost:8434" //
+ "app.ics-base-url=https://localhost:8434" //
})
-class IntegrationWithEcs {
+class IntegrationWithIcs {
- private static final String EI_JOB_ID = "EI_JOB_ID";
+ private static final String DMAAP_JOB_ID = "DMAAP_JOB_ID";
+ private static final String DMAAP_TYPE_ID = "DmaapInformationType";
@Autowired
private ApplicationConfig applicationConfig;
static class TestApplicationConfig extends ApplicationConfig {
@Override
- public String getEcsBaseUrl() {
+ public String getIcsBaseUrl() {
return "https://localhost:8434";
}
@AfterEach
void reset() {
this.consumerController.testResults.reset();
- this.jobs.clear();
- this.types.clear();
+ assertThat(this.jobs.size()).isZero();
}
private AsyncRestClient restClient(boolean useTrustValidation) {
return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
}
- private String ecsBaseUrl() {
- return applicationConfig.getEcsBaseUrl();
+ private String icsBaseUrl() {
+ return applicationConfig.getIcsBaseUrl();
}
private String jobUrl(String jobId) {
- return ecsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId;
+ return icsBaseUrl() + "/data-consumer/v1/info-jobs/" + jobId + "?typeCheck=true";
}
- private void createInformationJobInEcs(String jobId) {
- String body = gson.toJson(consumerJobInfo());
+ private void createInformationJobInIcs(String typeId, String jobId, String filter) {
+ String body = gson.toJson(consumerJobInfo(typeId, filter));
try {
// Delete the job if it already exists
- deleteInformationJobInEcs(jobId);
+ deleteInformationJobInIcs(jobId);
} catch (Exception e) {
}
restClient().putForEntity(jobUrl(jobId), body).block();
}
- private void deleteInformationJobInEcs(String jobId) {
+ private void deleteInformationJobInIcs(String jobId) {
restClient().delete(jobUrl(jobId)).block();
}
- private ConsumerJobInfo consumerJobInfo() {
- InfoType type = this.types.getAll().iterator().next();
- return consumerJobInfo(type.getId(), EI_JOB_ID);
- }
-
- private Object jsonObject() {
- return jsonObject("{}");
+ private ConsumerJobInfo consumerJobInfo(String typeId, String filter) {
+ return consumerJobInfo(typeId, DMAAP_JOB_ID, filter);
}
private Object jsonObject(String json) {
}
}
- private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId) {
+ private String quote(String str) {
+ return "\"" + str + "\"";
+ }
+
+ private String consumerUri() {
+ return selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
+ }
+
+ private ConsumerJobInfo consumerJobInfo(String typeId, String infoJobId, String filter) {
try {
- String targetUri = selfBaseUrl() + ConsumerController.CONSUMER_TARGET_URL;
- return new ConsumerJobInfo(typeId, jsonObject(), "owner", targetUri, "");
+
+ String jsonStr = "{ \"filter\" :" + quote(filter) + "}";
+ return new ConsumerJobInfo(typeId, jsonObject(jsonStr), "owner", consumerUri(), "");
} catch (Exception e) {
return null;
}
}
+ @Test
+ void testCreateKafkaJob() {
+ await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
+ final String TYPE_ID = "KafkaInformationType";
+
+ Job.Parameters param = new Job.Parameters("filter", new Job.BufferTimeout(123, 456), 1);
+
+ ConsumerJobInfo jobInfo =
+ new ConsumerJobInfo(TYPE_ID, jsonObject(gson.toJson(param)), "owner", consumerUri(), "");
+ String body = gson.toJson(jobInfo);
+
+ restClient().putForEntity(jobUrl("KAFKA_JOB_ID"), body).block();
+
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
+
+ deleteInformationJobInIcs("KAFKA_JOB_ID");
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+ }
+
@Test
void testWholeChain() throws Exception {
- await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInEcs()).isTrue());
+ await().untilAsserted(() -> assertThat(producerRegstrationTask.isRegisteredInIcs()).isTrue());
- createInformationJobInEcs(EI_JOB_ID);
+ createInformationJobInIcs(DMAAP_TYPE_ID, DMAAP_JOB_ID, ".*DmaapResponse.*");
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(1));
DmaapSimulatorController.dmaapResponses.add("DmaapResponse1");
DmaapSimulatorController.dmaapResponses.add("DmaapResponse2");
+ DmaapSimulatorController.dmaapResponses.add("Junk");
ConsumerController.TestResults results = this.consumerController.testResults;
await().untilAsserted(() -> assertThat(results.receivedBodies.size()).isEqualTo(2));
assertThat(results.receivedBodies.get(0)).isEqualTo("DmaapResponse1");
- deleteInformationJobInEcs(EI_JOB_ID);
+ deleteInformationJobInIcs(DMAAP_JOB_ID);
await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
- synchronized (this) {
- // logger.warn("**************** Keeping server alive! " +
- // this.applicationConfig.getLocalServerHttpPort());
- // this.wait();
- }
}
}
@TestPropertySource(properties = { //
"server.ssl.key-store=./config/keystore.jks", //
"app.webclient.trust-store=./config/truststore.jks", //
- "app.configuration-filepath=./src/test/resources/test_application_configuration_kafka.json"//
+ "app.configuration-filepath=./src/test/resources/test_application_configuration.json"//
})
class IntegrationWithKafka {
+ final String TYPE_ID = "KafkaInformationType";
+
@Autowired
private ApplicationConfig applicationConfig;
private ConsumerController consumerController;
@Autowired
- private EcsSimulatorController ecsSimulatorController;
+ private IcsSimulatorController icsSimulatorController;
@Autowired
private KafkaTopicConsumers kafkaTopicConsumers;
- private com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
+ private static com.google.gson.Gson gson = new com.google.gson.GsonBuilder().create();
private static final Logger logger = LoggerFactory.getLogger(IntegrationWithKafka.class);
static class TestApplicationConfig extends ApplicationConfig {
@Override
- public String getEcsBaseUrl() {
+ public String getIcsBaseUrl() {
return thisProcessUrl();
}
@AfterEach
void reset() {
this.consumerController.testResults.reset();
- this.ecsSimulatorController.testResults.reset();
+ this.icsSimulatorController.testResults.reset();
this.jobs.clear();
}
return "https://localhost:" + this.applicationConfig.getLocalServerHttpPort();
}
- private Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize, int maxConcurrency) {
+ private static Object jobParametersAsJsonObject(String filter, long maxTimeMiliseconds, int maxSize,
+ int maxConcurrency) {
Job.Parameters param =
new Job.Parameters(filter, new Job.BufferTimeout(maxSize, maxTimeMiliseconds), maxConcurrency);
String str = gson.toJson(param);
return jsonObject(str);
}
- private Object jsonObject(String json) {
+ private static Object jsonObject(String json) {
try {
return JsonParser.parseString(json).getAsJsonObject();
} catch (Exception e) {
}
}
- private ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
+ ConsumerJobInfo consumerJobInfo(String filter, Duration maxTime, int maxSize, int maxConcurrency) {
try {
- InfoType type = this.types.getAll().iterator().next();
- String typeId = type.getId();
String targetUri = baseUrl() + ConsumerController.CONSUMER_TARGET_URL;
- return new ConsumerJobInfo(typeId,
+ return new ConsumerJobInfo(TYPE_ID,
jobParametersAsJsonObject(filter, maxTime.toMillis(), maxSize, maxConcurrency), "owner", targetUri,
"");
} catch (Exception e) {
return SenderOptions.create(props);
}
- private SenderRecord<Integer, String, Integer> senderRecord(String data, int i) {
- final InfoType infoType = this.types.getAll().iterator().next();
- return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), i, data + i), i);
+ private SenderRecord<Integer, String, Integer> senderRecord(String data) {
+ final InfoType infoType = this.types.get(TYPE_ID);
+ int key = 1;
+ int correlationMetadata = 2;
+ return SenderRecord.create(new ProducerRecord<>(infoType.getKafkaInputTopic(), key, data), correlationMetadata);
}
private void sendDataToStream(Flux<SenderRecord<Integer, String, Integer>> dataToSend) {
}
@Test
- void kafkaIntegrationTest() throws InterruptedException {
+ void kafkaIntegrationTest() throws Exception {
final String JOB_ID1 = "ID1";
final String JOB_ID2 = "ID2";
// Register producer, Register types
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
// Create two jobs. One buffering and one with a filter
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 20), JOB_ID1,
restClient());
- this.ecsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo("^Message_1$", Duration.ZERO, 0, 1), JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
- var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+ var dataToSend = Flux.range(1, 3).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
sendDataToStream(dataToSend);
verifiedReceivedByConsumer("Message_1", "[\"Message_1\", \"Message_2\", \"Message_3\"]");
- // Just for testing quoting
- this.consumerController.testResults.reset();
- dataToSend = Flux.just(senderRecord("Message\"_", 1));
- sendDataToStream(dataToSend);
- verifiedReceivedByConsumer("[\"Message\\\"_1\"]");
-
// Delete the jobs
- this.ecsSimulatorController.deleteJob(JOB_ID1, restClient());
- this.ecsSimulatorController.deleteJob(JOB_ID2, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
- await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers()).isEmpty());
+ await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
}
@Test
final String JOB_ID2 = "ID2";
// Register producer, Register types
- await().untilAsserted(() -> assertThat(ecsSimulatorController.testResults.registrationInfo).isNotNull());
- assertThat(ecsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(1);
+ await().untilAsserted(() -> assertThat(icsSimulatorController.testResults.registrationInfo).isNotNull());
+ assertThat(icsSimulatorController.testResults.registrationInfo.supportedTypeIds).hasSize(this.types.size());
// Create two jobs.
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID1, restClient());
- this.ecsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ofMillis(400), 1000, 1), JOB_ID1,
+ restClient());
+ this.icsSimulatorController.addJob(consumerJobInfo(null, Duration.ZERO, 0, 1), JOB_ID2, restClient());
await().untilAsserted(() -> assertThat(this.jobs.size()).isEqualTo(2));
- var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_", i)); // Message_1, Message_2 etc.
+ var dataToSend = Flux.range(1, 1000000).map(i -> senderRecord("Message_" + i)); // Message_1, Message_2 etc.
sendDataToStream(dataToSend); // this should overflow
- KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().values().iterator().next();
+ KafkaJobDataConsumer consumer = kafkaTopicConsumers.getConsumers().get(TYPE_ID).iterator().next();
await().untilAsserted(() -> assertThat(consumer.isRunning()).isFalse());
this.consumerController.testResults.reset();
- kafkaTopicConsumers.restartNonRunningTasks();
- this.ecsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient()); // Delete one job
+ kafkaTopicConsumers.restartNonRunningTopics();
Thread.sleep(1000); // Restarting the input seems to take some asynch time
- dataToSend = Flux.range(1, 1).map(i -> senderRecord("Howdy_", i));
+ dataToSend = Flux.just(senderRecord("Howdy\""));
sendDataToStream(dataToSend);
- verifiedReceivedByConsumer("Howdy_1");
+ verifiedReceivedByConsumer("[\"Howdy\\\"\"]");
+
+ // Delete the jobs
+ this.icsSimulatorController.deleteJob(JOB_ID1, restClient());
+ this.icsSimulatorController.deleteJob(JOB_ID2, restClient());
+
+ await().untilAsserted(() -> assertThat(this.jobs.size()).isZero());
+ await().untilAsserted(() -> assertThat(this.kafkaTopicConsumers.getConsumers().keySet()).isEmpty());
}
}
{
"types": [
{
- "id": "ExampleInformationType",
+ "id": "DmaapInformationType",
"dmaapTopicUrl": "/dmaap-topic-1",
"useHttpProxy": false
+ },
+ {
+ "id": "KafkaInformationType",
+ "kafkaInputTopic": "TutorialTopic",
+ "useHttpProxy": false
}
]
}
\ No newline at end of file
+++ /dev/null
-{
- "types": [
- {
- "id": "ExampleInformationType",
- "kafkaInputTopic": "TutorialTopic",
- "useHttpProxy": false
- }
- ]
-}
\ No newline at end of file
>- INFO_PRODUCER_HOST **Required**. The host for the producer. Example: `https://mrproducer`
>- INFO_PRODUCER_PORT Optional. The port for the producer. Defaults to `8085`.
->- INFO_COORD_ADDR Optional. The address of the Information Coordinator. Defaults to `https://enrichmentservice:8434`.
+>- INFO_COORD_ADDR Optional. The address of the Information Coordinator. Defaults to `https://informationservice:8434`.
>- DMAAP_MR_ADDR Optional. The address of the DMaaP Message Router. Defaults to `https://message-router.onap:3905`.
>- PRODUCER_CERT_PATH Optional. The path to the certificate to use for https. Defaults to `security/producer.crt`
>- PRODUCER_KEY_PATH Optional. The path to the key to the certificate to use for https. Defaults to `security/producer.key`
Once the initial registration is done, the producer will constantly poll MR for all configured job types. When receiving messages for a type, it will distribute these messages to all jobs registered for the type. If no jobs for that type are registered, the messages will be discarded. If a consumer is unavailable for distribution, the messages will be discarded for that consumer until it is available again.
+The producer provides a REST API to control the log level. The available levels are the same as the ones used in the configuration above.
+
+ PUT https://mrproducer:8085/admin/log?level=<new level>
+
## Development
To make it easy to test during development of the producer, two stubs are provided in the `stub` folder.
-One, under the `dmaap` folder, called `dmaap` that stubs MR and respond with an array with one message with `eventSeverity` alternating between `NORMAL` and `CRITICAL`. The default port is `3905`, but this can be overridden by passing a `-port [PORT]` flag when starting the stub. To build and start the stub, do the following:
+One, under the `dmaap` folder, called `dmaap` that stubs MR and responds with an array with one message with `eventSeverity` alternating between `NORMAL` and `CRITICAL`. The default port is `3905`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
>1. cd stub/dmaap
>2. go build
->3. ./dmaap
+>3. ./dmaap [-port \<PORT>]
-One, under the `consumer` folder, called `consumer` that at startup will register a job of type `STD_Fault_Messages` in ICS, and then listen for REST calls and print the body of them. By default, it listens to the port `40935`, but his can be overridden by passing a `-port [PORT]` flag when starting the stub. To build and start the stub, do the following:
+One, under the `consumer` folder, called `consumer` that at startup will register a job of type `STD_Fault_Messages` in ICS, and then listen for REST calls and print the body of them. By default, it listens to the port `40935`, but this can be overridden by passing a `-port <PORT>` flag when starting the stub. To build and start the stub, do the following:
>1. cd stub/consumer
>2. go build
->3. ./consumer
+>3. ./consumer [-port \<PORT>]
Mocks needed for unit tests have been generated using `github.com/stretchr/testify/mock` and are checked in under the `mocks` folder. **Note!** Keep in mind that if any of the mocked interfaces change, a new mock for that interface must be generated and checked in.
--- /dev/null
+#!/bin/bash
+##############################################################################
+#
+# Copyright (C) 2021: Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+##############################################################################
+
+go build
+
+go test ./...
+++ /dev/null
----
-tag: 1.0.0
go 1.17
require (
+ github.com/gorilla/mux v1.8.0
+ github.com/hashicorp/go-retryablehttp v0.7.0
github.com/sirupsen/logrus v1.8.1
github.com/stretchr/testify v1.7.0
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
- github.com/gorilla/mux v1.8.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
- github.com/hashicorp/go-retryablehttp v0.7.0 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/stretchr/objx v0.1.0 // indirect
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 // indirect
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
+github.com/hashicorp/go-hclog v0.9.2 h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-retryablehttp v0.7.0 h1:eu1EI/mbirUgP5C8hVsTNaGZreBDlYiwC1FZWkvQPQ4=
github.com/hashicorp/go-retryablehttp v0.7.0/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY=
package config
import (
+ "encoding/json"
"fmt"
"os"
"strconv"
return &Config{
InfoProducerHost: getEnv("INFO_PRODUCER_HOST", ""),
InfoProducerPort: getEnvAsInt("INFO_PRODUCER_PORT", 8085),
- InfoCoordinatorAddress: getEnv("INFO_COORD_ADDR", "https://enrichmentservice:8434"),
+ InfoCoordinatorAddress: getEnv("INFO_COORD_ADDR", "https://informationservice:8434"),
DMaaPMRAddress: getEnv("DMAAP_MR_ADDR", "https://message-router.onap:3905"),
ProducerCertPath: getEnv("PRODUCER_CERT_PATH", "security/producer.crt"),
ProducerKeyPath: getEnv("PRODUCER_KEY_PATH", "security/producer.key"),
return log.InfoLevel
}
}
+
+// GetJobTypesFromConfiguration reads the JSON file at configFile and returns
+// the TypeDefinitions listed under its top level "types" key. An error is
+// returned when the file cannot be read or its content is not valid JSON of
+// the expected shape.
+func GetJobTypesFromConfiguration(configFile string) ([]TypeDefinition, error) {
+ typeDefsByte, err := os.ReadFile(configFile)
+ if err != nil {
+ return nil, err
+ }
+ // Anonymous struct mirroring the {"types": [...]} layout of the file.
+ typeDefs := struct {
+ Types []TypeDefinition `json:"types"`
+ }{}
+ err = json.Unmarshal(typeDefsByte, &typeDefs)
+ if err != nil {
+ return nil, err
+ }
+
+ return typeDefs.Types, nil
+}
import (
"bytes"
"os"
- "reflect"
+ "path/filepath"
"testing"
log "github.com/sirupsen/logrus"
LogLevel: log.InfoLevel,
InfoProducerHost: "",
InfoProducerPort: 8085,
- InfoCoordinatorAddress: "https://enrichmentservice:8434",
+ InfoCoordinatorAddress: "https://informationservice:8434",
DMaaPMRAddress: "https://message-router.onap:3905",
ProducerCertPath: "security/producer.crt",
ProducerKeyPath: "security/producer.key",
}
- if got := New(); !reflect.DeepEqual(got, &wantConfig) {
- t.Errorf("New() = %v, want %v", got, &wantConfig)
- }
+ got := New()
+ assertions.Equal(&wantConfig, got)
logString := buf.String()
assertions.Contains(logString, "Invalid int value: wrong for variable: INFO_PRODUCER_PORT. Default value: 8085 will be used")
}
LogLevel: log.InfoLevel,
InfoProducerHost: "",
InfoProducerPort: 8085,
- InfoCoordinatorAddress: "https://enrichmentservice:8434",
+ InfoCoordinatorAddress: "https://informationservice:8434",
DMaaPMRAddress: "https://message-router.onap:3905",
ProducerCertPath: "security/producer.crt",
ProducerKeyPath: "security/producer.key",
logString := buf.String()
assertions.Contains(logString, "Invalid log level: wrong. Log level will be Info!")
}
+
+// typeDefinition is a minimal, well-formed type configuration used as test input.
+const typeDefinition = `{"types": [{"id": "type1", "dmaapTopicUrl": "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1"}]}`
+
+// Verifies that GetJobTypesFromConfiguration parses a well-formed
+// configuration file into the expected slice of TypeDefinitions.
+func TestGetTypesFromConfiguration_fileOkShouldReturnSliceOfTypeDefinitions(t *testing.T) {
+ assertions := require.New(t)
+ // Write the configuration to a temp file that is removed on cleanup.
+ typesDir, err := os.MkdirTemp("", "configs")
+ if err != nil {
+ t.Errorf("Unable to create temporary directory for types due to: %v", err)
+ }
+ fname := filepath.Join(typesDir, "type_config.json")
+ t.Cleanup(func() {
+ os.RemoveAll(typesDir)
+ })
+ if err = os.WriteFile(fname, []byte(typeDefinition), 0666); err != nil {
+ t.Errorf("Unable to create temporary config file for types due to: %v", err)
+ }
+
+ types, err := GetJobTypesFromConfiguration(fname)
+
+ wantedType := TypeDefinition{
+ Id: "type1",
+ DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
+ }
+ wantedTypes := []TypeDefinition{wantedType}
+ assertions.EqualValues(wantedTypes, types)
+ assertions.Nil(err)
+}
package jobs
import (
- "encoding/json"
"fmt"
- "os"
"sync"
log "github.com/sirupsen/logrus"
}
type JobTypesManager interface {
- LoadTypesFromConfiguration() ([]config.TypeDefinition, error)
+ LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition
GetSupportedTypes() []string
}
}
type JobsManagerImpl struct {
- configFile string
allTypes map[string]TypeData
pollClient restclient.HTTPClient
mrAddress string
distributeClient restclient.HTTPClient
}
-func NewJobsManagerImpl(typeConfigFilePath string, pollClient restclient.HTTPClient, mrAddr string, distributeClient restclient.HTTPClient) *JobsManagerImpl {
+func NewJobsManagerImpl(pollClient restclient.HTTPClient, mrAddr string, distributeClient restclient.HTTPClient) *JobsManagerImpl {
return &JobsManagerImpl{
- configFile: typeConfigFilePath,
allTypes: make(map[string]TypeData),
pollClient: pollClient,
mrAddress: mrAddr,
return nil
}
-func (jm *JobsManagerImpl) LoadTypesFromConfiguration() ([]config.TypeDefinition, error) {
- typeDefsByte, err := os.ReadFile(jm.configFile)
- if err != nil {
- return nil, err
- }
- typeDefs := struct {
- Types []config.TypeDefinition `json:"types"`
- }{}
- err = json.Unmarshal(typeDefsByte, &typeDefs)
- if err != nil {
- return nil, err
- }
- for _, typeDef := range typeDefs.Types {
+// LoadTypesFromConfiguration registers each provided type definition as a
+// supported type, wiring up a jobs handler per type, and returns the input
+// slice unchanged so the caller can pass it on (e.g. for type registration).
+func (jm *JobsManagerImpl) LoadTypesFromConfiguration(types []config.TypeDefinition) []config.TypeDefinition {
+ for _, typeDef := range types {
jm.allTypes[typeDef.Id] = TypeData{
TypeId: typeDef.Id,
DMaaPTopicURL: typeDef.DmaapTopicURL,
jobsHandler: newJobsHandler(typeDef.Id, typeDef.DmaapTopicURL, jm.pollClient, jm.distributeClient),
}
}
- return typeDefs.Types, nil
+ return types
}
func (jm *JobsManagerImpl) GetSupportedTypes() []string {
"bytes"
"io/ioutil"
"net/http"
- "os"
- "path/filepath"
"sync"
"testing"
"time"
func TestJobsManagerGetTypes_filesOkShouldReturnSliceOfTypesAndProvideSupportedTypes(t *testing.T) {
assertions := require.New(t)
- typesDir, err := os.MkdirTemp("", "configs")
- if err != nil {
- t.Errorf("Unable to create temporary directory for types due to: %v", err)
- }
- fname := filepath.Join(typesDir, "type_config.json")
- managerUnderTest := NewJobsManagerImpl(fname, nil, "", nil)
- t.Cleanup(func() {
- os.RemoveAll(typesDir)
- })
- if err = os.WriteFile(fname, []byte(typeDefinition), 0666); err != nil {
- t.Errorf("Unable to create temporary config file for types due to: %v", err)
- }
- types, err := managerUnderTest.LoadTypesFromConfiguration()
+
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
+
wantedType := config.TypeDefinition{
Id: "type1",
DmaapTopicURL: "events/unauthenticated.SEC_FAULT_OUTPUT/dmaapmediatorproducer/type1",
}
wantedTypes := []config.TypeDefinition{wantedType}
+
+ types := managerUnderTest.LoadTypesFromConfiguration(wantedTypes)
+
assertions.EqualValues(wantedTypes, types)
- assertions.Nil(err)
supportedTypes := managerUnderTest.GetSupportedTypes()
assertions.EqualValues([]string{"type1"}, supportedTypes)
func TestJobsManagerAddJobWhenTypeIsSupported_shouldAddJobToChannel(t *testing.T) {
assertions := require.New(t)
- managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
wantedJob := JobInfo{
Owner: "owner",
LastUpdated: "now",
func TestJobsManagerAddJobWhenTypeIsNotSupported_shouldReturnError(t *testing.T) {
assertions := require.New(t)
- managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
jobInfo := JobInfo{
InfoTypeIdentity: "type1",
}
func TestJobsManagerAddJobWhenJobIdMissing_shouldReturnError(t *testing.T) {
assertions := require.New(t)
- managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
managerUnderTest.allTypes["type1"] = TypeData{
TypeId: "type1",
}
func TestJobsManagerAddJobWhenTargetUriMissing_shouldReturnError(t *testing.T) {
assertions := require.New(t)
- managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
managerUnderTest.allTypes["type1"] = TypeData{
TypeId: "type1",
}
func TestJobsManagerDeleteJob_shouldSendDeleteToChannel(t *testing.T) {
assertions := require.New(t)
- managerUnderTest := NewJobsManagerImpl("", nil, "", nil)
+ managerUnderTest := NewJobsManagerImpl(nil, "", nil)
jobsHandler := jobsHandler{
deleteJobCh: make(chan string)}
managerUnderTest.allTypes["type1"] = TypeData{
})
jobsHandler := newJobsHandler("type1", "/topicUrl", pollClientMock, distributeClientMock)
- jobsManager := NewJobsManagerImpl("", pollClientMock, "http://mrAddr", distributeClientMock)
+ jobsManager := NewJobsManagerImpl(pollClientMock, "http://mrAddr", distributeClientMock)
jobsManager.allTypes["type1"] = TypeData{
DMaaPTopicURL: "/topicUrl",
TypeId: "type1",
"time"
"github.com/hashicorp/go-retryablehttp"
+ log "github.com/sirupsen/logrus"
)
// HTTPClient interface
func CreateRetryClient(cert tls.Certificate) *http.Client {
rawRetryClient := retryablehttp.NewClient()
+ rawRetryClient.Logger = leveledLogger{}
rawRetryClient.RetryWaitMax = time.Minute
rawRetryClient.RetryMax = math.MaxInt
rawRetryClient.HTTPClient.Transport = getSecureTransportWithoutVerify(cert)
u, _ := url.Parse(configUrl)
return u.Scheme == "https"
}
+
+// leveledLogger adapts logrus to the leveled-logging interface expected by
+// the retryablehttp client (it is assigned to rawRetryClient.Logger in
+// CreateRetryClient) so retry messages are emitted at their proper level.
+type leveledLogger struct {
+}
+
+func (ll leveledLogger) Error(msg string, keysAndValues ...interface{}) {
+ log.WithFields(getFields(keysAndValues)).Error(msg)
+}
+func (ll leveledLogger) Info(msg string, keysAndValues ...interface{}) {
+ log.WithFields(getFields(keysAndValues)).Info(msg)
+}
+func (ll leveledLogger) Debug(msg string, keysAndValues ...interface{}) {
+ log.WithFields(getFields(keysAndValues)).Debug(msg)
+}
+func (ll leveledLogger) Warn(msg string, keysAndValues ...interface{}) {
+ log.WithFields(getFields(keysAndValues)).Warn(msg)
+}
+
+// getFields turns an alternating key/value slice into logrus Fields.
+// NOTE(review): keysAndValues[i+1] is read without a length guard, so an odd
+// number of elements panics with index out of range — confirm all callers
+// always pass complete key/value pairs, or add a bounds check.
+func getFields(keysAndValues []interface{}) log.Fields {
+ fields := log.Fields{}
+ for i := 0; i < len(keysAndValues); i = i + 2 {
+ fields[fmt.Sprint(keysAndValues[i])] = keysAndValues[i+1]
+ }
+ return fields
+}
"net/http"
"github.com/gorilla/mux"
+ log "github.com/sirupsen/logrus"
"oransc.org/nonrtric/dmaapmediatorproducer/internal/jobs"
)
const AddJobPath = "/jobs"
const jobIdToken = "infoJobId"
const deleteJobPath = AddJobPath + "/{" + jobIdToken + "}"
+const logLevelToken = "level"
+const logAdminPath = "/admin/log"
type ProducerCallbackHandler struct {
jobsManager jobs.JobsManager
r.HandleFunc(StatusPath, statusHandler).Methods(http.MethodGet).Name("status")
r.HandleFunc(AddJobPath, callbackHandler.addInfoJobHandler).Methods(http.MethodPost).Name("add")
r.HandleFunc(deleteJobPath, callbackHandler.deleteInfoJobHandler).Methods(http.MethodDelete).Name("delete")
+ r.HandleFunc(logAdminPath, callbackHandler.setLogLevel).Methods(http.MethodPut).Name("setLogLevel")
r.NotFoundHandler = ¬FoundHandler{}
r.MethodNotAllowedHandler = &methodNotAllowedHandler{}
return r
h.jobsManager.DeleteJobFromRESTCall(id)
}
+// setLogLevel changes the application's log level at runtime. The new level
+// is read from the "level" query parameter; a value logrus cannot parse
+// leaves the current level unchanged and answers 400 Bad Request. On success
+// the handler writes nothing, so the response defaults to 200 OK.
+func (h *ProducerCallbackHandler) setLogLevel(w http.ResponseWriter, r *http.Request) {
+ query := r.URL.Query()
+ logLevelStr := query.Get(logLevelToken)
+ if loglevel, err := log.ParseLevel(logLevelStr); err == nil {
+ log.SetLevel(loglevel)
+ } else {
+ http.Error(w, fmt.Sprintf("Invalid log level: %v. Log level will not be changed!", logLevelStr), http.StatusBadRequest)
+ return
+ }
+}
+
type notFoundHandler struct{}
func (h *notFoundHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
handler.ServeHTTP(responseRecorder, newRequest(http.MethodPut, "/status", nil, t))
assertions.Equal(http.StatusMethodNotAllowed, responseRecorder.Code)
assertions.Contains(responseRecorder.Body.String(), "Method is not supported.")
+
+ setLogLevelRoute := r.Get("setLogLevel")
+ assertions.NotNil(setLogLevelRoute)
+ supportedMethods, err = setLogLevelRoute.GetMethods()
+ assertions.Equal([]string{http.MethodPut}, supportedMethods)
+ assertions.Nil(err)
+ path, _ = setLogLevelRoute.GetPathTemplate()
+ assertions.Equal("/admin/log", path)
}
func TestStatusHandler(t *testing.T) {
},
},
wantedStatus: http.StatusOK,
- wantedBody: "",
},
{
name: "AddInfoJobHandler with incorrect job info, should return BadRequest",
jobHandlerMock.AssertCalled(t, "DeleteJobFromRESTCall", "job1")
}
+// TestSetLogLevel drives the setLogLevel handler with both a valid and an
+// invalid "level" query parameter and checks the resulting HTTP status and
+// error body.
+func TestSetLogLevel(t *testing.T) {
+ assertions := require.New(t)
+
+ type args struct {
+ logLevel string
+ }
+ // Table-driven cases: one accepted level, one rejected level.
+ tests := []struct {
+ name string
+ args args
+ wantedStatus int
+ wantedBody string
+ }{
+ {
+ name: "Set to valid log level, should return OK",
+ args: args{
+ logLevel: "Debug",
+ },
+ wantedStatus: http.StatusOK,
+ },
+ {
+ name: "Set to invalid log level, should return BadRequest",
+ args: args{
+ logLevel: "bad",
+ },
+ wantedStatus: http.StatusBadRequest,
+ wantedBody: "Invalid log level: bad",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ callbackHandlerUnderTest := NewProducerCallbackHandler(nil)
+
+ handler := http.HandlerFunc(callbackHandlerUnderTest.setLogLevel)
+ responseRecorder := httptest.NewRecorder()
+ r, _ := http.NewRequest(http.MethodPut, "/admin/log?level="+tt.args.logLevel, nil)
+
+ handler.ServeHTTP(responseRecorder, r)
+
+ assertions.Equal(tt.wantedStatus, responseRecorder.Code, tt.name)
+ assertions.Contains(responseRecorder.Body.String(), tt.wantedBody, tt.name)
+ })
+ }
+}
+
func newRequest(method string, url string, jobInfo *jobs.JobInfo, t *testing.T) *http.Request {
var body io.Reader
if jobInfo != nil {
}
retryClient := restclient.CreateRetryClient(cert)
- jobsManager := jobs.NewJobsManagerImpl("configs/type_config.json", retryClient, configuration.DMaaPMRAddress, restclient.CreateClientWithoutRetry(cert, 5*time.Second))
+ jobsManager := jobs.NewJobsManagerImpl(retryClient, configuration.DMaaPMRAddress, restclient.CreateClientWithoutRetry(cert, 10*time.Second))
if err := registerTypesAndProducer(jobsManager, configuration.InfoCoordinatorAddress, callbackAddress, retryClient); err != nil {
log.Fatalf("Stopping producer due to: %v", err)
}
}
func registerTypesAndProducer(jobTypesHandler jobs.JobTypesManager, infoCoordinatorAddress string, callbackAddress string, client restclient.HTTPClient) error {
registrator := config.NewRegistratorImpl(infoCoordinatorAddress, client)
- if types, err := jobTypesHandler.LoadTypesFromConfiguration(); err == nil {
- if regErr := registrator.RegisterTypes(types); regErr != nil {
- return fmt.Errorf("unable to register all types due to: %v", regErr)
- }
- } else {
- return fmt.Errorf("unable to get types to register due to: %v", err)
+ configTypes, err := config.GetJobTypesFromConfiguration("configs/type_config.json")
+ if err != nil {
+ return fmt.Errorf("unable to register all types due to: %v", err)
}
+ regErr := registrator.RegisterTypes(jobTypesHandler.LoadTypesFromConfiguration(configTypes))
+ if regErr != nil {
+ return fmt.Errorf("unable to register all types due to: %v", regErr)
+ }
+
producer := config.ProducerRegistrationInfo{
InfoProducerSupervisionCallbackUrl: callbackAddress + server.StatusPath,
SupportedInfoTypes: jobTypesHandler.GetSupportedTypes(),
--- /dev/null
+<!--
+ ============LICENSE_START=======================================================
+ Copyright (C) 2021 Nordix Foundation.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ SPDX-License-Identifier: Apache-2.0
+ ============LICENSE_END=========================================================
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <groupId>oransc.org</groupId>
+ <artifactId>dmaapmediatorproducer</artifactId>
+ <version>1.0.0</version>
+ <properties>
+ <docker-maven-plugin.version>0.30.0</docker-maven-plugin.version>
+ </properties>
+
+ <build>
+ <plugins>
+ <!-- Compile and unit test the Go module via build_and_test.sh during the
+ generate-sources phase, so the binary exists before image packaging. -->
+ <plugin>
+ <artifactId>exec-maven-plugin</artifactId>
+ <groupId>org.codehaus.mojo</groupId>
+ <executions>
+ <execution>
+ <id>Build Go binary</id>
+ <phase>generate-sources</phase>
+ <goals>
+ <goal>exec</goal>
+ </goals>
+ <configuration>
+ <executable>${basedir}/build_and_test.sh</executable>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <!-- Build the producer's Docker image at package time; the second
+ execution additionally pushes it (also tagged "latest") to the
+ registry given by CONTAINER_PUSH_REGISTRY. -->
+ <plugin>
+ <groupId>io.fabric8</groupId>
+ <artifactId>docker-maven-plugin</artifactId>
+ <version>${docker-maven-plugin.version}</version>
+ <inherited>false</inherited>
+ <executions>
+ <execution>
+ <id>generate-nonrtric-dmaap-mediator-producer-image</id>
+ <phase>package</phase>
+ <goals>
+ <goal>build</goal>
+ </goals>
+ <configuration>
+ <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
+ <images>
+ <image>
+ <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
+ <build>
+ <cleanup>try</cleanup>
+ <contextDir>${basedir}</contextDir>
+ <dockerFile>Dockerfile</dockerFile>
+ <args>
+ <JAR>${project.build.finalName}.jar</JAR>
+ </args>
+ <tags>
+ <tag>${project.version}</tag>
+ </tags>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ </execution>
+ <execution>
+ <id>push-nonrtric-dmaap-mediator-producer-image</id>
+ <goals>
+ <goal>build</goal>
+ <goal>push</goal>
+ </goals>
+ <configuration>
+ <pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
+ <pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
+ <images>
+ <image>
+ <name>o-ran-sc/nonrtric-dmaap-mediator-producer:${project.version}</name>
+ <build>
+ <contextDir>${basedir}</contextDir>
+ <dockerFile>Dockerfile</dockerFile>
+ <args>
+ <JAR>${project.build.finalName}.jar</JAR>
+ </args>
+ <tags>
+ <tag>${project.version}</tag>
+ <tag>latest</tag>
+ </tags>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
registerJob(*port)
- fmt.Print("Starting consumer on port: ", *port)
+ fmt.Println("Starting consumer on port: ", *port)
fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), nil))
}
InfoTypeId: "STD_Fault_Messages",
JobDefinition: "{}",
}
- fmt.Print("Registering consumer: ", jobInfo)
+ fmt.Println("Registering consumer: ", jobInfo)
body, _ := json.Marshal(jobInfo)
putErr := restclient.Put(fmt.Sprintf("http://localhost:8083/data-consumer/v1/info-jobs/job%v", port), body, &httpClient)
if putErr != nil {
- fmt.Printf("Unable to register consumer: %v", putErr)
+ fmt.Println("Unable to register consumer: ", putErr)
}
}
var responseBody []byte
if critical {
responseBody = getFaultMessage("CRITICAL")
+ fmt.Println("Sending CRITICAL")
critical = false
} else {
responseBody = getFaultMessage("NORMAL")
+ fmt.Println("Sending NORMAL")
critical = true
}
- // w.Write(responseBody)
fmt.Fprint(w, string(responseBody))
}
NONRTRIC_GATEWAY_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-gateway"
NONRTRIC_GATEWAY_IMAGE_TAG="1.0.0"
-#ECS
-ECS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG="1.1.0"
+#ICS
+ICS_IMAGE_BASE="nexus3.o-ran-sc.org:10002/o-ran-sc/nonrtric-information-coordinator-service"
+ICS_IMAGE_TAG="1.1.0"
#CONSUMER
CONSUMER_IMAGE_BASE="eexit/mirror-http-server"
#DMAAP_MEDIATOR_JAVA
DMAAP_MEDIATOR_JAVA_BASE="nexus3.o-ran-sc.org:10003/o-ran-sc/nonrtric-dmaap-adaptor"
-DMAAP_MEDIATOR_JAVA_TAG="1.0.0-SNAPSHOT"
\ No newline at end of file
+DMAAP_MEDIATOR_JAVA_TAG="1.0.0-SNAPSHOT"
By default, if the containers are started up and running by docker-compose file in the same directory, just run commands:
./preparePmsData.sh
-prepareEcsData.sh
-This is to generate some data into the ECS microservice
+prepareIcsData.sh
+This is to generate some data into the ICS microservice
prepareDmaapMsg.sh
This is to generate some data into the Dmaap MR, so that PMS reads message from MR
## O-RAN-SC Control Panel
-The Non-RT RIC Control Panel is a graphical user interface that enables the user to view and manage the A1 policies in the RAN and also view producers and jobs for the Enrichement coordinator service.
+The Non-RT RIC Control Panel is a graphical user interface that enables the user to view and manage the A1 policies in the RAN and also view producers and jobs for the Information coordinator service.
### O-RAN-SC Control Panel Gateway:
-To view the policy or enrichment information in control panel gui along with Policy Management Service & Enrichment Coordinator Service you should also have nonrtric gateway because all the request from the gui is passed through this API gateway.
+To view the policy or information jobs and types in control panel gui along with Policy Management Service & Information Coordinator Service you should also have nonrtric gateway because all the request from the gui is passed through this API gateway.
#### Prerequisite:
To start all the necessary components, run the following command:
-docker-compose -f docker-compose.yaml -f control-panel/docker-compose.yaml -f nonrtric-gateway/docker-compose.yaml -f policy-service/docker-compose.yaml -f ecs/docker-compose.yaml -f a1-sim/docker-compose.yaml up
\ No newline at end of file
+docker-compose -f docker-compose.yaml -f control-panel/docker-compose.yaml -f nonrtric-gateway/docker-compose.yaml -f policy-service/docker-compose.yaml -f ics/docker-compose.yaml -f a1-sim/docker-compose.yaml up
# The scripts in data/ will generate some dummy data in the running system.
# It will create:
-# one EiProducer in ECS
-# one EiType in ECS
-# one EiJob in ECS
+# one InfoProducer in ICS
+# one InfoType in ICS
+# one InfoJob in ICS
# Run command:
-# ./prepareEcsData.sh [ECS port] [http/https]
+# ./prepareIcsData.sh [ICS port] [http/https]
-ecs_port=${1:-8083}
+ics_port=${1:-8083}
httpx=${4:-"http"}
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
-echo "using ecs port: "$ecs_port
+echo "using ics port: "$ics_port
echo "using protocol: "$httpx
echo -e "\n"
-echo "ECS status:"
-curl -skw " %{http_code}" $httpx://localhost:$ecs_port/status
+echo "ICS status:"
+curl -skw " %{http_code}" $httpx://localhost:$ics_port/status
echo -e "\n"
-# Create EiType
-echo "Create EiType:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types/type1 -H accept:application/json -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiType.json
+# Create InfoType
+echo "Create InfoType:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types/type1 -H accept:application/json -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoType.json
echo -e "\n"
-# Get EiTypes
-echo "Get EiTypes:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types -H Content-Type:application/json | jq
+# Get InfoTypes
+echo "Get InfoTypes:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types -H Content-Type:application/json | jq
echo -e "\n"
-# Get Individual EiType
-echo "Get Individual EiType:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-types/type1 -H Content-Type:application/json | jq
+# Get Individual InfoType
+echo "Get Individual InfoType:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-types/type1 -H Content-Type:application/json | jq
echo -e "\n"
-# Create EiProducer
-echo "Create EiProducer:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiProducer.json
+# Create InfoProducer
+echo "Create InfoProducer:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoProducer.json
echo -e "\n"
-# Get EiProducers
-echo "Get EiProducers:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers -H Content-Type:application/json | jq
+# Get InfoProducers
+echo "Get InfoProducers:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers -H Content-Type:application/json | jq
echo -e "\n"
-# Get Individual EiProducer
-echo "Get Individual EiProducer:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1 -H Content-Type:application/json | jq
+# Get Individual InfoProducer
+echo "Get Individual InfoProducer:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1 -H Content-Type:application/json | jq
echo -e "\n"
-# Get Individual EiProducer Status
-echo "Get Individual EiProducer:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/data-producer/v1/info-producers/1/status -H Content-Type:application/json | jq
+# Get Individual InfoProducer Status
+echo "Get Individual InfoProducer:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/data-producer/v1/info-producers/1/status -H Content-Type:application/json | jq
echo -e "\n"
-# Create EiJob
-echo "Create EiJob Of A Certain Type type1:"
-curl -X PUT -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ECS/EiJob.json
+# Create InfoJob
+echo "Create InfoJob Of A Certain Type type1:"
+curl -X PUT -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json --data-binary @${SHELL_FOLDER}/testdata/ICS/InfoJob.json
echo -e "\n"
-# Get EiJobs
-echo "Get EiJobs:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs -H Content-Type:application/json | jq
+# Get InfoJobs
+echo "Get InfoJobs:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs -H Content-Type:application/json | jq
echo -e "\n"
-# Get Individual EiJob:
-echo "Get Individual EiJob:"
-curl -X GET -skw %{http_code} $httpx://localhost:$ecs_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json | jq
+# Get Individual InfoJob:
+echo "Get Individual InfoJob:"
+curl -X GET -skw %{http_code} $httpx://localhost:$ics_port/A1-EI/v1/eijobs/job1 -H Content-Type:application/json | jq
echo -e "\n"
\ No newline at end of file
# ./sendMsgToMediator.sh [dmaap-mr port] [http/https]
SHELL_FOLDER=$(cd "$(dirname "$0")";pwd)
-bash ${SHELL_FOLDER}/prepareEcsData.sh
+bash ${SHELL_FOLDER}/prepareIcsData.sh
dmaa_mr_port=${1:-3904}
httpx=${2:-"http"}
@startuml
dmaap_mr <- dmaap_mediator: dmaap_mediator reads msg from dmaap_mr
-dmaap_mediator -> ecs: dmaap_mediator gets jobs from ecs
+dmaap_mediator -> ics: dmaap_mediator gets jobs from ics
dmaap_mediator -> consumer: callbackUrl, send msg to consumer
@enduml
\ No newline at end of file
environment:
- INFO_PRODUCER_HOST=http://consumer
- INFO_PRODUCER_PORT=8088
- - INFO_COORD_ADDR=http://ecs:8083
+ - INFO_COORD_ADDR=http://ics:8083
- DMAAP_MR_ADDR=http://dmaap-mr:3904
- PRODUCER_CERT_PATH=security/producer.crt
- PRODUCER_KEY_PATH=security/producer.key
- LOG_LEVEL=Debug
networks:
- - default
\ No newline at end of file
+ - default
http.proxy-host:
http.proxy-port: 0
vardata-directory: /var/dmaap-adaptor-service
- ecs-base-url: http://ecs:8083
+ ics-base-url: http://ics:8083
# Location of the component configuration file. The file will only be used if the Consul database is not used;
# configuration from the Consul will override the file.
configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
name: nonrtric-docker-net
services:
- ecs:
- image: "${ECS_IMAGE_BASE}:${ECS_IMAGE_TAG}"
- container_name: ecs
+ ics:
+ image: "${ICS_IMAGE_BASE}:${ICS_IMAGE_TAG}"
+ container_name: ics
networks:
default:
aliases:
- - enrichment-service-container
+ - information-service-container
ports:
- 8083:8083
- 8434:8434
The core Non-RT RIC consists of several parts, with available APIs described in the sections below:
* The A1 Policy Management Service
-* The Enrichment Coordinator Service
+* The Information Coordinator Service
+* DMaaP Adaptor
* The Non-RT-RIC App Catalogue
* K8S Helm Chart LCM Manager (Initial) **<ToDo>**
For information about the A1 Policy Management Service that is implemented in ONAP, see `ONAP docs <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_ and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
-Enrichment Coordinator Service
-==============================
+Information Coordinator Service
+===============================
-See `A1 Enrichment Information Coordination Service API <./ecs-api.html>`_ for full details of the API.
+See `A1 Information Coordination Service API <./ics-api.html>`_ for full details of the API.
The API is also described in Swagger-JSON and YAML:
:header: "API name", "|swagger-icon|", "|yaml-icon|"
:widths: 10,5,5
- "A1 Enrichment Information Coordination Service API", ":download:`link <../enrichment-coordinator-service/api/ecs-api.json>`", ":download:`link <../enrichment-coordinator-service/api/ecs-api.yaml>`"
+ "A1 Information Coordination Service API", ":download:`link <../information-coordinator-service/api/ics-api.json>`", ":download:`link <../information-coordinator-service/api/ics-api.yaml>`"
+
+DMaaP Adaptor
+=============
+
+The DMaaP Adaptor provides support for push delivery of any data received from DMaaP or Kafka.
+
+See `DMaaP Adaptor API <./dmaap-adaptor-api.html>`_ for full details of the API.
+
+The API is also described in Swagger-JSON and YAML:
+
+
+.. csv-table::
+ :header: "API name", "|swagger-icon|", "|yaml-icon|"
+ :widths: 10,5, 5
+
+ "DMaaP Adaptor API", ":download:`link <../dmaap-adaptor-java/api/api.json>`", ":download:`link <../dmaap-adaptor-java/api/api.yaml>`"
Non-RT-RIC App Catalogue (Initial)
==================================
'http://127.0.0.1.*',
'https://gerrit.o-ran-sc.org.*',
'./rac-api.html', #Generated file that doesn't exist at link check.
- './ecs-api.html' #Generated file that doesn't exist at link check.
+ './ics-api.html', #Generated file that doesn't exist at link check.
+ './dmaap-adaptor-api.html' #Generated file that doesn't exist at link check.
]
extensions = ['sphinxcontrib.redoc', 'sphinx.ext.intersphinx',]
'embed': True,
},
{
- 'name': 'ECS API',
- 'page': 'ecs-api',
- 'spec': '../enrichment-coordinator-service/api/ecs-api.json',
+ 'name': 'ICS API',
+ 'page': 'ics-api',
+ 'spec': '../information-coordinator-service/api/ics-api.json',
+ 'embed': True,
+ },
+ {
+ 'name': 'DMaaP Adaptor API',
+ 'page': 'dmaap-adaptor-api',
+ 'spec': '../dmaap-adaptor-java/api/api.json',
'embed': True,
}
]
The A1 Policy Management Service is implemented in ONAP. For documentation see `ONAP CCSDK documentation <https://docs.onap.org/projects/onap-ccsdk-oran/en/latest/index.html>`_
and `wiki <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_.
-Enrichment Coordinator Service
-------------------------------
-The Enrichment Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
+Information Coordinator Service
+-------------------------------
+The Information Coordinator Service is a Java 11 web application built using the Spring Framework. Using Spring Boot
dependencies, it runs as a standalone application.
Its main functionality is to act as a data subscription broker and to decouple data producer from data consumers.
-See the ./config/README file in the *enrichment-coordinator-service* directory Gerrit repo on how to create and setup
+See the ./config/README file in the *information-coordinator-service* directory Gerrit repo on how to create and setup
the certificates and private keys needed for HTTPS.
Start standalone
++++++++++++++++
-The project uses Maven. To start the Enrichment Coordinator Service as a freestanding application, run the following
-command in the *enrichment-coordinator-service* directory:
+The project uses Maven. To start the Information Coordinator Service as a freestanding application, run the following
+command in the *information-coordinator-service* directory:
+-----------------------------+
| mvn spring-boot:run |
Start in Docker
+++++++++++++++
-To build and deploy the Enrichment Coordinator Service, go to the "enrichment-coordinator-service" folder and run the
+To build and deploy the Information Coordinator Service, go to the "information-coordinator-service" folder and run the
following command:
+-----------------------------+
Then start the container by running the following command:
+--------------------------------------------------------------------+
- | docker run nonrtric-enrichment-coordinator-service |
+ | docker run nonrtric-information-coordinator-service |
+--------------------------------------------------------------------+
Initial Non-RT-RIC App Catalogue
docker-compose -f docker-compose.yaml
-f policy-service/docker-compose.yaml
- -f ecs/docker-compose.yaml
+ -f ics/docker-compose.yaml
Install with Helm
+++++++++++++++++
* Non-RT-RIC Control Panel / Dashboard
* A1 Policy Management Service (developed in ONAP)
* A1/SDNC Controller & A1 Adapter (Controller plugin)
-* Enrichment Information Coordinator
+* Information Coordinator Service
* Non-RT-RIC (Spring Cloud) Service Gateway
* Non-RT-RIC (Kong) Service Exposure Prototyping
* Initial Non-RT-RIC App Catalogue
* Near-RT-RIC A1 Simulator
+* DMaaP Adaptor
The code base for "D" Release is in the `NONRTRIC <https://gerrit.o-ran-sc.org/r/admin/repos/nonrtric>`_, `NONRTRIC-ControlPanel <https://gerrit.o-ran-sc.org/r/admin/repos/portal/nonrtric-controlpanel>`_, and `Near-RT-RIC A1-Simulator <https://gerrit.o-ran-sc.org/r/admin/repos/sim/a1-interface>`_ , Gerrit source repositories (D Branch).
* View and Manage A1 policies in the RAN (near-RT-RICs)
* Interacts with the Policy agent’s NBI (REST API)
* Graphical A1 policy creation/editing is model-driven, based on policy type’s JSON schema
-* View and manage producers and jobs for the Enrichment coordinator service
+* View and manage producers and jobs for the Information coordinator service
* Configure A1 Policy Management Service (e.g. add/remove near-rt-rics)
* Interacts with the A1-PolicyManagementService & A1-EI-Coordinator (REST NBIs) via Service Exposure gateway
See also: `A1 Policy Management Service in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_
+
+Implementation:
+* Implemented as a Java Spring Boot application
+
A1/SDNC Controller & A1 Adapter (Controller plugin)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Mediation point for A1 interface termination in SMO/NONRTRIC
See also: `A1 Adapter/Controller Functions in ONAP <https://wiki.onap.org/pages/viewpage.action?pageId=84672221>`_
-Enrichment Information Job Coordination Service
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+Information Coordination Service
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Coordinate/Register A1-EI Types, Producers, Consumers, and Jobs.
* Query status of A1-EI jobs
* Monitors all near-RT-RICs and recovers from inconsistencies
* After EI-type/Producer/Consumer/Job is successfully registered delivery/flow can happen directly between A1-EI Producers (in SMO/NONRTRIC domain) and A1-EI consumers (near-RT-RICs in RAN domain)
-* *Being extended to coordinate non-A1 Enrichment Information exchange between NONRTRIC Apps*
+* *Being extended to coordinate non-A1 Information exchange between NONRTRIC Apps*
+
+
+Implementation:
+* Implemented as a Java Spring Boot application
+
+DMaaP Adaptor
+~~~~~~~~~~~~~
+
+Is a generic information producer, which registers itself as an information producer of information types (in the Information Coordination Service).
+The information types are defined in a configuration file.
+Information jobs can retrieve data from DMaaP or Kafka topics and push this to data consumers (accessing the ICS API).
+
+Implementation:
+* Implemented as a Java Spring Boot application
Non-RT-RIC (Spring Cloud) Service Gateway
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support Apps to use A1 Services
* `Spring Cloud Gateway <https://cloud.spring.io/spring-cloud-gateway>`_ provides the library to build a basic API gateway
-* Exposes A1 Policy Management Service & Enrichment Coordinator Service.
+* Exposes A1 Policy Management Service & Information Coordinator Service.
* Additional predicates can be added in code or preferably in the Gateway yaml configuration.
Implementation:
Stateful A1 test stub.
* Used to create multiple stateful A1 providers (simulated near-rt-rics)
-* Supports A1-Policy and A1-EnrichmentInformation
+* Supports A1-Policy and A1-Enrichment Information
* Swagger-based northbound interface, so easy to change the A1 profile exposed (e.g. A1 version, A1 Policy Types, A1-E1 consumers, etc)
* All A1-AP versions supported
+++ /dev/null
-#
-# ============LICENSE_START=======================================================
-# Copyright (C) 2020 Nordix Foundation.
-# ================================================================================
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-# ============LICENSE_END=========================================================
-#
-FROM openjdk:11-jre-slim
-
-ARG JAR
-
-WORKDIR /opt/app/enrichment-coordinator-service
-RUN mkdir -p /var/log/enrichment-coordinator-service
-RUN mkdir -p /opt/app/enrichment-coordinator-service/etc/cert/
-RUN mkdir -p /var/enrichment-coordinator-service
-RUN chmod -R 777 /var/enrichment-coordinator-service
-
-EXPOSE 8083 8434
-
-ADD /config/application.yaml /opt/app/enrichment-coordinator-service/config/application.yaml
-ADD target/${JAR} /opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar
-ADD /config/keystore.jks /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
-ADD /config/truststore.jks /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
-
-
-RUN chmod -R 777 /opt/app/enrichment-coordinator-service/config/
-
-CMD ["java", "-jar", "/opt/app/enrichment-coordinator-service/enrichment-coordinator-service.jar"]
-
-
-
-
--- /dev/null
+#
+# ============LICENSE_START=======================================================
+# Copyright (C) 2020 Nordix Foundation.
+# ================================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# SPDX-License-Identifier: Apache-2.0
+# ============LICENSE_END=========================================================
+#
+FROM openjdk:11-jre-slim
+
+ARG JAR
+
+WORKDIR /opt/app/information-coordinator-service
+RUN mkdir -p /var/log/information-coordinator-service
+RUN mkdir -p /opt/app/information-coordinator-service/etc/cert/
+RUN mkdir -p /var/information-coordinator-service
+
+EXPOSE 8083 8434
+
+ADD /config/application.yaml /opt/app/information-coordinator-service/config/application.yaml
+ADD target/${JAR} /opt/app/information-coordinator-service/information-coordinator-service.jar
+ADD /config/keystore.jks /opt/app/information-coordinator-service/etc/cert/keystore.jks
+ADD /config/truststore.jks /opt/app/information-coordinator-service/etc/cert/truststore.jks
+
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/information-coordinator-service
+RUN chown -R appuser:appuser /var/information-coordinator-service
+RUN chown -R appuser:appuser /var/log/information-coordinator-service
+USER appuser
+
+CMD ["java", "-jar", "/opt/app/information-coordinator-service/information-coordinator-service.jar"]
+
+
+
+
}
},
"consumer_job": {
- "description": "Information for an Enrichment Information Job",
+ "description": "Information for an Information Job",
"type": "object",
"required": [
"info_type_id",
"description": "Void/empty ",
"type": "object"
},
+ "Link": {
+ "type": "object",
+ "properties": {
+ "templated": {"type": "boolean"},
+ "href": {"type": "string"}
+ }
+ },
"consumer_type_subscription_info": {
"description": "Information for an information type subscription",
"type": "object",
}],
"tags": ["A1-EI (registration)"]
}},
+ "/actuator/threaddump": {"get": {
+ "summary": "Actuator web endpoint 'threaddump'",
+ "operationId": "handle_2_1_3",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
+ }},
"/example_dataproducer/info_job": {"post": {
"summary": "Callback for Information Job creation/modification",
"requestBody": {
"tags": ["Data consumer"]
}
},
+ "/actuator/loggers": {"get": {
+ "summary": "Actuator web endpoint 'loggers'",
+ "operationId": "handle_6",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
+ }},
+ "/actuator/health/**": {"get": {
+ "summary": "Actuator web endpoint 'health-path'",
+ "operationId": "handle_12",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
+ }},
+ "/data-consumer/v1/info-types": {"get": {
+ "summary": "Information type identifiers",
+ "operationId": "getinfoTypeIdentifiers",
+ "responses": {"200": {
+ "description": "Information type identifiers",
+ "content": {"application/json": {"schema": {
+ "type": "array",
+ "items": {"type": "string"}
+ }}}
+ }},
+ "tags": ["Data consumer"]
+ }},
+ "/actuator/metrics/{requiredMetricName}": {"get": {
+ "summary": "Actuator web endpoint 'metrics-requiredMetricName'",
+ "operationId": "handle_5",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "requiredMetricName",
+ "required": true
+ }],
+ "tags": ["Actuator"]
+ }},
+ "/actuator": {"get": {
+ "summary": "Actuator root web endpoint",
+ "operationId": "links_1",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {
+ "additionalProperties": {
+ "additionalProperties": {"$ref": "#/components/schemas/Link"},
+ "type": "object"
+ },
+ "type": "object"
+ }}}
+ }},
+ "tags": ["Actuator"]
+ }},
+ "/data-consumer/v1/info-jobs": {"get": {
+ "summary": "Information Job identifiers",
+ "description": "query for information job identifiers",
+ "operationId": "getJobIds",
+ "responses": {
+ "200": {
+ "description": "Information information job identifiers",
+ "content": {"application/json": {"schema": {
+ "type": "array",
+ "items": {"type": "string"}
+ }}}
+ },
+ "404": {
+ "description": "Information type is not found",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+ }
+ },
+ "parameters": [
+ {
+ "schema": {"type": "string"},
+ "in": "query",
+ "name": "infoTypeId",
+ "description": "selects subscription jobs of matching information type",
+ "required": false
+ },
+ {
+ "schema": {"type": "string"},
+ "in": "query",
+ "name": "owner",
+ "description": "selects result for one owner",
+ "required": false
+ }
+ ],
+ "tags": ["Data consumer"]
+ }},
+ "/actuator/loggers/{name}": {
+ "post": {
+ "summary": "Actuator web endpoint 'loggers-name'",
+ "operationId": "handle_0",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "name",
+ "required": true
+ }],
+ "tags": ["Actuator"]
+ },
+ "get": {
+ "summary": "Actuator web endpoint 'loggers-name'",
+ "operationId": "handle_7",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "name",
+ "required": true
+ }],
+ "tags": ["Actuator"]
+ }
+ },
+ "/example_dataconsumer/info_jobs/{infoJobId}/status": {"post": {
+ "summary": "Callback for changed Information Job status",
+ "requestBody": {
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}},
+ "required": true
+ },
+ "description": "The primitive is implemented by the data consumer and is invoked when a Information Job status has been changed.",
+ "operationId": "jobStatusCallback",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
+ }},
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "infoJobId",
+ "required": true
+ }],
+ "tags": ["A1-EI (callbacks)"]
+ }},
+ "/A1-EI/v1/eijobs/{eiJobId}/status": {"get": {
+ "summary": "EI job status",
+ "operationId": "getEiJobStatus_1",
+ "responses": {
+ "200": {
+ "description": "EI job status",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}}
+ },
+ "404": {
+ "description": "Enrichment Information job is not found",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+ }
+ },
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "eiJobId",
+ "required": true
+ }],
+ "tags": ["A1-EI (registration)"]
+ }},
+ "/data-producer/v1/info-producers/{infoProducerId}/status": {"get": {
+ "summary": "Information producer status",
+ "operationId": "getInfoProducerStatus",
+ "responses": {
+ "200": {
+ "description": "Information producer status",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/producer_status"}}}
+ },
+ "404": {
+ "description": "Information producer is not found",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+ }
+ },
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "infoProducerId",
+ "required": true
+ }],
+ "tags": ["Data producer (registration)"]
+ }},
+ "/data-consumer/v1/info-jobs/{infoJobId}/status": {"get": {
+ "summary": "Job status",
+ "operationId": "getEiJobStatus",
+ "responses": {
+ "200": {
+ "description": "Information subscription job status",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/consumer_job_status"}}}
+ },
+ "404": {
+ "description": "Information subscription job is not found",
+ "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
+ }
+ },
+ "parameters": [{
+ "schema": {"type": "string"},
+ "in": "path",
+ "name": "infoJobId",
+ "required": true
+ }],
+ "tags": ["Data consumer"]
+ }},
+ "/actuator/metrics": {"get": {
+ "summary": "Actuator web endpoint 'metrics'",
+ "operationId": "handle_4",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
+ }},
+ "/actuator/info": {"get": {
+ "summary": "Actuator web endpoint 'info'",
+ "operationId": "handle_9",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
+ }},
"/example_dataproducer/health_check": {"get": {
"summary": "Producer supervision",
"description": "The endpoint is provided by the Information Producer and is used for supervision of the producer.",
}},
"tags": ["A1-EI (registration)"]
}},
- "/data-consumer/v1/info-types": {"get": {
- "summary": "Information type identifiers",
- "operationId": "getinfoTypeIdentifiers",
- "responses": {"200": {
- "description": "Information type identifiers",
- "content": {"application/json": {"schema": {
- "type": "array",
- "items": {"type": "string"}
- }}}
- }},
- "tags": ["Data consumer"]
- }},
"/data-producer/v1/info-producers/{infoProducerId}": {
"get": {
"summary": "Individual Information Producer",
"tags": ["A1-EI (registration)"]
}
},
- "/data-consumer/v1/info-jobs": {"get": {
- "summary": "Information Job identifiers",
- "description": "query for information job identifiers",
- "operationId": "getJobIds",
- "responses": {
- "200": {
- "description": "Information information job identifiers",
- "content": {"application/json": {"schema": {
- "type": "array",
- "items": {"type": "string"}
- }}}
- },
- "404": {
- "description": "Information type is not found",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
- }
- },
- "parameters": [
- {
- "schema": {"type": "string"},
- "in": "query",
- "name": "infoTypeId",
- "description": "selects subscription jobs of matching information type",
- "required": false
- },
- {
- "schema": {"type": "string"},
- "in": "query",
- "name": "owner",
- "description": "selects result for one owner",
- "required": false
- }
- ],
- "tags": ["Data consumer"]
+ "/actuator/logfile": {"get": {
+ "summary": "Actuator web endpoint 'logfile'",
+ "operationId": "handle_8",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
}},
"/data-consumer/v1/info-jobs/{infoJobId}": {
"get": {
}],
"tags": ["Data consumer"]
}},
- "/example_dataconsumer/info_jobs/{infoJobId}/status": {"post": {
- "summary": "Callback for changed Information Job status",
- "requestBody": {
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}},
- "required": true
- },
- "description": "The primitive is implemented by the data consumer and is invoked when a Information Job status has been changed.",
- "operationId": "jobStatusCallback",
+ "/actuator/health": {"get": {
+ "summary": "Actuator web endpoint 'health'",
+ "operationId": "handle_11",
"responses": {"200": {
"description": "OK",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
+ "content": {"*/*": {"schema": {"type": "object"}}}
}},
- "parameters": [{
- "schema": {"type": "string"},
- "in": "path",
- "name": "infoJobId",
- "required": true
- }],
- "tags": ["A1-EI (callbacks)"]
+ "tags": ["Actuator"]
}},
"/A1-EI/v1/eijobs": {"get": {
"summary": "EI job identifiers",
],
"tags": ["A1-EI (registration)"]
}},
- "/A1-EI/v1/eijobs/{eiJobId}/status": {"get": {
- "summary": "EI job status",
- "operationId": "getEiJobStatus_1",
- "responses": {
- "200": {
- "description": "EI job status",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/EiJobStatusObject"}}}
- },
- "404": {
- "description": "Enrichment Information job is not found",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
- }
- },
- "parameters": [{
- "schema": {"type": "string"},
- "in": "path",
- "name": "eiJobId",
- "required": true
- }],
- "tags": ["A1-EI (registration)"]
- }},
- "/data-producer/v1/info-producers/{infoProducerId}/status": {"get": {
- "summary": "Information producer status",
- "operationId": "getInfoProducerStatus",
- "responses": {
- "200": {
- "description": "Information producer status",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/producer_status"}}}
- },
- "404": {
- "description": "Information producer is not found",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
- }
- },
- "parameters": [{
- "schema": {"type": "string"},
- "in": "path",
- "name": "infoProducerId",
- "required": true
- }],
- "tags": ["Data producer (registration)"]
- }},
"/data-producer/v1/info-producers/{infoProducerId}/info-jobs": {"get": {
"summary": "Information Job definitions",
"description": "Information Job definitions for one Information Producer",
}],
"tags": ["Data producer (registration)"]
}},
- "/data-consumer/v1/info-jobs/{infoJobId}/status": {"get": {
- "summary": "Job status",
- "operationId": "getEiJobStatus",
- "responses": {
- "200": {
- "description": "Information subscription job status",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/consumer_job_status"}}}
- },
- "404": {
- "description": "Information subscription job is not found",
- "content": {"application/json": {"schema": {"$ref": "#/components/schemas/ProblemDetails"}}}
- }
- },
- "parameters": [{
- "schema": {"type": "string"},
- "in": "path",
- "name": "infoJobId",
- "required": true
- }],
- "tags": ["Data consumer"]
- }},
"/example_dataconsumer/info_type_status": {"post": {
"summary": "Callback for changed Information type registration status",
"requestBody": {
"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Void"}}}
}},
"tags": ["Data consumer (callbacks)"]
+ }},
+ "/actuator/heapdump": {"get": {
+ "summary": "Actuator web endpoint 'heapdump'",
+ "operationId": "handle_10",
+ "responses": {"200": {
+ "description": "OK",
+ "content": {"*/*": {"schema": {"type": "object"}}}
+ }},
+ "tags": ["Actuator"]
}}
},
"info": {
{
"name": "Data consumer",
"description": "API for data consumers"
+ },
+ {
+ "name": "Actuator",
+ "description": "Monitor and interact",
+ "externalDocs": {
+ "description": "Spring Boot Actuator Web API Documentation",
+ "url": "https://docs.spring.io/spring-boot/docs/current/actuator-api/html/"
+ }
}
]
}
\ No newline at end of file
description: API for monitoring of the service
- name: Data consumer
description: API for data consumers
+- name: Actuator
+ description: Monitor and interact
+ externalDocs:
+ description: Spring Boot Actuator Web API Documentation
+ url: https://docs.spring.io/spring-boot/docs/current/actuator-api/html/
paths:
/example_dataproducer/info_job/{infoJobId}:
delete:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
+ /actuator/threaddump:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'threaddump'
+ operationId: handle_2_1_3
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
/example_dataproducer/info_job:
post:
tags:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
+ /actuator/loggers:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'loggers'
+ operationId: handle_6
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ /actuator/health/**:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'health-path'
+ operationId: handle_12
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ /data-consumer/v1/info-types:
+ get:
+ tags:
+ - Data consumer
+ summary: Information type identifiers
+ operationId: getinfoTypeIdentifiers
+ responses:
+ 200:
+ description: Information type identifiers
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ /actuator/metrics/{requiredMetricName}:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'metrics-requiredMetricName'
+ operationId: handle_5
+ parameters:
+ - name: requiredMetricName
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ /actuator:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator root web endpoint
+ operationId: links_1
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ additionalProperties:
+ type: object
+ additionalProperties:
+ $ref: '#/components/schemas/Link'
+ /data-consumer/v1/info-jobs:
+ get:
+ tags:
+ - Data consumer
+ summary: Information Job identifiers
+ description: query for information job identifiers
+ operationId: getJobIds
+ parameters:
+ - name: infoTypeId
+ in: query
+ description: selects subscription jobs of matching information type
+ required: false
+ style: form
+ explode: true
+ schema:
+ type: string
+ - name: owner
+ in: query
+ description: selects result for one owner
+ required: false
+ style: form
+ explode: true
+ schema:
+ type: string
+ responses:
+ 200:
+ description: Information information job identifiers
+ content:
+ application/json:
+ schema:
+ type: array
+ items:
+ type: string
+ 404:
+ description: Information type is not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ /actuator/loggers/{name}:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'loggers-name'
+ operationId: handle_7
+ parameters:
+ - name: name
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ post:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'loggers-name'
+ operationId: handle_0
+ parameters:
+ - name: name
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ /example_dataconsumer/info_jobs/{infoJobId}/status:
+ post:
+ tags:
+ - A1-EI (callbacks)
+ summary: Callback for changed Information Job status
+ description: The primitive is implemented by the data consumer and is invoked
+ when a Information Job status has been changed.
+ operationId: jobStatusCallback
+ parameters:
+ - name: infoJobId
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/EiJobStatusObject'
+ required: true
+ responses:
+ 200:
+ description: OK
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Void'
+ /A1-EI/v1/eijobs/{eiJobId}/status:
+ get:
+ tags:
+ - A1-EI (registration)
+ summary: EI job status
+ operationId: getEiJobStatus_1
+ parameters:
+ - name: eiJobId
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: EI job status
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/EiJobStatusObject'
+ 404:
+ description: Enrichment Information job is not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ /data-producer/v1/info-producers/{infoProducerId}/status:
+ get:
+ tags:
+ - Data producer (registration)
+ summary: Information producer status
+ operationId: getInfoProducerStatus
+ parameters:
+ - name: infoProducerId
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: Information producer status
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/producer_status'
+ 404:
+ description: Information producer is not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ /data-consumer/v1/info-jobs/{infoJobId}/status:
+ get:
+ tags:
+ - Data consumer
+ summary: Job status
+ operationId: getEiJobStatus
+ parameters:
+ - name: infoJobId
+ in: path
+ required: true
+ style: simple
+ explode: false
+ schema:
+ type: string
+ responses:
+ 200:
+ description: Information subscription job status
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/consumer_job_status'
+ 404:
+ description: Information subscription job is not found
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ProblemDetails'
+ /actuator/metrics:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'metrics'
+ operationId: handle_4
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
+ /actuator/info:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'info'
+ operationId: handle_9
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
/example_dataproducer/health_check:
get:
tags:
type: array
items:
type: string
- /data-consumer/v1/info-types:
- get:
- tags:
- - Data consumer
- summary: Information type identifiers
- operationId: getinfoTypeIdentifiers
- responses:
- 200:
- description: Information type identifiers
- content:
- application/json:
- schema:
- type: array
- items:
- type: string
/data-producer/v1/info-producers/{infoProducerId}:
get:
tags:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
- /data-consumer/v1/info-jobs:
+ /actuator/logfile:
get:
tags:
- - Data consumer
- summary: Information Job identifiers
- description: query for information job identifiers
- operationId: getJobIds
- parameters:
- - name: infoTypeId
- in: query
- description: selects subscription jobs of matching information type
- required: false
- style: form
- explode: true
- schema:
- type: string
- - name: owner
- in: query
- description: selects result for one owner
- required: false
- style: form
- explode: true
- schema:
- type: string
+ - Actuator
+ summary: Actuator web endpoint 'logfile'
+ operationId: handle_8
responses:
200:
- description: Information information job identifiers
- content:
- application/json:
- schema:
- type: array
- items:
- type: string
- 404:
- description: Information type is not found
+ description: OK
content:
- application/json:
+ '*/*':
schema:
- $ref: '#/components/schemas/ProblemDetails'
+ type: object
/data-consumer/v1/info-jobs/{infoJobId}:
get:
tags:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
- /example_dataconsumer/info_jobs/{infoJobId}/status:
- post:
+ /actuator/health:
+ get:
tags:
- - A1-EI (callbacks)
- summary: Callback for changed Information Job status
- description: The primitive is implemented by the data consumer and is invoked
- when a Information Job status has been changed.
- operationId: jobStatusCallback
- parameters:
- - name: infoJobId
- in: path
- required: true
- style: simple
- explode: false
- schema:
- type: string
- requestBody:
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/EiJobStatusObject'
- required: true
+ - Actuator
+ summary: Actuator web endpoint 'health'
+ operationId: handle_11
responses:
200:
description: OK
content:
- application/json:
+ '*/*':
schema:
- $ref: '#/components/schemas/Void'
+ type: object
/A1-EI/v1/eijobs:
get:
tags:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
- /A1-EI/v1/eijobs/{eiJobId}/status:
- get:
- tags:
- - A1-EI (registration)
- summary: EI job status
- operationId: getEiJobStatus_1
- parameters:
- - name: eiJobId
- in: path
- required: true
- style: simple
- explode: false
- schema:
- type: string
- responses:
- 200:
- description: EI job status
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/EiJobStatusObject'
- 404:
- description: Enrichment Information job is not found
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/ProblemDetails'
- /data-producer/v1/info-producers/{infoProducerId}/status:
- get:
- tags:
- - Data producer (registration)
- summary: Information producer status
- operationId: getInfoProducerStatus
- parameters:
- - name: infoProducerId
- in: path
- required: true
- style: simple
- explode: false
- schema:
- type: string
- responses:
- 200:
- description: Information producer status
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/producer_status'
- 404:
- description: Information producer is not found
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/ProblemDetails'
/data-producer/v1/info-producers/{infoProducerId}/info-jobs:
get:
tags:
application/json:
schema:
$ref: '#/components/schemas/ProblemDetails'
- /data-consumer/v1/info-jobs/{infoJobId}/status:
- get:
- tags:
- - Data consumer
- summary: Job status
- operationId: getEiJobStatus
- parameters:
- - name: infoJobId
- in: path
- required: true
- style: simple
- explode: false
- schema:
- type: string
- responses:
- 200:
- description: Information subscription job status
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/consumer_job_status'
- 404:
- description: Information subscription job is not found
- content:
- application/json:
- schema:
- $ref: '#/components/schemas/ProblemDetails'
/example_dataconsumer/info_type_status:
post:
tags:
application/json:
schema:
$ref: '#/components/schemas/Void'
+ /actuator/heapdump:
+ get:
+ tags:
+ - Actuator
+ summary: Actuator web endpoint 'heapdump'
+ operationId: handle_10
+ responses:
+ 200:
+ description: OK
+ content:
+ '*/*':
+ schema:
+ type: object
components:
schemas:
consumer_information_type:
status_notification_uri:
type: string
description: The target of Information subscription job status notifications
- description: Information for an Enrichment Information Job
+ description: Information for an Information Job
producer_status:
required:
- operational_state
Void:
type: object
description: 'Void/empty '
+ Link:
+ type: object
+ properties:
+ templated:
+ type: boolean
+ href:
+ type: string
consumer_type_subscription_info:
required:
- owner
allow-bean-definition-overriding: true
aop:
auto: false
+springdoc:
+ show-actuator: true
management:
endpoints:
web:
org.springframework: ERROR
org.springframework.data: ERROR
org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
- org.oransc.enrichment: INFO
+ org.oransc.ics: INFO
file:
- name: /var/log/enrichment-coordinator-service/application.log
+ name: /var/log/information-coordinator-service/application.log
server:
# Configuration of the HTTP/REST server. The parameters are defined and handeled by the springboot framework.
# See springboot documentation.
ssl:
key-store-type: JKS
key-store-password: policy_agent
- key-store: /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
+ key-store: /opt/app/information-coordinator-service/etc/cert/keystore.jks
key-password: policy_agent
key-alias: policy_agent
app:
# Note that the same keystore as for the server is used.
trust-store-used: false
trust-store-password: policy_agent
- trust-store: /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
+ trust-store: /opt/app/information-coordinator-service/etc/cert/truststore.jks
# Configuration of usage of HTTP Proxy for the southbound accesses.
# The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
http.proxy-host:
http.proxy-port: 0
- vardata-directory: /var/enrichment-coordinator-service
+ vardata-directory: /var/information-coordinator-service
<relativePath />
</parent>
<groupId>org.o-ran-sc.nonrtric</groupId>
- <artifactId>enrichment-coordinator-service</artifactId>
+ <artifactId>information-coordinator-service</artifactId>
<version>1.2.0-SNAPSHOT</version>
<licenses>
<license>
<goal>generate</goal>
</goals>
<configuration>
- <inputSpec>${project.basedir}/api/ecs-api.json</inputSpec>
+ <inputSpec>${project.basedir}/api/ics-api.json</inputSpec>
<language>openapi-yaml</language>
<output>${project.basedir}/api</output>
<configOptions>
- <outputFile>ecs-api.yaml</outputFile>
+ <outputFile>ics-api.yaml</outputFile>
</configOptions>
</configuration>
</execution>
<inherited>false</inherited>
<executions>
<execution>
- <id>generate-enrichment-coordinator-service-image</id>
+ <id>generate-information-coordinator-service-image</id>
<phase>package</phase>
<goals>
<goal>build</goal>
<pullRegistry>${env.CONTAINER_PULL_REGISTRY}</pullRegistry>
<images>
<image>
- <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+ <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
<build>
<cleanup>try</cleanup>
<contextDir>${basedir}</contextDir>
</configuration>
</execution>
<execution>
- <id>push-enrichment-coordinator-service-image</id>
+ <id>push-information-coordinator-service-image</id>
<goals>
<goal>build</goal>
<goal>push</goal>
<pushRegistry>${env.CONTAINER_PUSH_REGISTRY}</pushRegistry>
<images>
<image>
- <name>o-ran-sc/nonrtric-enrichment-coordinator-service:${project.version}</name>
+ <name>o-ran-sc/nonrtric-information-coordinator-service:${project.version}</name>
<build>
<contextDir>${basedir}</contextDir>
<dockerFile>Dockerfile</dockerFile>
<system>JIRA</system>
<url>https://jira.o-ran-sc.org/</url>
</issueManagement>
-</project>
+</project>
\ No newline at end of file
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment;
+package org.oransc.ics;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment;
+package org.oransc.ics;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.lang.invoke.MethodHandles;
import org.apache.catalina.connector.Connector;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment;
+package org.oransc.ics;
import io.swagger.v3.oas.annotations.OpenAPIDefinition;
import io.swagger.v3.oas.annotations.info.Info;
import io.swagger.v3.oas.annotations.info.License;
import io.swagger.v3.oas.annotations.tags.Tag;
-import org.oransc.enrichment.controllers.StatusController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.StatusController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
/**
* Swagger configuration class that uses swagger documentation type and scans
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
import io.netty.channel.ChannelOption;
import io.netty.handler.ssl.SslContext;
import java.lang.invoke.MethodHandles;
import java.util.concurrent.atomic.AtomicInteger;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.MediaType;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import javax.net.ssl.KeyManagerFactory;
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.util.ResourceUtils;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.configuration;
+package org.oransc.ics.configuration;
import lombok.Getter;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.configuration;
+package org.oransc.ics.configuration;
import org.immutables.value.Value;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import io.swagger.v3.oas.annotations.media.Schema;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.exceptions.ServiceException;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
import io.swagger.v3.oas.annotations.tags.Tag;
import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoTypes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers;
+package org.oransc.ics.controllers;
import io.swagger.v3.oas.annotations.media.Schema;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.lang.invoke.MethodHandles;
import java.util.Collection;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
public class A1eConsts {
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import java.util.List;
import org.json.JSONObject;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.a1e;
+package org.oransc.ics.controllers.a1e;
import io.swagger.v3.oas.annotations.media.Schema;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import reactor.core.publisher.Mono;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
public class ConsumerConsts {
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.gson.Gson;
import java.util.List;
import org.json.JSONObject;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
import org.immutables.gson.Gson;
@Gson.TypeAdapters
-@Schema(name = "consumer_job", description = "Information for an Enrichment Information Job")
+@Schema(name = "consumer_job", description = "Information for an Information Job")
public class ConsumerJobInfo {
@Schema(
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1consumer;
+package org.oransc.ics.controllers.r1consumer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.time.Duration;
import java.util.Collection;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
public class ProducerConsts {
public static final String PRODUCER_API_NAME = "Data producer (registration)";
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.Collection;
import java.util.List;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
import io.swagger.v3.oas.annotations.media.Schema;
import org.immutables.gson.Gson;
-import org.oransc.enrichment.repository.InfoJob;
+import org.oransc.ics.repository.InfoJob;
@Gson.TypeAdapters
@Schema(
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controllers.r1producer;
+package org.oransc.ics.controllers.r1producer;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.gson.annotations.SerializedName;
* ============LICENSE_END========================================================================
*/
-package org.oransc.enrichment.exceptions;
+package org.oransc.ics.exceptions;
import lombok.Getter;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import java.lang.invoke.MethodHandles;
import java.time.Instant;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.ServiceLoader;
import java.util.Vector;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import java.util.Collection;
import java.util.HashSet;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import lombok.Builder;
import lombok.Getter;
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.exceptions.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import lombok.Getter;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.Builder;
import lombok.Getter;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.ServiceLoader;
import java.util.Vector;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.exceptions.ServiceException;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.exceptions.ServiceException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.repository;
+package org.oransc.ics.repository;
import java.util.Collection;
import java.util.Collections;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.tasks;
+package org.oransc.ics.tasks;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.controllers.a1e.A1eCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.controllers.a1e.A1eCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment;
+package org.oransc.ics;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
-import org.oransc.enrichment.clients.AsyncRestClient;
-import org.oransc.enrichment.clients.AsyncRestClientFactory;
-import org.oransc.enrichment.configuration.ApplicationConfig;
-import org.oransc.enrichment.configuration.ImmutableHttpProxyConfig;
-import org.oransc.enrichment.configuration.ImmutableWebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig;
-import org.oransc.enrichment.configuration.WebClientConfig.HttpProxyConfig;
-import org.oransc.enrichment.controller.ConsumerSimulatorController;
-import org.oransc.enrichment.controller.ProducerSimulatorController;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobInfo;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.a1e.A1eEiTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerCallbacks;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerInfoTypeInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerRegistrationInfo;
-import org.oransc.enrichment.controllers.r1producer.ProducerStatusInfo;
-import org.oransc.enrichment.exceptions.ServiceException;
-import org.oransc.enrichment.repository.InfoJob;
-import org.oransc.enrichment.repository.InfoJobs;
-import org.oransc.enrichment.repository.InfoProducer;
-import org.oransc.enrichment.repository.InfoProducers;
-import org.oransc.enrichment.repository.InfoType;
-import org.oransc.enrichment.repository.InfoTypeSubscriptions;
-import org.oransc.enrichment.repository.InfoTypes;
-import org.oransc.enrichment.tasks.ProducerSupervision;
+import org.oransc.ics.clients.AsyncRestClient;
+import org.oransc.ics.clients.AsyncRestClientFactory;
+import org.oransc.ics.configuration.ApplicationConfig;
+import org.oransc.ics.configuration.ImmutableHttpProxyConfig;
+import org.oransc.ics.configuration.ImmutableWebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig;
+import org.oransc.ics.configuration.WebClientConfig.HttpProxyConfig;
+import org.oransc.ics.controller.ConsumerSimulatorController;
+import org.oransc.ics.controller.ProducerSimulatorController;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobInfo;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.a1e.A1eEiTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerInfoTypeInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeSubscriptionInfo;
+import org.oransc.ics.controllers.r1producer.ProducerCallbacks;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerInfoTypeInfo;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.r1producer.ProducerRegistrationInfo;
+import org.oransc.ics.controllers.r1producer.ProducerStatusInfo;
+import org.oransc.ics.exceptions.ServiceException;
+import org.oransc.ics.repository.InfoJob;
+import org.oransc.ics.repository.InfoJobs;
+import org.oransc.ics.repository.InfoProducer;
+import org.oransc.ics.repository.InfoProducers;
+import org.oransc.ics.repository.InfoType;
+import org.oransc.ics.repository.InfoTypeSubscriptions;
+import org.oransc.ics.repository.InfoTypes;
+import org.oransc.ics.tasks.ProducerSupervision;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
assertThat(jsonObj.remove("servers")).isNotNull();
String indented = jsonObj.toString(4);
- try (PrintStream out = new PrintStream(new FileOutputStream("api/ecs-api.json"))) {
+ try (PrintStream out = new PrintStream(new FileOutputStream("api/ics-api.json"))) {
out.print(indented);
}
}
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment;
+package org.oransc.ics;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
"server.ssl.key-store=./config/keystore.jks", //
"app.webclient.trust-store=./config/truststore.jks", "app.vardata-directory=./target"})
@SuppressWarnings("squid:S3577") // Not containing any tests since it is a mock.
-class MockEnrichmentService {
+class MockInformationService {
private static final Logger logger = LoggerFactory.getLogger(ApplicationTest.class);
@LocalServerPort
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.clients;
+package org.oransc.ics.clients;
import io.netty.util.internal.logging.InternalLoggerFactory;
import io.netty.util.internal.logging.JdkLoggerFactory;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import lombok.Getter;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.a1e.A1eConsts;
-import org.oransc.enrichment.controllers.a1e.A1eEiJobStatus;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerConsts;
-import org.oransc.enrichment.controllers.r1consumer.ConsumerTypeRegistrationInfo;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.a1e.A1eConsts;
+import org.oransc.ics.controllers.a1e.A1eEiJobStatus;
+import org.oransc.ics.controllers.r1consumer.ConsumerConsts;
+import org.oransc.ics.controllers.r1consumer.ConsumerTypeRegistrationInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
* ========================LICENSE_END===================================
*/
-package org.oransc.enrichment.controller;
+package org.oransc.ics.controller;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.media.Content;
import lombok.Getter;
-import org.oransc.enrichment.controllers.ErrorResponse;
-import org.oransc.enrichment.controllers.VoidResponse;
-import org.oransc.enrichment.controllers.r1producer.ProducerConsts;
-import org.oransc.enrichment.controllers.r1producer.ProducerJobInfo;
+import org.oransc.ics.controllers.ErrorResponse;
+import org.oransc.ics.controllers.VoidResponse;
+import org.oransc.ics.controllers.r1producer.ProducerConsts;
+import org.oransc.ics.controllers.r1producer.ProducerJobInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
-Subproject commit 558d6d2de33bb8cf4b16df980a0cdf3b1747a8e2
+Subproject commit 6e31874958b44f45c5dd78aef5c783916b16c6ee
<sonar-maven-plugin.version>3.7.0.1746</sonar-maven-plugin.version>
</properties>
<modules>
- <module>policy-agent</module>
- <module>enrichment-coordinator-service</module>
+ <module>a1-policy-management-service</module>
+ <module>information-coordinator-service</module>
<module>r-app-catalogue</module>
<module>helm-manager</module>
<module>dmaap-adaptor-java</module>
+ <module>dmaap-mediator-producer</module>
</modules>
<build>
<plugins>
EXPOSE 8680 8633
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+RUN chown -R appuser:appuser /opt/app/r-app-catalogue/
+RUN chown -R appuser:appuser /var/log/r-app-catalogue/
+USER appuser
+
ADD /config/application.yaml /opt/app/r-app-catalogue/config/application.yaml
ADD /config/r-app-catalogue-keystore.jks /opt/app/r-app-catalogue/etc/cert/keystore.jks
ADD target/${JAR} /opt/app/r-app-catalogue/r-app-catalogue.jar
-
-RUN chmod -R 777 /opt/app/r-app-catalogue/config/
-
CMD ["java", "-jar", "/opt/app/r-app-catalogue/r-app-catalogue.jar"]
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
# Create service to be able to receive events when rics becomes available
# Must use rest towards the agent since dmaap is not configured yet
- api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+ api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
if [ $__httpx == "HTTPS" ]; then
use_cr_https
start_mr "$MR_READ_TOPIC" "/events" "users/policy-agent" \
"$MR_WRITE_TOPIC" "/events" "users/mr-stub"
- start_cr
+ start_cr 1
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
api_equal json:policy-instances 0
- cr_equal received_callbacks 3 120
+ cr_equal 0 received_callbacks 3 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
else
api_equal json:rics 2 300
echo "##### Service registry and supervision #####"
echo "############################################"
- api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "serv1" 1000 "$CR_SERVICE_APP_PATH_0/1"
api_get_service_ids 200 "serv1" "ric-registration"
echo "############################################"
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
fi
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks 3
+ cr_equal 0 received_callbacks 3
fi
if [[ $interface = *"DMAAP"* ]]; then
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
setup_testenvironment
start_mr
-start_cr
+start_cr 1
if [ $RUNMODE == "DOCKER" ]; then
start_consul_cbs
# Create policies
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
use_agent_rest_http
-api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
api_put_policy 201 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
#Update policies
use_agent_rest_http
-api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH/1"
+api_put_service 200 "service1" 3600 "$CR_SERVICE_APP_PATH_0/1"
api_put_policy 200 "service1" ricsim_g1_1 1 2000 NOTRANSIENT $notificationurl testdata/OSC/pi1_template.json 1
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
# Create service to be able to receive events when rics becomes available
# Must use rest towards the agent since dmaap is not configured yet
- api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+ api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
if [ $__httpx == "HTTPS" ]; then
start_mr
- start_cr
+ start_cr 1
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
fi
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks 3 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+ cr_equal 0 received_callbacks 3 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
fi
mr_equal requests_submitted 0
api_get_services 404 "service1"
- api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "service1" 1000 "$CR_SERVICE_APP_PATH_0/1"
- api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 200 "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
- api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH/2"
+ api_put_service 400 "service2" -1 "$CR_SERVICE_APP_PATH_0/2"
- api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH/2"
+ api_put_service 400 "service2" "wrong" "$CR_SERVICE_APP_PATH_0/2"
api_put_service 400 "service2" 100 "/test"
api_put_service 201 "service2" 300 "ftp://localhost:80/test"
- api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+ api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
api_get_service_ids 200 "service1" "service2" "ric-registration"
- api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH/3"
+ api_put_service 201 "service3" 5000 "$CR_SERVICE_APP_PATH_0/3"
api_get_service_ids 200 "service1" "service2" "service3" "ric-registration"
- api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH/1"
+ api_get_services 200 "service1" "service1" 2000 "$CR_SERVICE_APP_PATH_0/1"
- api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH/3" "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+ api_get_services 200 NOSERVICE "service1" 2000 "$CR_SERVICE_APP_PATH_0/1" "service2" 300 "ftp://localhost:80/test" "service3" 5000 "$CR_SERVICE_APP_PATH_0/3" "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
api_get_services 200
api_get_service_ids 200 "service2" "service3" "ric-registration"
- api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "service1" 50 "$CR_SERVICE_APP_PATH_0/1"
api_get_service_ids 200 "service1" "service2" "service3" "ric-registration"
- api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "service10" 3600 "$CR_SERVICE_APP_PATH_0/1"
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
fi
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks 3
+ cr_equal 0 received_callbacks 3
fi
if [[ $interface = *"DMAAP"* ]]; then
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_mr
-start_cr
+start_cr 1
if [ $RUNMODE == "DOCKER" ]; then
start_consul_cbs
sim_print ricsim_g3_1 interface
fi
-api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_put_service 201 "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
-api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH/service1"
+api_get_services 200 "service1" "service1" 15 "$CR_SERVICE_APP_PATH_0/service1"
-api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 201 "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
-api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 120 "$CR_SERVICE_APP_PATH_0/service2"
-api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_put_service 200 "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_put_service 200 "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
api_get_service_ids 200 "service1" "service2"
sleep_wait 30 "Waiting for keep alive timeout"
-api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH/service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service1" "service1" 50 "$CR_SERVICE_APP_PATH_0/service1"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
sleep_wait 100 "Waiting for keep alive timeout"
api_get_services 404 "service1"
-api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH/service2"
+api_get_services 200 "service2" "service2" 180 "$CR_SERVICE_APP_PATH_0/service2"
api_delete_services 204 "service2"
api_get_services 404 "service1"
api_get_services 404 "service2"
-api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 201 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
-api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_get_services 200 "service3" "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
sleep_wait 30 "Waiting for keep alive timeout"
-api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH/service3"
+api_put_service 200 "service3" 60 "$CR_SERVICE_APP_PATH_0/service3"
sleep_wait 100 "Waiting for keep alive timeout"
api_get_services 404 "service3"
-api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_put_service 201 "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
sleep_wait 60 "Waiting for keep alive timeout"
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
api_put_services_keepalive 200 "service4"
sleep_wait 90 "Waiting for keep alive timeout"
-api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH/service4"
+api_get_services 200 "service4" "service4" 120 "$CR_SERVICE_APP_PATH_0/service4"
api_delete_services 204 "service4"
api_put_services_keepalive 404 "service4"
# Policy delete after timeout
-api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 201 "service10" 600 "$CR_SERVICE_APP_PATH_0/service10"
sim_put_policy_type 201 ricsim_g1_1 1 testdata/OSC/sim_1.json
fi
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
sim_equal ricsim_g3_1 num_instances 1
fi
-api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH/service10"
+api_put_service 200 "service10" 10 "$CR_SERVICE_APP_PATH_0/service10"
#Wait for service expiry
api_equal json:policies 0 120
#
-TC_ONELINE_DESCR="ECS full interfaces walkthrough"
+TC_ONELINE_DESCR="ICS full interfaces walkthrough"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS PRODSTUB CR RICSIM CP HTTPPROXY NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="ICS PRODSTUB CR RICSIM CP HTTPPROXY NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="PRODSTUB CR ECS RICSIM CP HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="PRODSTUB CR ICS RICSIM CP HTTPPROXY KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_kube_proxy
-use_ecs_rest_https
+use_ics_rest_https
use_prod_stub_https
start_http_proxy
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE #Change NOPROXY to PROXY to run with http proxy
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE #Change NOPROXY to PROXY to run with http proxy
if [ $RUNMODE == "KUBE" ]; then
- ecs_api_admin_reset
+ ics_api_admin_reset
fi
start_prod_stub
-set_ecs_debug
+set_ics_debug
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
start_ric_simulators ricsim_g3 4 STD_2.0.0
fi
-start_cr
+start_cr 1
CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
TARGET160="http://localhost:80/target" # Dummy target, no target for info data in this env...
#Status callbacks for eijobs
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
-STATUS3="$CR_SERVICE_APP_PATH/job3-status"
-STATUS8="$CR_SERVICE_APP_PATH/job8-status"
-STATUS10="$CR_SERVICE_APP_PATH/job10-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
+STATUS3="$CR_SERVICE_APP_PATH_0/job3-status"
+STATUS8="$CR_SERVICE_APP_PATH_0/job8-status"
+STATUS10="$CR_SERVICE_APP_PATH_0/job10-status"
#Status callbacks for infojobs
-INFOSTATUS101="$CR_SERVICE_APP_PATH/info-job101-status"
-INFOSTATUS102="$CR_SERVICE_APP_PATH/info-job102-status"
-INFOSTATUS103="$CR_SERVICE_APP_PATH/info-job103-status"
-INFOSTATUS108="$CR_SERVICE_APP_PATH/info-job108-status"
-INFOSTATUS110="$CR_SERVICE_APP_PATH/info-job110-status"
-INFOSTATUS150="$CR_SERVICE_APP_PATH/info-job150-status"
-INFOSTATUS160="$CR_SERVICE_APP_PATH/info-job160-status"
-
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+INFOSTATUS101="$CR_SERVICE_APP_PATH_0/info-job101-status"
+INFOSTATUS102="$CR_SERVICE_APP_PATH_0/info-job102-status"
+INFOSTATUS103="$CR_SERVICE_APP_PATH_0/info-job103-status"
+INFOSTATUS108="$CR_SERVICE_APP_PATH_0/info-job108-status"
+INFOSTATUS110="$CR_SERVICE_APP_PATH_0/info-job110-status"
+INFOSTATUS150="$CR_SERVICE_APP_PATH_0/info-job150-status"
+INFOSTATUS160="$CR_SERVICE_APP_PATH_0/info-job160-status"
+
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
#Type registration status callbacks
- TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
- TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+ TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+ TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
- ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
+ ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
- ecs_api_idc_get_subscription_ids 200 test EMPTY
+ ics_api_idc_get_subscription_ids 200 test EMPTY
- ecs_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_get_subscription 404 test
+ ics_api_idc_get_subscription 404 test
- ecs_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_put_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+ ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
- ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
+ ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
- ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+ ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
- ecs_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_get_subscription 200 subscription-id-2 owner2 $TYPESTATUS2
+ ics_api_idc_get_subscription 200 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_get_subscription 200 subscription-id-2 owner2 $TYPESTATUS2
- ecs_api_idc_delete_subscription 204 subscription-id-2
+ ics_api_idc_delete_subscription 204 subscription-id-2
- ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
+ ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
- cr_equal received_callbacks 1 30
- cr_equal received_callbacks?id=type-status1 1
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+ cr_equal 0 received_callbacks 1 30
+ cr_equal 0 received_callbacks?id=type-status1 1
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
- ecs_api_edp_delete_type_2 204 type1
+ ics_api_edp_delete_type_2 204 type1
- cr_equal received_callbacks 2 30
- cr_equal received_callbacks?id=type-status1 2
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+ cr_equal 0 received_callbacks 2 30
+ cr_equal 0 received_callbacks?id=type-status1 2
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json DEREGISTERED
- ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
- ecs_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
+ ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+ ics_api_idc_get_subscription_ids 200 NOOWNER subscription-id-1 subscription-id-2
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
- cr_equal received_callbacks 4 30
- cr_equal received_callbacks?id=type-status1 3
- cr_equal received_callbacks?id=type-status2 1
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
+ cr_equal 0 received_callbacks 4 30
+ cr_equal 0 received_callbacks?id=type-status1 3
+ cr_equal 0 received_callbacks?id=type-status2 1
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
- ecs_api_idc_delete_subscription 204 subscription-id-2
+ ics_api_idc_delete_subscription 204 subscription-id-2
- ecs_api_edp_delete_type_2 204 type1
+ ics_api_edp_delete_type_2 204 type1
- cr_equal received_callbacks 5 30
- cr_equal received_callbacks?id=type-status1 4
- cr_equal received_callbacks?id=type-status2 1
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json DEREGISTERED
+ cr_equal 0 received_callbacks 5 30
+ cr_equal 0 received_callbacks?id=type-status1 4
+ cr_equal 0 received_callbacks?id=type-status2 1
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json DEREGISTERED
- cr_api_reset
+ cr_api_reset 0
fi
### Setup prodstub sim to accept calls for producers, types and jobs
prodstub_arm_job_create 200 prod-e job10
prodstub_arm_job_create 200 prod-f job10
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
-cr_equal received_callbacks 0
+cr_equal 0 received_callbacks 0
### Initial tests - no config made
### GET: type ids, types, producer ids, producers, job ids, jobs
### DELETE: jobs
-ecs_api_a1_get_type_ids 200 EMPTY
-ecs_api_a1_get_type 404 test-type
+ics_api_a1_get_type_ids 200 EMPTY
+ics_api_a1_get_type 404 test-type
-ecs_api_edp_get_type_ids 200 EMPTY
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_type 404 test-type
+ics_api_edp_get_type_ids 200 EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_type 404 test-type
else
- ecs_api_edp_get_type_2 404 test-type
+ ics_api_edp_get_type_2 404 test-type
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 EMPTY
- ecs_api_edp_get_producer 404 test-prod
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 EMPTY
+ ics_api_edp_get_producer 404 test-prod
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE EMPTY
- ecs_api_edp_get_producer_2 404 test-prod
+ ics_api_edp_get_producer_ids_2 200 NOTYPE EMPTY
+ ics_api_edp_get_producer_2 404 test-prod
fi
-ecs_api_edp_get_producer_status 404 test-prod
+ics_api_edp_get_producer_status 404 test-prod
-ecs_api_edp_delete_producer 404 test-prod
+ics_api_edp_delete_producer 404 test-prod
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_ids 404 test-type NOWNER
- ecs_api_a1_get_job_ids 404 test-type test-owner
+ ics_api_a1_get_job_ids 404 test-type NOWNER
+ ics_api_a1_get_job_ids 404 test-type test-owner
- ecs_api_a1_get_job 404 test-type test-job
+ ics_api_a1_get_job 404 test-type test-job
- ecs_api_a1_get_job_status 404 test-type test-job
+ ics_api_a1_get_job_status 404 test-type test-job
else
- ecs_api_a1_get_job_ids 200 test-type NOWNER EMPTY
- ecs_api_a1_get_job_ids 200 test-type test-owner EMPTY
+ ics_api_a1_get_job_ids 200 test-type NOWNER EMPTY
+ ics_api_a1_get_job_ids 200 test-type test-owner EMPTY
- ecs_api_a1_get_job 404 test-job
+ ics_api_a1_get_job 404 test-job
- ecs_api_a1_get_job_status 404 test-job
+ ics_api_a1_get_job_status 404 test-job
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_delete_job 404 test-type test-job
+ ics_api_a1_delete_job 404 test-type test-job
else
- ecs_api_a1_delete_job 404 test-job
+ ics_api_a1_delete_job 404 test-job
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_jobs 404 test-prod
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_jobs 404 test-prod
else
- ecs_api_edp_get_producer_jobs_2 404 test-prod
+ ics_api_edp_get_producer_jobs_2 404 test-prod
fi
-if [ $ECS_VERSION == "V1-2" ]; then
- ecs_api_edp_get_type_2 404 test-type
- ecs_api_edp_delete_type_2 404 test-type
+if [ $ICS_VERSION == "V1-2" ]; then
+ ics_api_edp_get_type_2 404 test-type
+ ics_api_edp_delete_type_2 404 test-type
fi
### Setup of producer/job and testing apis ###
## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_put_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+ ics_api_edp_put_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
else
#V1-2
- ecs_api_edp_get_type_ids 200 EMPTY
- ecs_api_edp_get_type_2 404 type1
- ecs_api_edp_put_producer_2 404 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_type_ids 200 EMPTY
+ ics_api_edp_get_type_2 404 type1
+ ics_api_edp_put_producer_2 404 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
# Create type, delete and create again
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_get_type_2 200 type1
- ecs_api_edp_get_type_ids 200 type1
- ecs_api_edp_delete_type_2 204 type1
- ecs_api_edp_get_type_2 404 type1
- ecs_api_edp_get_type_ids 200 EMPTY
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+ ics_api_edp_get_type_2 200 type1
+ ics_api_edp_get_type_ids 200 type1
+ ics_api_edp_delete_type_2 204 type1
+ ics_api_edp_get_type_2 404 type1
+ ics_api_edp_get_type_ids 200 EMPTY
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
else
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
fi
- ecs_api_edp_get_type_ids 200 type1
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+ ics_api_edp_get_type_ids 200 type1
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
else
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
fi
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 3 30
- cr_equal received_callbacks?id=type-status1 3
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED type1 testdata/ecs/ei-type-1.json DEREGISTERED type1 testdata/ecs/ei-type-1.json REGISTERED
+ if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 3 30
+ cr_equal 0 received_callbacks?id=type-status1 3
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED type1 testdata/ics/ei-type-1.json DEREGISTERED type1 testdata/ics/ei-type-1.json REGISTERED
else
- cr_equal received_callbacks 0
+ cr_equal 0 received_callbacks 0
fi
fi
-ecs_api_a1_get_type_ids 200 type1
+ics_api_a1_get_type_ids 200 type1
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_type 200 type1 testdata/ecs/ei-type-1.json
+ ics_api_a1_get_type 200 type1 testdata/ics/ei-type-1.json
else
- ecs_api_a1_get_type 200 type1 testdata/ecs/empty-type.json
+ ics_api_a1_get_type 200 type1 testdata/ics/empty-type.json
fi
-ecs_api_edp_get_type_ids 200 type1
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_type 200 type1 testdata/ecs/ei-type-1.json prod-a
+ics_api_edp_get_type_ids 200 type1
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_type 200 type1 testdata/ics/ei-type-1.json prod-a
else
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
else
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
fi
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a
- ecs_api_edp_get_producer_ids_2 200 type1 prod-a
- ecs_api_edp_get_producer_ids_2 200 type2 EMPTY
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a
+ ics_api_edp_get_producer_ids_2 200 type1 prod-a
+ ics_api_edp_get_producer_ids_2 200 type2 EMPTY
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
else
- ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_a1_get_job_ids 200 type1 NOWNER EMPTY
-ecs_api_a1_get_job_ids 200 type1 test-owner EMPTY
+ics_api_a1_get_job_ids 200 type1 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type1 test-owner EMPTY
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job 404 type1 test-job
+ ics_api_a1_get_job 404 type1 test-job
- ecs_api_a1_get_job_status 404 type1 test-job
+ ics_api_a1_get_job_status 404 type1 test-job
else
- ecs_api_a1_get_job 404 test-job
+ ics_api_a1_get_job 404 test-job
- ecs_api_a1_get_job_status 404 test-job
+ ics_api_a1_get_job_status 404 test-job
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_jobs 200 prod-a EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_jobs 200 prod-a EMPTY
else
- ecs_api_edp_get_producer_jobs_2 200 prod-a EMPTY
+ ics_api_edp_get_producer_jobs_2 200 prod-a EMPTY
fi
## Create a job for prod-a
## job1 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
fi
fi
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 NOWNER job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
if [ ! -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1
+ ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job 200 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 type1 job1 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 ENABLED
else
- ecs_api_a1_get_job 200 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 job1 ENABLED
+ ics_api_a1_get_job_status 200 job1 ENABLED
fi
prodstub_equal create/prod-a/job1 1
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
fi
## Create a second job for prod-a
## job2 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
fi
fi
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1 job2
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
+ics_api_a1_get_job_ids 200 type1 NOWNER job1 job2
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
if [ ! -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+ ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job 200 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 type1 job2 ENABLED
+ ics_api_a1_get_job_status 200 type1 job2 ENABLED
else
- ecs_api_a1_get_job 200 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 job2 ENABLED
+ ics_api_a1_get_job_status 200 job2 ENABLED
fi
prodstub_equal create/prod-a/job2 1
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
fi
## Setup prod-b
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
-else
- ecs_api_edp_put_type_2 201 type2 testdata/ecs/ei-type-2.json
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
- if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 4 30
- cr_equal received_callbacks?id=type-status1 4
- cr_api_check_all_ecs_subscription_events 200 type-status1 type2 testdata/ecs/ei-type-2.json REGISTERED
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
+else
+ ics_api_edp_put_type_2 201 type2 testdata/ics/ei-type-2.json
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+ if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 4 30
+ cr_equal 0 received_callbacks?id=type-status1 4
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type2 testdata/ics/ei-type-2.json REGISTERED
else
- cr_equal received_callbacks 0
+ cr_equal 0 received_callbacks 0
fi
fi
-ecs_api_a1_get_type_ids 200 type1 type2
+ics_api_a1_get_type_ids 200 type1 type2
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_type 200 type1 testdata/ecs/ei-type-1.json
- ecs_api_a1_get_type 200 type2 testdata/ecs/ei-type-2.json
+ ics_api_a1_get_type 200 type1 testdata/ics/ei-type-1.json
+ ics_api_a1_get_type 200 type2 testdata/ics/ei-type-2.json
else
- ecs_api_a1_get_type 200 type1 testdata/ecs/empty-type.json
- ecs_api_a1_get_type 200 type2 testdata/ecs/empty-type.json
+ ics_api_a1_get_type 200 type1 testdata/ics/empty-type.json
+ ics_api_a1_get_type 200 type2 testdata/ics/empty-type.json
fi
-ecs_api_edp_get_type_ids 200 type1 type2
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_type 200 type1 testdata/ecs/ei-type-1.json prod-a
- ecs_api_edp_get_type 200 type2 testdata/ecs/ei-type-2.json prod-b
+ics_api_edp_get_type_ids 200 type1 type2
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_type 200 type1 testdata/ics/ei-type-1.json prod-a
+ ics_api_edp_get_type 200 type2 testdata/ics/ei-type-2.json prod-b
else
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json testdata/ecs/info-type-info.json
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json testdata/ics/info-type-info.json
else
- ecs_api_edp_get_type_2 200 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_type_2 200 type1 testdata/ics/ei-type-1.json
fi
- ecs_api_edp_get_type_2 200 type2 testdata/ecs/ei-type-2.json
+ ics_api_edp_get_type_2 200 type2 testdata/ics/ei-type-2.json
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+ ics_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
else
- ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+ ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
fi
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
## Create job for prod-b
## job3 - prod-b
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
fi
prodstub_equal create/prod-b/job3 1
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
fi
fi
-ecs_api_a1_get_job_ids 200 type1 NOWNER job1 job2
-ecs_api_a1_get_job_ids 200 type2 NOWNER job3
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
-ecs_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
-ecs_api_a1_get_job_ids 200 type2 ricsim_g3_3 job3
+ics_api_a1_get_job_ids 200 type1 NOWNER job1 job2
+ics_api_a1_get_job_ids 200 type2 NOWNER job3
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_1 job1
+ics_api_a1_get_job_ids 200 type1 ricsim_g3_2 job2
+ics_api_a1_get_job_ids 200 type2 ricsim_g3_3 job3
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job 200 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
else
- ecs_api_a1_get_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+ ics_api_a1_get_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
- ecs_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
- ecs_api_edp_get_producer_jobs 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_jobs 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
+ ics_api_edp_get_producer_jobs 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
else
- ecs_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
- ecs_api_edp_get_producer_jobs_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ ics_api_edp_get_producer_jobs_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
+ ics_api_edp_get_producer_jobs_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
fi
## Setup prod-c (no types)
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
else
- ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
+ ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c NOTYPE
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
- ecs_api_edp_get_producer 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
+ ics_api_edp_get_producer 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
+ ics_api_edp_get_producer 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
else
- ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
- ecs_api_edp_get_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
+ ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+ ics_api_edp_get_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c EMPTY
fi
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
## Delete job3 and prod-b and re-create if different order
# Delete job then producer
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_delete_job 204 type2 job3
+ ics_api_a1_delete_job 204 type2 job3
else
- ecs_api_a1_delete_job 204 job3
+ ics_api_a1_delete_job 204 job3
fi
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
fi
-ecs_api_edp_delete_producer 204 prod-b
+ics_api_edp_delete_producer 204 prod-b
-ecs_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-b
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
fi
prodstub_equal delete/prod-b/job3 1
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 404 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template.json
+ ics_api_a1_put_job 404 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template.json
else
- if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_a1_put_job 404 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_a1_put_job 404 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template.json
- ecs_api_a1_get_job_status 200 job3 DISABLED
+ ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template.json
+ ics_api_a1_get_job_status 200 job3 DISABLED
fi
fi
# Put producer then job
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
else
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
fi
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_put_job 201 type2 job3 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
else
- if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template2.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_a1_put_job 201 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template2.json
else
- ecs_api_a1_put_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ecs/job-template2.json
+ ics_api_a1_put_job 200 job3 type2 $TARGET3 ricsim_g3_3 $STATUS3 testdata/ics/job-template2.json
fi
- ecs_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
else
- prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+ prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
fi
fi
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c
fi
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
prodstub_equal create/prod-b/job3 2
else
prodstub_equal create/prod-b/job3 3
prodstub_equal delete/prod-b/job3 1
# Delete only the producer
-ecs_api_edp_delete_producer 204 prod-b
+ics_api_edp_delete_producer 204 prod-b
-ecs_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-b
-ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-c
+ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-c
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-c
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type2 job3 DISABLED
+ ics_api_a1_get_job_status 200 type2 job3 DISABLED
else
- ecs_api_a1_get_job_status 200 job3 DISABLED
+ ics_api_a1_get_job_status 200 job3 DISABLED
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 5 30
- cr_equal received_callbacks?id=type-status1 4
- cr_equal received_callbacks?id=job3-status 1
- cr_api_check_all_ecs_events 200 job3-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 5 30
+ cr_equal 0 received_callbacks?id=type-status1 4
+ cr_equal 0 received_callbacks?id=job3-status 1
+ cr_api_check_all_ics_events 200 0 job3-status DISABLED
else
- cr_equal received_callbacks 1 30
- cr_equal received_callbacks?id=job3-status 1
- cr_api_check_all_ecs_events 200 job3-status DISABLED
+ cr_equal 0 received_callbacks 1 30
+ cr_equal 0 received_callbacks?id=job3-status 1
+ cr_api_check_all_ics_events 200 0 job3-status DISABLED
fi
# Re-create the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ecs/ei-type-2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2 testdata/ics/ei-type-2.json
else
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type2
fi
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
else
- ecs_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 6 30
- cr_equal received_callbacks?id=type-status1 4
- cr_equal received_callbacks?id=job3-status 2
- cr_api_check_all_ecs_events 200 job3-status ENABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 6 30
+ cr_equal 0 received_callbacks?id=type-status1 4
+ cr_equal 0 received_callbacks?id=job3-status 2
+ cr_api_check_all_ics_events 200 0 job3-status ENABLED
else
- cr_equal received_callbacks 2 30
- cr_equal received_callbacks?id=job3-status 2
- cr_api_check_all_ecs_events 200 job3-status ENABLED
+ cr_equal 0 received_callbacks 2 30
+ cr_equal 0 received_callbacks?id=job3-status 2
+ cr_api_check_all_ics_events 200 0 job3-status ENABLED
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
else
- prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ecs/job-template2.json
+ prodstub_check_jobdata_3 200 prod-b job3 type2 $TARGET3 ricsim_g3_3 testdata/ics/job-template2.json
fi
fi
## Setup prod-d
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json
else
- ecs_api_edp_put_type_2 201 type4 testdata/ecs/ei-type-4.json
- ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
+ ics_api_edp_put_type_2 201 type4 testdata/ics/ei-type-4.json
+ ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 7 30
- cr_equal received_callbacks?id=type-status1 5
- cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 7 30
+ cr_equal 0 received_callbacks?id=type-status1 5
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type4 testdata/ics/ei-type-4.json REGISTERED
fi
-ecs_api_a1_get_job_ids 200 type4 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type4 NOWNER EMPTY
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type4 job8 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type4 job8 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job8 type4 $TARGET8 ricsim_g3_4 $STATUS8 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job8 type4 $TARGET8 ricsim_g3_4 $STATUS8 testdata/ics/job-template.json
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-d job8 type4 $TARGET8 ricsim_g3_4 testdata/ics/job-template.json
fi
fi
prodstub_equal create/prod-d/job8 1
prodstub_equal delete/prod-d/job8 0
-ecs_api_a1_get_job_ids 200 type4 NOWNER job8
+ics_api_a1_get_job_ids 200 type4 NOWNER job8
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
else
- ecs_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
fi
# Re-PUT the producer with zero types
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
else
- ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
+ ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d NOTYPE
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_ids 404 type4 NOWNER
+ ics_api_a1_get_job_ids 404 type4 NOWNER
else
- ecs_api_a1_get_job_ids 200 type4 NOWNER job8
- ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
+ ics_api_a1_get_job_ids 200 type4 NOWNER job8
+ ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job8 DISABLED
+ ics_api_a1_get_job_status 200 type4 job8 DISABLED
else
- ecs_api_a1_get_job_status 200 job8 DISABLED
+ ics_api_a1_get_job_status 200 job8 DISABLED
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 8 30
- cr_equal received_callbacks?id=type-status1 5
- cr_equal received_callbacks?id=job8-status 1
- cr_api_check_all_ecs_events 200 job8-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 8 30
+ cr_equal 0 received_callbacks?id=type-status1 5
+ cr_equal 0 received_callbacks?id=job8-status 1
+ cr_api_check_all_ics_events 200 0 job8-status DISABLED
else
- cr_equal received_callbacks 3 30
- cr_equal received_callbacks?id=job8-status 1
- cr_api_check_all_ecs_events 200 job8-status DISABLED
+ cr_equal 0 received_callbacks 3 30
+ cr_equal 0 received_callbacks?id=job8-status 1
+ cr_api_check_all_ics_events 200 0 job8-status DISABLED
fi
prodstub_equal create/prod-d/job8 1
prodstub_equal delete/prod-d/job8 0
## Re-setup prod-d
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json
else
- ecs_api_edp_put_type_2 200 type4 testdata/ecs/ei-type-4.json
- ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
+ ics_api_edp_put_type_2 200 type4 testdata/ics/ei-type-4.json
+ ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_ids 404 type4 NOWNER
+ ics_api_a1_get_job_ids 404 type4 NOWNER
else
- ecs_api_a1_get_job_ids 200 type4 NOWNER job8
- ecs_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
+ ics_api_a1_get_job_ids 200 type4 NOWNER job8
+ ics_api_a1_get_job_ids 200 NOTYPE NOWNER job1 job2 job3 job8
fi
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
else
- ecs_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 10 30
- cr_equal received_callbacks?id=type-status1 6
- cr_api_check_all_ecs_subscription_events 200 type-status1 type4 testdata/ecs/ei-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 10 30
+ cr_equal 0 received_callbacks?id=type-status1 6
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type4 testdata/ics/ei-type-4.json REGISTERED
- cr_equal received_callbacks?id=job8-status 2
- cr_api_check_all_ecs_events 200 job8-status ENABLED
+ cr_equal 0 received_callbacks?id=job8-status 2
+ cr_api_check_all_ics_events 200 0 job8-status ENABLED
else
- cr_equal received_callbacks 4 30
- cr_equal received_callbacks?id=job8-status 2
- cr_api_check_all_ecs_events 200 job8-status ENABLED
+ cr_equal 0 received_callbacks 4 30
+ cr_equal 0 received_callbacks?id=job8-status 2
+ cr_api_check_all_ics_events 200 0 job8-status ENABLED
fi
prodstub_equal create/prod-d/job8 2
prodstub_equal delete/prod-d/job8 0
## Setup prod-e
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6 testdata/ecs/ei-type-6.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6 testdata/ics/ei-type-6.json
else
- ecs_api_edp_put_type_2 201 type6 testdata/ecs/ei-type-6.json
- ecs_api_edp_put_producer_2 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6
+ ics_api_edp_put_type_2 201 type6 testdata/ics/ei-type-6.json
+ ics_api_edp_put_producer_2 201 prod-e $CB_JOB/prod-e $CB_SV/prod-e type6
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 11 30
- cr_equal received_callbacks?id=type-status1 7
- cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 11 30
+ cr_equal 0 received_callbacks?id=type-status1 7
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type6 testdata/ics/ei-type-6.json REGISTERED
fi
-ecs_api_a1_get_job_ids 200 type6 NOWNER EMPTY
+ics_api_a1_get_job_ids 200 type6 NOWNER EMPTY
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type6 job10 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type6 job10 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ics/job-template.json
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-e job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
fi
fi
prodstub_equal create/prod-e/job10 1
prodstub_equal delete/prod-e/job10 0
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
## Setup prod-f
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6 testdata/ecs/ei-type-6.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6 testdata/ics/ei-type-6.json
else
- ecs_api_edp_put_type_2 200 type6 testdata/ecs/ei-type-6.json
- ecs_api_edp_put_producer_2 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6
+ ics_api_edp_put_type_2 200 type6 testdata/ics/ei-type-6.json
+ ics_api_edp_put_producer_2 201 prod-f $CB_JOB/prod-f $CB_SV/prod-f type6
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 12 30
- cr_equal received_callbacks?id=type-status1 8
- cr_api_check_all_ecs_subscription_events 200 type-status1 type6 testdata/ecs/ei-type-6.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 12 30
+ cr_equal 0 received_callbacks?id=type-status1 8
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type6 testdata/ics/ei-type-6.json REGISTERED
fi
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template.json
fi
fi
prodstub_equal create/prod-f/job10 1
prodstub_equal delete/prod-f/job10 0
-ecs_api_a1_get_job_ids 200 type6 NOWNER job10
+ics_api_a1_get_job_ids 200 type6 NOWNER job10
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
## Status updates prod-a and jobs
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
# Arm producer prod-a for supervision failure
prodstub_arm_producer 200 prod-a 400
# Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 200 prod-a DISABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a DISABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 ENABLED
- ecs_api_a1_get_job_status 200 type1 job2 ENABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 ENABLED
+ ics_api_a1_get_job_status 200 type1 job2 ENABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 ENABLED
- ecs_api_a1_get_job_status 200 job2 ENABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 ENABLED
+ ics_api_a1_get_job_status 200 job2 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
# Arm producer prod-a for supervision
prodstub_arm_producer 200 prod-a 200
# Wait for producer prod-a to go enabled
-ecs_api_edp_get_producer_status 200 prod-a ENABLED 360
+ics_api_edp_get_producer_status 200 prod-a ENABLED 360
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 ENABLED
- ecs_api_a1_get_job_status 200 type1 job2 ENABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 ENABLED
+ ics_api_a1_get_job_status 200 type1 job2 ENABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 ENABLED
- ecs_api_a1_get_job_status 200 job2 ENABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 ENABLED
+ ics_api_a1_get_job_status 200 job2 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
# Arm producer prod-a for supervision failure
prodstub_arm_producer 200 prod-a 400
# Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-a prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-a prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 200 prod-a DISABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 200 prod-a DISABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 ENABLED
- ecs_api_a1_get_job_status 200 type1 job2 ENABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 ENABLED
+ ics_api_a1_get_job_status 200 type1 job2 ENABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 ENABLED
- ecs_api_a1_get_job_status 200 job2 ENABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 ENABLED
+ ics_api_a1_get_job_status 200 job2 ENABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
# Wait for producer prod-a to be removed
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 5 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 5 1000
else
- ecs_equal json:ei-producer/v1/eiproducers 5 1000
+ ics_equal json:ei-producer/v1/eiproducers 5 1000
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 DISABLED
- ecs_api_a1_get_job_status 200 type1 job2 DISABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 DISABLED
+ ics_api_a1_get_job_status 200 type1 job2 DISABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 DISABLED
- ecs_api_a1_get_job_status 200 job2 DISABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 DISABLED
+ ics_api_a1_get_job_status 200 job2 DISABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 14 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 14 30
else
- cr_equal received_callbacks 6 30
+ cr_equal 0 received_callbacks 6 30
fi
-cr_equal received_callbacks?id=job1-status 1
-cr_equal received_callbacks?id=job2-status 1
+cr_equal 0 received_callbacks?id=job1-status 1
+cr_equal 0 received_callbacks?id=job2-status 1
-cr_api_check_all_ecs_events 200 job1-status DISABLED
-cr_api_check_all_ecs_events 200 job2-status DISABLED
+cr_api_check_all_ics_events 200 0 job1-status DISABLED
+cr_api_check_all_ics_events 200 0 job2-status DISABLED
# Arm producer prod-e for supervision failure
prodstub_arm_producer 200 prod-e 400
-ecs_api_edp_get_producer_status 200 prod-e DISABLED 1000
+ics_api_edp_get_producer_status 200 prod-e DISABLED 1000
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
fi
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e DISABLED
-ecs_api_edp_get_producer_status 200 prod-f ENABLED
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e DISABLED
+ics_api_edp_get_producer_status 200 prod-f ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 DISABLED
- ecs_api_a1_get_job_status 200 type1 job2 DISABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 DISABLED
+ ics_api_a1_get_job_status 200 type1 job2 DISABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 DISABLED
- ecs_api_a1_get_job_status 200 job2 DISABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 DISABLED
+ ics_api_a1_get_job_status 200 job2 DISABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
#Disable create for job10 in prod-e
#Update job 10 - only prod-f will be updated
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 200 type6 job10 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+ ics_api_a1_put_job 200 type6 job10 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
else
- ecs_api_a1_put_job 200 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ecs/job-template2.json
+ ics_api_a1_put_job 200 job10 type6 $TARGET10 ricsim_g3_4 $STATUS10 testdata/ics/job-template2.json
fi
#Reset producer and job responses
prodstub_arm_producer 200 prod-e 200
prodstub_arm_job_create 200 prod-e job10 200
-ecs_api_edp_get_producer_status 200 prod-e ENABLED 360
+ics_api_edp_get_producer_status 200 prod-e ENABLED 360
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e prod-f
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e prod-f
fi
#Wait for job to be updated
sleep_wait 120
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
else
- prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ecs/job-template2.json
+ prodstub_check_jobdata_3 200 prod-f job10 type6 $TARGET10 ricsim_g3_4 testdata/ics/job-template2.json
fi
fi
prodstub_arm_producer 200 prod-f 400
-ecs_api_edp_get_producer_status 200 prod-f DISABLED 360
+ics_api_edp_get_producer_status 200 prod-f DISABLED 360
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 4 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 4 1000
else
- ecs_equal json:ei-producer/v1/eiproducers 4 1000
+ ics_equal json:ei-producer/v1/eiproducers 4 1000
fi
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_get_producer_ids 200 prod-b prod-c prod-d prod-e
else
- ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
+ ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
fi
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
-ecs_api_edp_get_producer_status 200 prod-e ENABLED
-ecs_api_edp_get_producer_status 404 prod-f
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-e ENABLED
+ics_api_edp_get_producer_status 404 prod-f
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 DISABLED
- ecs_api_a1_get_job_status 200 type1 job2 DISABLED
- ecs_api_a1_get_job_status 200 type2 job3 ENABLED
- ecs_api_a1_get_job_status 200 type4 job8 ENABLED
- ecs_api_a1_get_job_status 200 type6 job10 ENABLED
+ ics_api_a1_get_job_status 200 type1 job1 DISABLED
+ ics_api_a1_get_job_status 200 type1 job2 DISABLED
+ ics_api_a1_get_job_status 200 type2 job3 ENABLED
+ ics_api_a1_get_job_status 200 type4 job8 ENABLED
+ ics_api_a1_get_job_status 200 type6 job10 ENABLED
else
- ecs_api_a1_get_job_status 200 job1 DISABLED
- ecs_api_a1_get_job_status 200 job2 DISABLED
- ecs_api_a1_get_job_status 200 job3 ENABLED
- ecs_api_a1_get_job_status 200 job8 ENABLED
- ecs_api_a1_get_job_status 200 job10 ENABLED
+ ics_api_a1_get_job_status 200 job1 DISABLED
+ ics_api_a1_get_job_status 200 job2 DISABLED
+ ics_api_a1_get_job_status 200 job3 ENABLED
+ ics_api_a1_get_job_status 200 job8 ENABLED
+ ics_api_a1_get_job_status 200 job10 ENABLED
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 14 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 14 30
else
- cr_equal received_callbacks 6 30
+ cr_equal 0 received_callbacks 6 30
fi
-if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
# End test if info types is not impl in tested version
- check_ecs_logs
+ check_ics_logs
store_logs END
### Initial tests - no config made
### GET: type ids, types, producer ids, producers, job ids, jobs
### DELETE: jobs
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6
-ecs_api_idc_get_type 404 test-type
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6
+ics_api_idc_get_type 404 test-type
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_get_type_2 404 test-type
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_get_type_2 404 test-type
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_2 404 test-prod
-ecs_api_edp_get_producer_status 404 test-prod
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_2 404 test-prod
+ics_api_edp_get_producer_status 404 test-prod
-ecs_api_edp_delete_producer 404 test-prod
+ics_api_edp_delete_producer 404 test-prod
-ecs_api_idc_get_job_ids 200 test-type NOWNER EMPTY
-ecs_api_idc_get_job_ids 200 test-type test-owner EMPTY
+ics_api_idc_get_job_ids 200 test-type NOWNER EMPTY
+ics_api_idc_get_job_ids 200 test-type test-owner EMPTY
-ecs_api_idc_get_job 404 test-job
+ics_api_idc_get_job 404 test-job
-ecs_api_idc_get_job_status2 404 test-job
+ics_api_idc_get_job_status2 404 test-job
-ecs_api_idc_delete_job 404 test-job
+ics_api_idc_delete_job 404 test-job
-ecs_api_edp_get_producer_jobs_2 404 test-prod
+ics_api_edp_get_producer_jobs_2 404 test-prod
-ecs_api_edp_get_type_2 404 test-type
-ecs_api_edp_delete_type_2 404 test-type
+ics_api_edp_get_type_2 404 test-type
+ics_api_edp_delete_type_2 404 test-type
### Setup of producer/job and testing apis ###
## Setup prod-ia
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_get_type_2 404 type101
-ecs_api_edp_put_producer_2 404 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_get_type_2 404 type101
+ics_api_edp_put_producer_2 404 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
# Create type, delete and create again
-ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_2 200 type101
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_delete_type_2 204 type101
-ecs_api_edp_get_type_2 404 type101
-ecs_api_edp_get_type_ids 200 type1 type2 type4 type6
-ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
+ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_2 200 type101
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_delete_type_2 204 type101
+ics_api_edp_get_type_2 404 type101
+ics_api_edp_get_type_ids 200 type1 type2 type4 type6
+ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
-ecs_api_edp_put_producer_2 201 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_put_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_put_producer_2 201 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_put_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_delete_type_2 406 type101
+if [[ "$ICS_FEATURE_LEVEL" == *"RESP_CODE_CHANGE_1"* ]]; then
+ ics_api_edp_delete_type_2 409 type101
+else
+ ics_api_edp_delete_type_2 406 type101
+fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 17 30
- cr_equal received_callbacks?id=type-status1 11
- cr_api_check_all_ecs_subscription_events 200 type-status1 type101 testdata/ecs/info-type-1.json REGISTERED type101 testdata/ecs/info-type-1.json DEREGISTERED type101 testdata/ecs/info-type-1.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 17 30
+ cr_equal 0 received_callbacks?id=type-status1 11
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type101 testdata/ics/info-type-1.json REGISTERED type101 testdata/ics/info-type-1.json DEREGISTERED type101 testdata/ics/info-type-1.json REGISTERED
else
- cr_equal received_callbacks 6
+ cr_equal 0 received_callbacks 6
fi
-ecs_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
+ics_api_edp_get_type_ids 200 type101 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_ids_2 200 type101 prod-ia
-ecs_api_edp_get_producer_ids_2 200 type102 EMPTY
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 type101 prod-ia
+ics_api_edp_get_producer_ids_2 200 type102 EMPTY
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_idc_get_job_ids 200 type101 NOWNER EMPTY
-ecs_api_idc_get_job_ids 200 type101 test-owner EMPTY
+ics_api_idc_get_job_ids 200 type101 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type101 test-owner EMPTY
-ecs_api_idc_get_job 404 test-job
+ics_api_idc_get_job 404 test-job
-ecs_api_idc_get_job_status2 404 test-job
-ecs_api_edp_get_producer_jobs_2 200 prod-ia EMPTY
+ics_api_idc_get_job_status2 404 test-job
+ics_api_edp_get_producer_jobs_2 200 prod-ia EMPTY
## Create a job for prod-ia
## job101 - prod-ia
-ecs_api_idc_put_job 201 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ics/job-template.json VALIDATE
# Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 NOWNER job101
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job1 job2 job3 job8 job10
-ecs_api_idc_get_job 200 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job101 type101 $TARGET101 info-owner-1 $INFOSTATUS101 testdata/ics/job-template.json
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
prodstub_equal create/prod-ia/job101 1
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json
## Create a second job for prod-ia
## job102 - prod-ia
-ecs_api_idc_put_job 201 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ics/job-template.json VALIDATE
# Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ia job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101 job102
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-2 job102
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+prodstub_check_jobdata_3 200 prod-ia job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
+ics_api_idc_get_job_ids 200 type101 NOWNER job101 job102
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 info-owner-2 job102
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
-ecs_api_idc_get_job 200 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job102 type101 $TARGET102 info-owner-2 $INFOSTATUS102 testdata/ics/job-template.json
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
prodstub_equal create/prod-ia/job102 1
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
## Setup prod-ib
-ecs_api_edp_put_type_2 201 type102 testdata/ecs/info-type-2.json
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_type_2 201 type102 testdata/ics/info-type-2.json
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 18 30
- cr_equal received_callbacks?id=type-status1 12
- cr_api_check_all_ecs_subscription_events 200 type-status1 type102 testdata/ecs/info-type-2.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 18 30
+ cr_equal 0 received_callbacks?id=type-status1 12
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type102 testdata/ics/info-type-2.json REGISTERED
else
- cr_equal received_callbacks 6
+ cr_equal 0 received_callbacks 6
fi
-ecs_api_idc_get_type_ids 200 type101 type102 type1 type2 type4 type6
+ics_api_idc_get_type_ids 200 type101 type102 type1 type2 type4 type6
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- ecs_api_idc_get_type 200 type101 testdata/ecs/info-type-1.json ENABLED 1
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ ics_api_idc_get_type 200 type101 testdata/ics/info-type-1.json ENABLED 1
- ecs_api_idc_get_type 200 type102 testdata/ecs/info-type-2.json ENABLED 1
+ ics_api_idc_get_type 200 type102 testdata/ics/info-type-2.json ENABLED 1
else
- ecs_api_idc_get_type 200 type101 testdata/ecs/info-type-1.json
+ ics_api_idc_get_type 200 type101 testdata/ics/info-type-1.json
- ecs_api_idc_get_type 200 type102 testdata/ecs/info-type-2.json
+ ics_api_idc_get_type 200 type102 testdata/ics/info-type-2.json
fi
-ecs_api_edp_get_type_ids 200 type101 type102 type1 type2 type4 type6
-ecs_api_edp_get_type_2 200 type101 testdata/ecs/info-type-1.json
-ecs_api_edp_get_type_2 200 type102 testdata/ecs/info-type-2.json
+ics_api_edp_get_type_ids 200 type101 type102 type1 type2 type4 type6
+ics_api_edp_get_type_2 200 type101 testdata/ics/info-type-1.json
+ics_api_edp_get_type_2 200 type102 testdata/ics/info-type-2.json
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
## Create job for prod-ib
## job103 - prod-ib
-ecs_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json VALIDATE
prodstub_equal create/prod-ib/job103 1
# Check the job data in the producer
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template.json
-ecs_api_idc_get_job_ids 200 type101 NOWNER job101 job102
-ecs_api_idc_get_job_ids 200 type102 NOWNER job103
-ecs_api_idc_get_job_ids 200 type101 info-owner-1 job101
-ecs_api_idc_get_job_ids 200 type101 info-owner-2 job102
-ecs_api_idc_get_job_ids 200 type102 info-owner-3 job103
+ics_api_idc_get_job_ids 200 type101 NOWNER job101 job102
+ics_api_idc_get_job_ids 200 type102 NOWNER job103
+ics_api_idc_get_job_ids 200 type101 info-owner-1 job101
+ics_api_idc_get_job_ids 200 type101 info-owner-2 job102
+ics_api_idc_get_job_ids 200 type102 info-owner-3 job103
-ecs_api_idc_get_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json
+ics_api_idc_get_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ecs/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ecs/job-template.json
-ecs_api_edp_get_producer_jobs_2 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ia job101 type101 $TARGET101 info-owner-1 testdata/ics/job-template.json job102 type101 $TARGET102 info-owner-2 testdata/ics/job-template.json
+ics_api_edp_get_producer_jobs_2 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template.json
## Setup prod-ic (no types)
-ecs_api_edp_put_producer_2 201 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic NOTYPE
+ics_api_edp_put_producer_2 201 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic NOTYPE
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
-ecs_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-ecs_api_edp_get_producer_2 200 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic EMPTY
+ics_api_edp_get_producer_2 200 prod-ia $CB_JOB/prod-ia $CB_SV/prod-ia type101
+ics_api_edp_get_producer_2 200 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_get_producer_2 200 prod-ic $CB_JOB/prod-ic $CB_SV/prod-ic EMPTY
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
## Delete job103 and prod-ib and re-create if different order
# Delete job then producer
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
-ecs_api_idc_delete_job 204 job103
+ics_api_idc_delete_job 204 job103
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
-ecs_api_edp_delete_producer 204 prod-ib
+ics_api_edp_delete_producer 204 prod-ib
-ecs_api_edp_get_producer_status 404 prod-ib
+ics_api_edp_get_producer_status 404 prod-ib
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
prodstub_equal delete/prod-ib/job103 1
-ecs_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template.json VALIDATE
-ecs_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
+ics_api_idc_put_job 201 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template.json VALIDATE
+ics_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
# Put producer then job
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_idc_put_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ecs/job-template2.json VALIDATE
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_put_job 200 job103 type102 $TARGET103 info-owner-3 $INFOSTATUS103 testdata/ics/job-template2.json VALIDATE
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template2.json
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-b prod-c prod-d prod-e
prodstub_equal create/prod-ib/job103 3
prodstub_equal delete/prod-ib/job103 1
# Delete only the producer
-ecs_api_edp_delete_producer 204 prod-ib
+ics_api_edp_delete_producer 204 prod-ib
-ecs_api_edp_get_producer_status 404 prod-ib
+ics_api_edp_get_producer_status 404 prod-ib
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job1 job2 job3 job8 job10
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ic prod-b prod-c prod-d prod-e
-ecs_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 DISABLED EMPTYPROD
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 19 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 19 30
- cr_equal received_callbacks?id=info-job103-status 1
- cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+ cr_equal 0 received_callbacks?id=info-job103-status 1
+ cr_api_check_all_ics_events 200 0 info-job103-status DISABLED
else
- cr_equal received_callbacks 7 30
- cr_equal received_callbacks?id=info-job103-status 1
- cr_api_check_all_ecs_events 200 info-job103-status DISABLED
+ cr_equal 0 received_callbacks 7 30
+ cr_equal 0 received_callbacks?id=info-job103-status 1
+ cr_api_check_all_ics_events 200 0 info-job103-status DISABLED
fi
# Re-create the producer
-ecs_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
+ics_api_edp_put_producer_2 201 prod-ib $CB_JOB/prod-ib $CB_SV/prod-ib type102
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 20 30
- cr_equal received_callbacks?id=info-job103-status 2
- cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 20 30
+ cr_equal 0 received_callbacks?id=info-job103-status 2
+ cr_api_check_all_ics_events 200 0 info-job103-status ENABLED
else
- cr_equal received_callbacks 8 30
- cr_equal received_callbacks?id=info-job103-status 2
- cr_api_check_all_ecs_events 200 info-job103-status ENABLED
+ cr_equal 0 received_callbacks 8 30
+ cr_equal 0 received_callbacks?id=info-job103-status 2
+ cr_api_check_all_ics_events 200 0 info-job103-status ENABLED
fi
-prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-ib job103 type102 $TARGET103 info-owner-3 testdata/ics/job-template2.json
## Setup prod-id
-ecs_api_edp_put_type_2 201 type104 testdata/ecs/info-type-4.json
-ecs_api_edp_put_producer_2 201 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
+ics_api_edp_put_type_2 201 type104 testdata/ics/info-type-4.json
+ics_api_edp_put_producer_2 201 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
-ecs_api_idc_get_job_ids 200 type104 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type104 NOWNER EMPTY
-ecs_api_idc_put_job 201 job108 type104 $TARGET108 info-owner-4 $INFOSTATUS108 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job108 type104 $TARGET108 info-owner-4 $INFOSTATUS108 testdata/ics/job-template.json VALIDATE
-prodstub_check_jobdata_3 200 prod-id job108 type104 $TARGET108 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-id job108 type104 $TARGET108 info-owner-4 testdata/ics/job-template.json
prodstub_equal create/prod-id/job108 1
prodstub_equal delete/prod-id/job108 0
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
# Re-PUT the producer with zero types
-ecs_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id NOTYPE
+ics_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id NOTYPE
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
-ecs_api_idc_get_job_status2 200 job108 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job108 DISABLED EMPTYPROD
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 22 30
- cr_equal received_callbacks?id=type-status1 13
- cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 22 30
+ cr_equal 0 received_callbacks?id=type-status1 13
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json REGISTERED
- cr_equal received_callbacks?id=info-job108-status 1
- cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+ cr_equal 0 received_callbacks?id=info-job108-status 1
+ cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
else
- cr_equal received_callbacks 9 30
- cr_equal received_callbacks?id=info-job108-status 1
- cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+ cr_equal 0 received_callbacks 9 30
+ cr_equal 0 received_callbacks?id=info-job108-status 1
+ cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
fi
prodstub_equal create/prod-id/job108 1
prodstub_equal delete/prod-id/job108 0
## Re-setup prod-id
-ecs_api_edp_put_type_2 200 type104 testdata/ecs/info-type-4.json
-ecs_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
+ics_api_edp_put_type_2 200 type104 testdata/ics/info-type-4.json
+ics_api_edp_put_producer_2 200 prod-id $CB_JOB/prod-id $CB_SV/prod-id type104
-ecs_api_idc_get_job_ids 200 type104 NOWNER job108
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
+ics_api_idc_get_job_ids 200 type104 NOWNER job108
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER job101 job102 job103 job108 job1 job2 job3 job8 job10
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 24 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 24 30
- cr_equal received_callbacks?id=type-status1 14
- cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json REGISTERED
+ cr_equal 0 received_callbacks?id=type-status1 14
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json REGISTERED
- cr_equal received_callbacks?id=info-job108-status 2
- cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+ cr_equal 0 received_callbacks?id=info-job108-status 2
+ cr_api_check_all_ics_events 200 0 info-job108-status ENABLED
else
- cr_equal received_callbacks 10 30
- cr_equal received_callbacks?id=info-job108-status 2
- cr_api_check_all_ecs_events 200 info-job108-status ENABLED
+ cr_equal 0 received_callbacks 10 30
+ cr_equal 0 received_callbacks?id=info-job108-status 2
+ cr_api_check_all_ics_events 200 0 info-job108-status ENABLED
fi
prodstub_equal create/prod-id/job108 2
## Setup prod-ie
-ecs_api_edp_put_type_2 201 type106 testdata/ecs/info-type-6.json
-ecs_api_edp_put_producer_2 201 prod-ie $CB_JOB/prod-ie $CB_SV/prod-ie type106
+ics_api_edp_put_type_2 201 type106 testdata/ics/info-type-6.json
+ics_api_edp_put_producer_2 201 prod-ie $CB_JOB/prod-ie $CB_SV/prod-ie type106
-ecs_api_idc_get_job_ids 200 type106 NOWNER EMPTY
+ics_api_idc_get_job_ids 200 type106 NOWNER EMPTY
-ecs_api_idc_put_job 201 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ics/job-template.json VALIDATE
-prodstub_check_jobdata_3 200 prod-ie job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ie job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template.json
prodstub_equal create/prod-ie/job110 1
prodstub_equal delete/prod-ie/job110 0
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
-ecs_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
+ics_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
## Setup prod-if
-ecs_api_edp_put_type_2 200 type106 testdata/ecs/info-type-6.json
-ecs_api_edp_put_producer_2 201 prod-if $CB_JOB/prod-if $CB_SV/prod-if type106
+ics_api_edp_put_type_2 200 type106 testdata/ics/info-type-6.json
+ics_api_edp_put_producer_2 201 prod-if $CB_JOB/prod-if $CB_SV/prod-if type106
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 26 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 26 30
- cr_equal received_callbacks?id=type-status1 16
- cr_api_check_all_ecs_subscription_events 200 type-status1 type106 testdata/ecs/info-type-6.json REGISTERED type106 testdata/ecs/info-type-6.json REGISTERED
+ cr_equal 0 received_callbacks?id=type-status1 16
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type106 testdata/ics/info-type-6.json REGISTERED type106 testdata/ics/info-type-6.json REGISTERED
fi
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
-prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template.json
prodstub_equal create/prod-if/job110 1
prodstub_equal delete/prod-if/job110 0
-ecs_api_idc_get_job_ids 200 type106 NOWNER job110
+ics_api_idc_get_job_ids 200 type106 NOWNER job110
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
## Status updates prod-ia and jobs
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
# Arm producer prod-ia for supervision failure
prodstub_arm_producer 200 prod-ia 400
# Wait for producer prod-ia to go disabled
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED 360
+ics_api_edp_get_producer_status 200 prod-ia DISABLED 360
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia DISABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
# Arm producer prod-ia for supervision
prodstub_arm_producer 200 prod-ia 200
# Wait for producer prod-ia to go enabled
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED 360
+ics_api_edp_get_producer_status 200 prod-ia ENABLED 360
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 200 prod-ia ENABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia ENABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
# Arm producer prod-ia for supervision failure
prodstub_arm_producer 200 prod-ia 400
# Wait for producer prod-ia to go disabled
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED 360
+ics_api_edp_get_producer_status 200 prod-ia DISABLED 360
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ia prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 200 prod-ia DISABLED
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 200 prod-ia DISABLED
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
-ecs_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job102 ENABLED 1 prod-ia
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
# Wait for producer prod-ia to be removed
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 9 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 9 1000
else
- ecs_equal json:ei-producer/v1/eiproducers 9 1000
+ ics_equal json:ei-producer/v1/eiproducers 9 1000
fi
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 28 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 28 30
- cr_equal received_callbacks?id=info-job101-status 1
- cr_equal received_callbacks?id=info-job102-status 1
- cr_api_check_all_ecs_events 200 info-job101-status DISABLED
- cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+ cr_equal 0 received_callbacks?id=info-job101-status 1
+ cr_equal 0 received_callbacks?id=info-job102-status 1
+ cr_api_check_all_ics_events 200 0 info-job101-status DISABLED
+ cr_api_check_all_ics_events 200 0 info-job102-status DISABLED
else
- cr_equal received_callbacks 12 30
+ cr_equal 0 received_callbacks 12 30
- cr_equal received_callbacks?id=info-job101-status 1
- cr_equal received_callbacks?id=info-job102-status 1
- cr_api_check_all_ecs_events 200 info-job101-status DISABLED
- cr_api_check_all_ecs_events 200 info-job102-status DISABLED
+ cr_equal 0 received_callbacks?id=info-job101-status 1
+ cr_equal 0 received_callbacks?id=info-job102-status 1
+ cr_api_check_all_ics_events 200 0 info-job101-status DISABLED
+ cr_api_check_all_ics_events 200 0 info-job102-status DISABLED
fi
# Arm producer prod-ie for supervision failure
prodstub_arm_producer 200 prod-ie 400
-ecs_api_edp_get_producer_status 200 prod-ie DISABLED 1000
+ics_api_edp_get_producer_status 200 prod-ie DISABLED 1000
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie DISABLED
-ecs_api_edp_get_producer_status 200 prod-if ENABLED
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie DISABLED
+ics_api_edp_get_producer_status 200 prod-if ENABLED
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 2 prod-ie prod-if
#Disable create for job110 in prod-ie
prodstub_arm_job_create 200 prod-ie job110 400
#Update job110 - only prod-if will be updated
-ecs_api_idc_put_job 200 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ecs/job-template2.json VALIDATE
+ics_api_idc_put_job 200 job110 type106 $TARGET110 info-owner-4 $INFOSTATUS110 testdata/ics/job-template2.json VALIDATE
#Reset producer and job responses
prodstub_arm_producer 200 prod-ie 200
prodstub_arm_job_create 200 prod-ie job110 200
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED 360
+ics_api_edp_get_producer_status 200 prod-ie ENABLED 360
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-if prod-b prod-c prod-d prod-e
#Wait for job to be updated
sleep_wait 120
-prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ecs/job-template2.json
+prodstub_check_jobdata_3 200 prod-if job110 type106 $TARGET110 info-owner-4 testdata/ics/job-template2.json
prodstub_arm_producer 200 prod-if 400
-ecs_api_edp_get_producer_status 200 prod-if DISABLED 360
+ics_api_edp_get_producer_status 200 prod-if DISABLED 360
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 8 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 8 1000
else
- ecs_equal json:ei-producer/v1/eiproducers 8 1000
+ ics_equal json:ei-producer/v1/eiproducers 8 1000
fi
-ecs_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-b prod-c prod-d prod-e
+ics_api_edp_get_producer_ids_2 200 NOTYPE prod-ib prod-ic prod-id prod-ie prod-b prod-c prod-d prod-e
-ecs_api_edp_get_producer_status 404 prod-ia
-ecs_api_edp_get_producer_status 200 prod-ib ENABLED
-ecs_api_edp_get_producer_status 200 prod-ic ENABLED
-ecs_api_edp_get_producer_status 200 prod-id ENABLED
-ecs_api_edp_get_producer_status 200 prod-ie ENABLED
-ecs_api_edp_get_producer_status 404 prod-if
+ics_api_edp_get_producer_status 404 prod-ia
+ics_api_edp_get_producer_status 200 prod-ib ENABLED
+ics_api_edp_get_producer_status 200 prod-ic ENABLED
+ics_api_edp_get_producer_status 200 prod-id ENABLED
+ics_api_edp_get_producer_status 200 prod-ie ENABLED
+ics_api_edp_get_producer_status 404 prod-if
-ecs_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
-ecs_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
-ecs_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
-ecs_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
+ics_api_idc_get_job_status2 200 job101 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job102 DISABLED EMPTYPROD
+ics_api_idc_get_job_status2 200 job103 ENABLED 1 prod-ib
+ics_api_idc_get_job_status2 200 job108 ENABLED 1 prod-id
+ics_api_idc_get_job_status2 200 job110 ENABLED 1 prod-ie
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 28
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 28
else
- cr_equal received_callbacks 12
+ cr_equal 0 received_callbacks 12
fi
### Test of pre and post validation
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106
-ecs_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
-ecs_api_idc_put_job 201 job160 type160 $TARGET160 info-owner-1 $INFOSTATUS160 testdata/ecs/job-template.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106
+ics_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
+ics_api_idc_put_job 201 job160 type160 $TARGET160 info-owner-1 $INFOSTATUS160 testdata/ics/job-template.json
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
prodstub_arm_producer 200 prod-ig
prodstub_arm_job_create 200 prod-ig job150
prodstub_arm_job_create 200 prod-ig job160
-ecs_api_edp_put_producer_2 201 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig NOTYPE
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_put_producer_2 201 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig NOTYPE
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig EMPTY
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig EMPTY
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 DISABLED EMPTYPROD 60
prodstub_arm_type 200 prod-ig type160
-ecs_api_edp_put_type_2 201 type160 testdata/ecs/info-type-60.json
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160
+ics_api_edp_put_type_2 201 type160 testdata/ics/info-type-60.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160
-ecs_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
+ics_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160
-ecs_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 404 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig 60
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig 60
-prodstub_check_jobdata_3 200 prod-ig job160 type160 $TARGET160 info-owner-1 testdata/ecs/job-template.json
+prodstub_check_jobdata_3 200 prod-ig job160 type160 $TARGET160 info-owner-1 testdata/ics/job-template.json
prodstub_equal create/prod-ig/job160 1
prodstub_equal delete/prod-ig/job160 0
prodstub_arm_type 200 prod-ig type150
-ecs_api_edp_put_type_2 201 type150 testdata/ecs/info-type-50.json
-ecs_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160 type150
+ics_api_edp_put_type_2 201 type150 testdata/ics/info-type-50.json
+ics_api_idc_get_type_ids 200 type1 type2 type4 type6 type101 type102 type104 type106 type160 type150
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 30 30
- cr_equal received_callbacks?id=type-status1 18
- cr_api_check_all_ecs_subscription_events 200 type-status1 type160 testdata/ecs/info-type-60.json REGISTERED type150 testdata/ecs/info-type-50.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 30 30
+ cr_equal 0 received_callbacks?id=type-status1 18
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type160 testdata/ics/info-type-60.json REGISTERED type150 testdata/ics/info-type-50.json REGISTERED
else
- cr_equal received_callbacks 12
+ cr_equal 0 received_callbacks 12
fi
-ecs_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
-ecs_api_edp_get_producer_status 200 prod-ig ENABLED 360
+ics_api_edp_put_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
+ics_api_edp_get_producer_status 200 prod-ig ENABLED 360
-ecs_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
+ics_api_edp_get_producer_2 200 prod-ig $CB_JOB/prod-ig $CB_SV/prod-ig type160 type150
-ecs_api_idc_get_job_status2 404 job150
-ecs_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig
+ics_api_idc_get_job_status2 404 job150
+ics_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig
-ecs_api_idc_put_job 201 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ecs/job-template.json VALIDATE
+ics_api_idc_put_job 201 job150 type150 $TARGET150 info-owner-1 $INFOSTATUS150 testdata/ics/job-template.json VALIDATE
-ecs_api_idc_get_job_status2 200 job150 ENABLED 1 prod-ig 60
-ecs_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig
+ics_api_idc_get_job_status2 200 job150 ENABLED 1 prod-ig 60
+ics_api_idc_get_job_status2 200 job160 ENABLED 1 prod-ig
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 30 30
- cr_equal received_callbacks?id=type-status1 18
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 30 30
+ cr_equal 0 received_callbacks?id=type-status1 18
else
- cr_equal received_callbacks 12
+ cr_equal 0 received_callbacks 12
fi
# Test job deletion at type delete
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- ecs_api_edp_delete_type_2 406 type104
+ if [[ "$ICS_FEATURE_LEVEL" == *"RESP_CODE_CHANGE_1"* ]]; then
+ ics_api_edp_delete_type_2 409 type104
+ else
+ ics_api_edp_delete_type_2 406 type104
+ fi
- ecs_api_edp_delete_producer 204 prod-id
+ ics_api_edp_delete_producer 204 prod-id
- ecs_api_edp_delete_type_2 204 type104
+ ics_api_edp_delete_type_2 204 type104
- cr_equal received_callbacks 32 30
- cr_equal received_callbacks?id=info-job108-status 3
- cr_equal received_callbacks?id=type-status1 19
- cr_api_check_all_ecs_subscription_events 200 type-status1 type104 testdata/ecs/info-type-4.json DEREGISTERED
- cr_api_check_all_ecs_events 200 info-job108-status DISABLED
+ cr_equal 0 received_callbacks 32 30
+ cr_equal 0 received_callbacks?id=info-job108-status 3
+ cr_equal 0 received_callbacks?id=type-status1 19
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type104 testdata/ics/info-type-4.json DEREGISTERED
+ cr_api_check_all_ics_events 200 0 info-job108-status DISABLED
- ecs_api_edp_get_producer 404 prod-id
+ ics_api_edp_get_producer 404 prod-id
- ecs_api_idc_get_job 404 job-108
+ ics_api_idc_get_job 404 job-108
else
- cr_equal received_callbacks 12
+ cr_equal 0 received_callbacks 12
fi
-check_ecs_logs
+check_ics_logs
store_logs END
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
#
-TC_ONELINE_DESCR="ECS Create 10000 jobs (ei and info) restart, test job persistency"
+TC_ONELINE_DESCR="ICS Create 10000 jobs (ei and info) restart, test job persistency"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS PRODSTUB CR CP NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="ICS PRODSTUB CR CP NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="ECS PRODSTUB CP CR KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES="ICS PRODSTUB CP CR KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_kube_proxy
-use_ecs_rest_http
+use_ics_rest_http
use_prod_stub_http
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
start_prod_stub
-set_ecs_trace
+set_ics_trace
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
start_gateway $SIM_GROUP/$NRT_GATEWAY_COMPOSE_DIR/$NRT_GATEWAY_CONFIG_FILE
fi
-start_cr
+start_cr 1
CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
NUM_JOBS=10000
use_info_jobs=false #Set flag if interface supporting info-types is used
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
use_info_jobs=true
NUM_JOBS=5000 # 5K ei jobs and 5K info jobs
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
#Type registration status callbacks
- TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
- TYPESTATUS2="$CR_SERVICE_APP_PATH/type-status2"
+ TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
+ TYPESTATUS2="$CR_SERVICE_APP_PATH_0/type-status2"
- ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
- ecs_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
+ ics_api_idc_put_subscription 201 subscription-id-2 owner2 $TYPESTATUS2
- ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+ ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
fi
fi
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
- ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json
+ ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json
- ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json type3 testdata/ecs/ei-type-3.json
+ ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json type3 testdata/ics/ei-type-3.json
- ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json type5 testdata/ecs/ei-type-5.json
+ ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json type5 testdata/ics/ei-type-5.json
else
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_put_type_2 201 type2 testdata/ecs/ei-type-2.json
- ecs_api_edp_put_type_2 201 type3 testdata/ecs/ei-type-3.json
- ecs_api_edp_put_type_2 201 type4 testdata/ecs/ei-type-4.json
- ecs_api_edp_put_type_2 201 type5 testdata/ecs/ei-type-5.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+ ics_api_edp_put_type_2 201 type2 testdata/ics/ei-type-2.json
+ ics_api_edp_put_type_2 201 type3 testdata/ics/ei-type-3.json
+ ics_api_edp_put_type_2 201 type4 testdata/ics/ei-type-4.json
+ ics_api_edp_put_type_2 201 type5 testdata/ics/ei-type-5.json
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
- ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
+ ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
- ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
+ ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
if [ $use_info_jobs ]; then
- ecs_api_edp_put_type_2 201 type101 testdata/ecs/info-type-1.json
- ecs_api_edp_put_type_2 201 type102 testdata/ecs/info-type-2.json
- ecs_api_edp_put_type_2 201 type103 testdata/ecs/info-type-3.json
- ecs_api_edp_put_type_2 201 type104 testdata/ecs/info-type-4.json
- ecs_api_edp_put_type_2 201 type105 testdata/ecs/info-type-5.json
+ ics_api_edp_put_type_2 201 type101 testdata/ics/info-type-1.json
+ ics_api_edp_put_type_2 201 type102 testdata/ics/info-type-2.json
+ ics_api_edp_put_type_2 201 type103 testdata/ics/info-type-3.json
+ ics_api_edp_put_type_2 201 type104 testdata/ics/info-type-4.json
+ ics_api_edp_put_type_2 201 type105 testdata/ics/info-type-5.json
- if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 20 30
- cr_equal received_callbacks?id=type-status1 10
- cr_equal received_callbacks?id=type-status2 10
+ if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 20 30
+ cr_equal 0 received_callbacks?id=type-status1 10
+ cr_equal 0 received_callbacks?id=type-status2 10
- cr_api_check_all_ecs_subscription_events 200 type-status1 \
- type1 testdata/ecs/ei-type-1.json REGISTERED \
- type2 testdata/ecs/ei-type-2.json REGISTERED \
- type3 testdata/ecs/ei-type-3.json REGISTERED \
- type4 testdata/ecs/ei-type-4.json REGISTERED \
- type5 testdata/ecs/ei-type-5.json REGISTERED \
- type101 testdata/ecs/info-type-1.json REGISTERED \
- type102 testdata/ecs/info-type-2.json REGISTERED \
- type103 testdata/ecs/info-type-3.json REGISTERED \
- type104 testdata/ecs/info-type-4.json REGISTERED \
- type105 testdata/ecs/info-type-5.json REGISTERED
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 \
+ type1 testdata/ics/ei-type-1.json REGISTERED \
+ type2 testdata/ics/ei-type-2.json REGISTERED \
+ type3 testdata/ics/ei-type-3.json REGISTERED \
+ type4 testdata/ics/ei-type-4.json REGISTERED \
+ type5 testdata/ics/ei-type-5.json REGISTERED \
+ type101 testdata/ics/info-type-1.json REGISTERED \
+ type102 testdata/ics/info-type-2.json REGISTERED \
+ type103 testdata/ics/info-type-3.json REGISTERED \
+ type104 testdata/ics/info-type-4.json REGISTERED \
+ type105 testdata/ics/info-type-5.json REGISTERED
- cr_api_check_all_ecs_subscription_events 200 type-status2 \
- type1 testdata/ecs/ei-type-1.json REGISTERED \
- type2 testdata/ecs/ei-type-2.json REGISTERED \
- type3 testdata/ecs/ei-type-3.json REGISTERED \
- type4 testdata/ecs/ei-type-4.json REGISTERED \
- type5 testdata/ecs/ei-type-5.json REGISTERED \
- type101 testdata/ecs/info-type-1.json REGISTERED \
- type102 testdata/ecs/info-type-2.json REGISTERED \
- type103 testdata/ecs/info-type-3.json REGISTERED \
- type104 testdata/ecs/info-type-4.json REGISTERED \
- type105 testdata/ecs/info-type-5.json REGISTERED
+ cr_api_check_all_ics_subscription_events 200 0 type-status2 \
+ type1 testdata/ics/ei-type-1.json REGISTERED \
+ type2 testdata/ics/ei-type-2.json REGISTERED \
+ type3 testdata/ics/ei-type-3.json REGISTERED \
+ type4 testdata/ics/ei-type-4.json REGISTERED \
+ type5 testdata/ics/ei-type-5.json REGISTERED \
+ type101 testdata/ics/info-type-1.json REGISTERED \
+ type102 testdata/ics/info-type-2.json REGISTERED \
+ type103 testdata/ics/info-type-3.json REGISTERED \
+ type104 testdata/ics/info-type-4.json REGISTERED \
+ type105 testdata/ics/info-type-5.json REGISTERED
fi
- ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
+ ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
- ecs_api_edp_put_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
+ ics_api_edp_put_producer_2 200 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
- ecs_api_edp_put_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
+ ics_api_edp_put_producer_2 200 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
- ecs_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
+ ics_api_edp_put_producer_2 200 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
fi
fi
if [ $use_info_jobs ]; then
- ecs_equal json:data-producer/v1/info-producers 4
+ ics_equal json:data-producer/v1/info-producers 4
else
- ecs_equal json:ei-producer/v1/eiproducers 4
+ ics_equal json:ei-producer/v1/eiproducers 4
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
for ((i=1; i<=$NUM_JOBS; i++))
do
if [ $(($i%5)) -eq 0 ]; then
- ecs_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job$i type1 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job$i ENABLED
+ ics_api_a1_get_job_status 200 type1 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
+ ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type101 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
fi
fi
if [ $(($i%5)) -eq 1 ]; then
- ecs_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job$i type2 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type2 job$i ENABLED
+ ics_api_a1_get_job_status 200 type2 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
+ ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type102 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
fi
fi
if [ $(($i%5)) -eq 2 ]; then
- ecs_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job$i type3 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type3 job$i ENABLED
+ ics_api_a1_get_job_status 200 type3 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
+ ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type103 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
fi
fi
if [ $(($i%5)) -eq 3 ]; then
- ecs_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job$i type4 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job$i ENABLED
+ ics_api_a1_get_job_status 200 type4 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+ ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type104 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
fi
fi
if [ $(($i%5)) -eq 4 ]; then
- ecs_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH/job_status_ric1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job$i type5 $TARGET ric1 $CR_SERVICE_APP_PATH_0/job_status_ric1 testdata/ics/job-template.json
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type5 job$i ENABLED
+ ics_api_a1_get_job_status 200 type5 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH/job_status_info-owner testdata/ecs/job-template.json VALIDATE
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+ ics_api_idc_put_job 201 job$(($i+$NUM_JOBS)) type105 $TARGET info-owner $CR_SERVICE_APP_PATH_0/job_status_info-owner testdata/ics/job-template.json VALIDATE
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
fi
fi
done
if [ -z "$FLAT_A1_EI" ]; then
- ecs_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
else
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
fi
if [ $use_info_jobs ]; then
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 20 30
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 20 30
else
- cr_equal received_callbacks 0 30
+ cr_equal 0 received_callbacks 0 30
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- ecs_equal json:/data-consumer/v1/info-type-subscription 2 200
+ ics_equal json:/data-consumer/v1/info-type-subscription 2 200
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
- ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
if [ $use_info_jobs ]; then
- ecs_equal json:data-producer/v1/info-types 10 1000
+ ics_equal json:data-producer/v1/info-types 10 1000
else
- ecs_equal json:ei-producer/v1/eitypes 5 1000
+ ics_equal json:ei-producer/v1/eitypes 5 1000
fi
fi
-stop_ecs
+stop_ics
-cr_api_reset
+cr_api_reset 0
-start_stopped_ecs
+start_stopped_ics
-set_ecs_trace
+set_ics_trace
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- ecs_equal json:/data-consumer/v1/info-type-subscription 2 200
+ ics_equal json:/data-consumer/v1/info-type-subscription 2 200
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
- ecs_api_idc_get_subscription_ids 200 owner2 subscription-id-2
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner2 subscription-id-2
if [ $use_info_jobs ]; then
- ecs_equal json:data-producer/v1/info-types 10 1000
+ ics_equal json:data-producer/v1/info-types 10 1000
else
- ecs_equal json:ei-producer/v1/eitypes 5 1000
+ ics_equal json:ei-producer/v1/eitypes 5 1000
fi
fi
-cr_equal received_callbacks 0
+cr_equal 0 received_callbacks 0
for ((i=1; i<=$NUM_JOBS; i++))
do
fi
done
-ecs_api_edp_get_producer_status 404 prod-a
-ecs_api_edp_get_producer_status 404 prod-b
-ecs_api_edp_get_producer_status 404 prod-c
-ecs_api_edp_get_producer_status 404 prod-d
+ics_api_edp_get_producer_status 404 prod-a
+ics_api_edp_get_producer_status 404 prod-b
+ics_api_edp_get_producer_status 404 prod-c
+ics_api_edp_get_producer_status 404 prod-d
for ((i=1; i<=$NUM_JOBS; i++))
do
if [ $(($i%5)) -eq 0 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job$i DISABLED
+ ics_api_a1_get_job_status 200 type1 job$i DISABLED
else
- ecs_api_a1_get_job_status 200 job$i DISABLED 120
+ ics_api_a1_get_job_status 200 job$i DISABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
fi
fi
if [ $(($i%5)) -eq 1 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type2 job$i DISABLED
+ ics_api_a1_get_job_status 200 type2 job$i DISABLED
else
- ecs_api_a1_get_job_status 200 job$i DISABLED 120
+ ics_api_a1_get_job_status 200 job$i DISABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
fi
fi
if [ $(($i%5)) -eq 2 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type3 job$i DISABLED
+ ics_api_a1_get_job_status 200 type3 job$i DISABLED
else
- ecs_api_a1_get_job_status 200 job$i DISABLED 120
+ ics_api_a1_get_job_status 200 job$i DISABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
fi
fi
if [ $(($i%5)) -eq 3 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job$i DISABLED
+ ics_api_a1_get_job_status 200 type4 job$i DISABLED
else
- ecs_api_a1_get_job_status 200 job$i DISABLED 120
+ ics_api_a1_get_job_status 200 job$i DISABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
fi
fi
if [ $(($i%5)) -eq 4 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type5 job$i DISABLED
+ ics_api_a1_get_job_status 200 type5 job$i DISABLED
else
- ecs_api_a1_get_job_status 200 job$i DISABLED 120
+ ics_api_a1_get_job_status 200 job$i DISABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) DISABLED EMPTYPROD 120
fi
fi
done
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
- ecs_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json
+ ics_api_edp_put_producer 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json
- ecs_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ecs/ei-type-1.json type2 testdata/ecs/ei-type-2.json type3 testdata/ecs/ei-type-3.json
+ ics_api_edp_put_producer 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 testdata/ics/ei-type-1.json type2 testdata/ics/ei-type-2.json type3 testdata/ics/ei-type-3.json
- ecs_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ecs/ei-type-4.json type5 testdata/ecs/ei-type-5.json
+ ics_api_edp_put_producer 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 testdata/ics/ei-type-4.json type5 testdata/ics/ei-type-5.json
else
if [ $use_info_jobs ]; then
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 type101
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2 type101 type102
- ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
+ ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3 type101 type102 type103
- ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
+ ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5 type104 type105
else
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
+ ics_api_edp_put_producer_2 201 prod-b $CB_JOB/prod-b $CB_SV/prod-b type1 type2
- ecs_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
+ ics_api_edp_put_producer_2 201 prod-c $CB_JOB/prod-c $CB_SV/prod-c type1 type2 type3
- ecs_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
+ ics_api_edp_put_producer_2 201 prod-d $CB_JOB/prod-d $CB_SV/prod-d type4 type5
fi
fi
if [ $use_info_jobs ]; then
- ecs_equal json:data-producer/v1/info-producers 4
+ ics_equal json:data-producer/v1/info-producers 4
else
- ecs_equal json:ei-producer/v1/eiproducers 4
+ ics_equal json:ei-producer/v1/eiproducers 4
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
for ((i=1; i<=$NUM_JOBS; i++))
do
if [ $(($i%5)) -eq 0 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job$i ENABLED
+ ics_api_a1_get_job_status 200 type1 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 3 prod-a prod-b prod-c 120
fi
fi
if [ $(($i%5)) -eq 1 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type2 job$i ENABLED
+ ics_api_a1_get_job_status 200 type2 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 2 prod-b prod-c 120
fi
fi
if [ $(($i%5)) -eq 2 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type3 job$i ENABLED
+ ics_api_a1_get_job_status 200 type3 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-c 120
fi
fi
if [ $(($i%5)) -eq 3 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type4 job$i ENABLED
+ ics_api_a1_get_job_status 200 type4 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
fi
fi
if [ $(($i%5)) -eq 4 ]; then
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type5 job$i ENABLED
+ ics_api_a1_get_job_status 200 type5 job$i ENABLED
else
- ecs_api_a1_get_job_status 200 job$i ENABLED 120
+ ics_api_a1_get_job_status 200 job$i ENABLED 120
fi
if [ $use_info_jobs ]; then
- ecs_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
+ ics_api_idc_get_job_status2 200 job$(($i+$NUM_JOBS)) ENABLED 1 prod-d 120
fi
fi
done
if [ -z "$FLAT_A1_EI" ]; then
- ecs_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type1/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type2/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type3/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type4/eijobs $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eitypes/type5/eijobs $(($NUM_JOBS/5))
else
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 $(($NUM_JOBS/5))
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 $(($NUM_JOBS/5))
fi
if [ $use_info_jobs ]; then
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 $(($NUM_JOBS/5))
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 $(($NUM_JOBS/5))
fi
for ((i=1; i<=$NUM_JOBS; i++))
do
if [ $(($i%5)) -eq 0 ]; then
- if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-a job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_2 200 prod-b job$i type1 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_2 200 prod-c job$i type1 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-a job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_2 200 prod-b job$i type1 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_2 200 prod-c job$i type1 $TARGET ric1 testdata/ics/job-template.json
fi
fi
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-a job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type101 $TARGET info-owner testdata/ics/job-template.json
fi
fi
if [ $(($i%5)) -eq 1 ]; then
- if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-b job$i type2 $TARGET ric1 testdata/ecs/job-template.json
- prodstub_check_jobdata_2 200 prod-c job$i type2 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-b job$i type2 $TARGET ric1 testdata/ics/job-template.json
+ prodstub_check_jobdata_2 200 prod-c job$i type2 $TARGET ric1 testdata/ics/job-template.json
fi
fi
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ecs/job-template.json
- prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-b job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ics/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type102 $TARGET info-owner testdata/ics/job-template.json
fi
fi
if [ $(($i%5)) -eq 2 ]; then
- if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-c job$i type3 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-c job$i type3 $TARGET ric1 testdata/ics/job-template.json
fi
fi
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type103 $TARGET info-owner testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-c job$(($i+$NUM_JOBS)) type103 $TARGET info-owner testdata/ics/job-template.json
fi
fi
if [ $(($i%5)) -eq 3 ]; then
- if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-d job$i type4 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-d job$i type4 $TARGET ric1 testdata/ics/job-template.json
fi
fi
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type104 $TARGET info-owner testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type104 $TARGET info-owner testdata/ics/job-template.json
fi
fi
if [ $(($i%5)) -eq 4 ]; then
- if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+ if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-d job$i type5 $TARGET ric1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-d job$i type5 $TARGET ric1 testdata/ics/job-template.json
fi
fi
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type105 $TARGET info-owner testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-d job$(($i+$NUM_JOBS)) type105 $TARGET info-owner testdata/ics/job-template.json
fi
fi
done
for ((i=1; i<=$NUM_JOBS; i++))
do
if [ $(($i%5)) -eq 0 ]; then
- ecs_api_a1_delete_job 204 job$i
+ ics_api_a1_delete_job 204 job$i
if [ $use_info_jobs ]; then
- ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+ ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
fi
fi
if [ $(($i%5)) -eq 1 ]; then
- ecs_api_a1_delete_job 204 job$i
+ ics_api_a1_delete_job 204 job$i
if [ $use_info_jobs ]; then
- ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+ ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
fi
fi
if [ $(($i%5)) -eq 2 ]; then
- ecs_api_a1_delete_job 204 job$i
+ ics_api_a1_delete_job 204 job$i
if [ $use_info_jobs ]; then
- ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+ ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
fi
fi
if [ $(($i%5)) -eq 3 ]; then
- ecs_api_a1_delete_job 204 job$i
+ ics_api_a1_delete_job 204 job$i
if [ $use_info_jobs ]; then
- ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+ ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
fi
fi
if [ $(($i%5)) -eq 4 ]; then
- ecs_api_a1_delete_job 204 job$i
+ ics_api_a1_delete_job 204 job$i
if [ $use_info_jobs ]; then
- ecs_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
+ ics_api_idc_delete_job 204 job$(($i+$NUM_JOBS))
fi
fi
done
if [ $use_info_jobs ]; then
- ecs_equal json:data-producer/v1/info-producers 4
+ ics_equal json:data-producer/v1/info-producers 4
else
- ecs_equal json:ei-producer/v1/eiproducers 4
+ ics_equal json:ei-producer/v1/eiproducers 4
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
-ecs_api_edp_get_producer_status 200 prod-b ENABLED
-ecs_api_edp_get_producer_status 200 prod-c ENABLED
-ecs_api_edp_get_producer_status 200 prod-d ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-b ENABLED
+ics_api_edp_get_producer_status 200 prod-c ENABLED
+ics_api_edp_get_producer_status 200 prod-d ENABLED
if [ -z "$FLAT_A1_EI" ]; then
- ecs_equal json:A1-EI/v1/eitypes/type1/eijobs 0
- ecs_equal json:A1-EI/v1/eitypes/type2/eijobs 0
- ecs_equal json:A1-EI/v1/eitypes/type3/eijobs 0
- ecs_equal json:A1-EI/v1/eitypes/type4/eijobs 0
- ecs_equal json:A1-EI/v1/eitypes/type5/eijobs 0
+ ics_equal json:A1-EI/v1/eitypes/type1/eijobs 0
+ ics_equal json:A1-EI/v1/eitypes/type2/eijobs 0
+ ics_equal json:A1-EI/v1/eitypes/type3/eijobs 0
+ ics_equal json:A1-EI/v1/eitypes/type4/eijobs 0
+ ics_equal json:A1-EI/v1/eitypes/type5/eijobs 0
else
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type1 0
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type2 0
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type3 0
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type4 0
- ecs_equal json:A1-EI/v1/eijobs?eiTypeId=type5 0
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type1 0
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type2 0
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type3 0
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type4 0
+ ics_equal json:A1-EI/v1/eijobs?eiTypeId=type5 0
fi
if [ $use_info_jobs ]; then
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 0
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 0
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 0
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 0
- ecs_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 0
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type101 0
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type102 0
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type103 0
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type104 0
+ ics_equal json:data-consumer/v1/info-jobs?infoTypeId=type105 0
fi
if [ $use_info_jobs ]; then
- if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- ecs_api_edp_put_type_2 200 type101 testdata/ecs/info-type-1.json
- ecs_api_edp_put_type_2 200 type102 testdata/ecs/info-type-2.json
- ecs_api_edp_put_type_2 200 type103 testdata/ecs/info-type-3.json
- ecs_api_edp_put_type_2 200 type104 testdata/ecs/info-type-4.json
- ecs_api_edp_put_type_2 200 type105 testdata/ecs/info-type-5.json
+ if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ ics_api_edp_put_type_2 200 type101 testdata/ics/info-type-1.json
+ ics_api_edp_put_type_2 200 type102 testdata/ics/info-type-2.json
+ ics_api_edp_put_type_2 200 type103 testdata/ics/info-type-3.json
+ ics_api_edp_put_type_2 200 type104 testdata/ics/info-type-4.json
+ ics_api_edp_put_type_2 200 type105 testdata/ics/info-type-5.json
fi
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 10 30
- cr_equal received_callbacks?id=type-status1 5
- cr_equal received_callbacks?id=type-status2 5
-
- cr_api_check_all_ecs_subscription_events 200 type-status1 \
- type101 testdata/ecs/info-type-1.json REGISTERED \
- type102 testdata/ecs/info-type-2.json REGISTERED \
- type103 testdata/ecs/info-type-3.json REGISTERED \
- type104 testdata/ecs/info-type-4.json REGISTERED \
- type105 testdata/ecs/info-type-5.json REGISTERED
-
- cr_api_check_all_ecs_subscription_events 200 type-status2 \
- type101 testdata/ecs/info-type-1.json REGISTERED \
- type102 testdata/ecs/info-type-2.json REGISTERED \
- type103 testdata/ecs/info-type-3.json REGISTERED \
- type104 testdata/ecs/info-type-4.json REGISTERED \
- type105 testdata/ecs/info-type-5.json REGISTERED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 10 30
+ cr_equal 0 received_callbacks?id=type-status1 5
+ cr_equal 0 received_callbacks?id=type-status2 5
+
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 \
+ type101 testdata/ics/info-type-1.json REGISTERED \
+ type102 testdata/ics/info-type-2.json REGISTERED \
+ type103 testdata/ics/info-type-3.json REGISTERED \
+ type104 testdata/ics/info-type-4.json REGISTERED \
+ type105 testdata/ics/info-type-5.json REGISTERED
+
+ cr_api_check_all_ics_subscription_events 200 0 type-status2 \
+ type101 testdata/ics/info-type-1.json REGISTERED \
+ type102 testdata/ics/info-type-2.json REGISTERED \
+ type103 testdata/ics/info-type-3.json REGISTERED \
+ type104 testdata/ics/info-type-4.json REGISTERED \
+ type105 testdata/ics/info-type-5.json REGISTERED
else
- cr_equal received_callbacks 0 30
+ cr_equal 0 received_callbacks 0 30
fi
-check_ecs_logs
+check_ics_logs
store_logs END
# ============LICENSE_END=================================================
#
-TC_ONELINE_DESCR="Testing southbound proxy for PMS and ECS"
+TC_ONELINE_DESCR="Testing southbound proxy for PMS and ICS"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM ECS PRODSTUB HTTPPROXY NGW KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM ICS PRODSTUB HTTPPROXY NGW KUBEPROXY"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" MR CR PA PRODSTUB RICSIM CP ECS HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES=" MR CR PA PRODSTUB RICSIM CP ICS HTTPPROXY KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
setup_testenvironment
use_cr_https
use_agent_rest_https
use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
use_prod_stub_https
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
echo "PMS VERSION 2 (V2) is required"
exit 1
consul_config_app ".consul_config.json"
fi
-start_cr
+start_cr 1
start_prod_stub
-start_ecs PROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics PROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
set_agent_trace
-set_ecs_debug
+set_ics_debug
api_get_status 200
#Check the number of types
api_equal json:policy-types 2 300
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
# Create policies in STD
for ((i=1; i<=$STD_NUM_RICS; i++))
TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
prodstub_arm_producer 200 prod-a
prodstub_arm_type 200 prod-a type1
prodstub_arm_job_create 200 prod-a job1
prodstub_arm_job_create 200 prod-a job2
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
#Type registration status callbacks
- TYPESTATUS1="$CR_SERVICE_APP_PATH/type-status1"
+ TYPESTATUS1="$CR_SERVICE_APP_PATH_0/type-status1"
- ecs_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
+ ics_api_idc_put_subscription 201 subscription-id-1 owner1 $TYPESTATUS1
- ecs_api_idc_get_subscription_ids 200 owner1 subscription-id-1
+ ics_api_idc_get_subscription_ids 200 owner1 subscription-id-1
fi
## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
else
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
## Create a job for prod-a
## job1 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
fi
fi
## Create a second job for prod-a
## job2 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" != *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
fi
fi
prodstub_arm_producer 200 prod-a 400
# Wait for producer prod-a to go disabled
-ecs_api_edp_get_producer_status 200 prod-a DISABLED 360
+ics_api_edp_get_producer_status 200 prod-a DISABLED 360
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 0 1000
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 0 1000
else
- ecs_equal json:ei-producer/v1/eiproducers 0 1000
+ ics_equal json:ei-producer/v1/eiproducers 0 1000
fi
-if [[ "$ECS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
- cr_equal received_callbacks 3 30
- cr_api_check_all_ecs_subscription_events 200 type-status1 type1 testdata/ecs/ei-type-1.json REGISTERED
- cr_api_check_all_ecs_events 200 job1-status DISABLED
- cr_api_check_all_ecs_events 200 job2-status DISABLED
+if [[ "$ICS_FEATURE_LEVEL" == *"TYPE-SUBSCRIPTIONS"* ]]; then
+ cr_equal 0 received_callbacks 3 30
+ cr_api_check_all_ics_subscription_events 200 0 type-status1 type1 testdata/ics/ei-type-1.json REGISTERED
+ cr_api_check_all_ics_events 200 0 job1-status DISABLED
+ cr_api_check_all_ics_events 200 0 job2-status DISABLED
else
- cr_equal received_callbacks 2 30
- cr_api_check_all_ecs_events 200 job1-status DISABLED
- cr_api_check_all_ecs_events 200 job2-status DISABLED
+ cr_equal 0 received_callbacks 2 30
+ cr_api_check_all_ics_events 200 0 job1-status DISABLED
+ cr_api_check_all_ics_events 200 0 job2-status DISABLED
fi
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str 0 remote_hosts $HTTP_PROXY_APP_NAME
check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
#### TEST COMPLETE ####
#Supported run modes
SUPPORTED_RUNMODES="DOCKER"
-. ../common/testcase_common.sh $@
-. ../common/controller_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/http_proxy_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
TC_ONELINE_DESCR="Testing southbound proxy for Dmaap Adaptor"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CR MR ECS HTTPPROXY KUBEPROXY DMAAPADP"
+DOCKER_INCLUDED_IMAGES="CR MR ICS HTTPPROXY KUBEPROXY DMAAPADP"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" CR MR ECS HTTPPROXY KUBEPROXY DMAAPADP"
+KUBE_INCLUDED_IMAGES=" CR MR ICS HTTPPROXY KUBEPROXY DMAAPADP"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/ecs_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
setup_testenvironment
clean_environment
use_cr_https
-use_ecs_rest_https
+use_ics_rest_https
use_mr_https
use_dmaapadp_https
start_http_proxy
-start_cr
+start_cr 1
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
-set_ecs_trace
+set_ics_trace
start_mr
set_dmaapadp_trace
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- ecs_equal json:data-producer/v1/info-producers 1 60
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ ics_equal json:data-producer/v1/info-producers 1 60
else
- ecs_equal json:ei-producer/v1/eiproducers 1 60
+ ics_equal json:ei-producer/v1/eiproducers 1 60
fi
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ecs_api_idc_get_type_ids 200 ExampleInformationType
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
+ics_api_idc_get_type_ids 200 ExampleInformationType
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+ ics_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
done
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_a1_get_job_status 200 joby$i ENABLED 30
+ ics_api_a1_get_job_status 200 joby$i ENABLED 30
done
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
-cr_equal received_callbacks $(($NUM_JOBS*2)) 60
+cr_equal 0 received_callbacks $(($NUM_JOBS*2)) 60
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_equal received_callbacks?id=joby-data$i 2
+ cr_equal 0 received_callbacks?id=joby-data$i 2
done
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
- cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+ cr_api_check_single_genric_json_event 200 0 joby-data$i '{"msg":"msg-1"}'
+ cr_api_check_single_genric_json_event 200 0 joby-data$i '{"msg":"msg-3"}'
done
-cr_contains_str remote_hosts $HTTP_PROXY_APP_NAME
+cr_contains_str 0 remote_hosts $HTTP_PROXY_APP_NAME
#### TEST COMPLETE ####
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_mr
- start_cr
+ start_cr 1
if [ $RUNMODE == "DOCKER" ]; then
start_consul_cbs
api_equal json:policy_types 2 120 #Wait for the agent to refresh types from the simulator
fi
- api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
START_ID=2000
NUM_POLICIES=10000 # Must be at least 100
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
TC_ONELINE_DESCR="App test DMAAP Meditor and DMAAP Adapter"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
+DOCKER_INCLUDED_IMAGES="ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" ECS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR"
+KUBE_INCLUDED_IMAGES=" ICS DMAAPMED DMAAPADP KUBEPROXY MR DMAAPMR CR KAFKAPC"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
setup_testenvironment
#Local vars in test script
##########################
FLAT_A1_EI="1"
-NUM_JOBS=100 # Mediator and adapter gets same number of jobs
+NUM_CR=10 # Number of callback receivers, all callbacks are divided among this number of servers - for load sharing
+## Note: The number of jobs must be a multiple of the number of CRs in order to calculate the number of expected events in each CR
+NUM_JOBS=200 # Mediator and adapter gets same number of jobs for every type
+
+if [ $NUM_JOBS -lt $NUM_CR ]; then
+ __log_conf_fail_general "Number of jobs: $NUM_JOBS must be greater than or equal to the number of CRs: $NUM_CR"
+fi
clean_environment
#use_cr_https
use_cr_http
-use_ecs_rest_https
+use_ics_rest_https
use_mr_https
use_dmaapadp_https
use_dmaapmed_https
start_kube_proxy
-start_cr
+start_cr $NUM_CR
-start_ecs NOPROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics NOPROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
-set_ecs_trace
+set_ics_trace
start_mr "unauthenticated.dmaapmed.json" "/events" "dmaapmediatorproducer/STD_Fault_Messages" \
- "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
- "unauthenticated.dmaapadp_kafka.text" "/events" "dmaapadapterproducer/msgs"
+ "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs"
+
+start_kafkapc
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapadp_kafka.text" "text/plain"
+
+kafkapc_api_start_sending 200 "unauthenticated.dmaapadp_kafka.text"
start_dmaapadp NOPROXY $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_CONFIG_FILE $SIM_GROUP/$DMAAP_ADP_COMPOSE_DIR/$DMAAP_ADP_DATA_FILE
start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
-ecs_equal json:data-producer/v1/info-producers 2 60
+ics_equal json:data-producer/v1/info-producers 2 60
# Check producers
-ecs_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
-ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+ics_api_idc_get_job_ids 200 NOTYPE NOWNER EMPTY
+ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages ExampleInformationTypeKafka
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
# Create jobs for adapter - CR stores data as MD5 hash
start_timer "Create adapter jobs: $NUM_JOBS"
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 job-adp-$i ExampleInformationType $CR_SERVICE_MR_PATH/job-adp-data$i"?storeas=md5" info-owner-adp-$i $CR_SERVICE_APP_PATH/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
+ cr_index=$(($i%$NUM_CR))
+ service_mr="CR_SERVICE_MR_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-adp-$i ExampleInformationType ${!service_mr}/job-adp-data$i"?storeas=md5" info-owner-adp-$i ${!service_app}/job_status_info-owner-adp-$i testdata/dmaap-adapter/job-template.json
done
-print_timer "Create adapter jobs: $NUM_JOBS"
+print_timer
# Create jobs for adapter kafka - CR stores data as MD5 hash
start_timer "Create adapter (kafka) jobs: $NUM_JOBS"
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 job-adp-kafka-$i ExampleInformationTypeKafka $CR_SERVICE_TEXT_PATH/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i $CR_SERVICE_APP_PATH/job_status_info-owner-adp-kafka-$i testdata/dmaap-adapter/job-template-1-kafka.json
+ cr_index=$(($i%$NUM_CR))
+ service_text="CR_SERVICE_TEXT_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-adp-kafka-$i ExampleInformationTypeKafka ${!service_text}/job-adp-kafka-data$i"?storeas=md5" info-owner-adp-kafka-$i ${!service_app}/job_status_info-owner-adp-kafka-$i testdata/dmaap-adapter/job-template-1-kafka.json
done
-print_timer "Create adapter (kafka) jobs: $NUM_JOBS"
+print_timer
# Create jobs for mediator - CR stores data as MD5 hash
start_timer "Create mediator jobs: $NUM_JOBS"
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 job-med-$i STD_Fault_Messages $CR_SERVICE_MR_PATH/job-med-data$i"?storeas=md5" info-owner-med-$i $CR_SERVICE_APP_PATH/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
+ cr_index=$(($i%$NUM_CR))
+ service_mr="CR_SERVICE_MR_PATH_"$cr_index
+ service_app="CR_SERVICE_APP_PATH_"$cr_index
+ ics_api_idc_put_job 201 job-med-$i STD_Fault_Messages ${!service_mr}/job-med-data$i"?storeas=md5" info-owner-med-$i ${!service_app}/job_status_info-owner-med-$i testdata/dmaap-adapter/job-template.json
done
-print_timer "Create mediator jobs: $NUM_JOBS"
+print_timer
# Check job status
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_a1_get_job_status 200 job-med-$i ENABLED 30
- ecs_api_a1_get_job_status 200 job-adp-$i ENABLED 30
- ecs_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
+ ics_api_a1_get_job_status 200 job-med-$i ENABLED 30
+ ics_api_a1_get_job_status 200 job-adp-$i ENABLED 30
+ ics_api_a1_get_job_status 200 job-adp-kafka-$i ENABLED 30
done
-EXPECTED_DATA_DELIV=0
+EXPECTED_DATA_DELIV=0 #Total delivered msg per CR
+DATA_DELIV_JOBS=0 #Total delivered msg per job per CR
mr_api_generate_json_payload_file 1 ./tmp/data_for_dmaap_test.json
-mr_api_generate_text_payload_file 1 ./tmp/data_for_dmaap_test.txt
+kafkapc_api_generate_text_payload_file 1 ./tmp/data_for_dmaap_test.txt
## Send json file via message-router to adapter
-
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
+DATA_DELIV_JOBS=5 #Each job will eventually get 5 msgs
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapadp.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
# Check received data callbacks from adapter
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-adp-data$i ./tmp/data_for_dmaap_test.json
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-data$i ./tmp/data_for_dmaap_test.json
done
## Send text file via message-router to adapter kafka
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 1 30
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 2 30
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 3 30
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 4 30
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-mr_api_send_text_file "/events/unauthenticated.dmaapadp_kafka.text" ./tmp/data_for_dmaap_test.txt
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
+kafkapc_api_post_msg_from_file 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" ./tmp/data_for_dmaap_test.txt
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 5 30
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
# Check received data callbacks from adapter kafka
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
- cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
- cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
- cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
- cr_api_check_single_genric_event_md5_file 200 job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-adp-kafka-data$i ./tmp/data_for_dmaap_test.txt
done
## Send json file via message-router to mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
-
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
-EXPECTED_DATA_DELIV=$(($NUM_JOBS+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS/$NUM_CR+$EXPECTED_DATA_DELIV))
mr_api_send_json_file "/events/unauthenticated.dmaapmed.json" ./tmp/data_for_dmaap_test.json
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 200
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
# Check received data callbacks from mediator
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
- cr_api_check_single_genric_event_md5_file 200 job-med-data$i ./tmp/data_for_dmaap_test.json
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
+ cr_api_check_single_genric_event_md5_file 200 $cr_index job-med-data$i ./tmp/data_for_dmaap_test.json
done
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-1"}'
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
+DATA_DELIV_JOBS=7 #Each job will eventually get 5+2 msgs
+
# Wait for data reception, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
start_timer "Data delivery adapter, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery adapter, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
# Send small text via message-routere to adapter
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------1'
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------3'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------1'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------3'
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 7 30
# Wait for data reception, adapter kafka
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
-start_timer "Data delivery adapte kafkar, 2 strings per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery adapte kafkar, 2 strings per job"
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
+start_timer "Data delivery adapter kafka, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 60
+done
+print_timer
# Send small json via message-router to mediator
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
# Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
start_timer "Data delivery mediator, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 100
-print_timer "Data delivery mediator, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
# Check received number of messages for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_equal received_callbacks?id=job-med-data$i 7
- cr_equal received_callbacks?id=job-adp-data$i 7
- cr_equal received_callbacks?id=job-adp-kafka-data$i 7
+ cr_index=$(($i%$NUM_CR))
+ cr_equal $cr_index received_callbacks?id=job-med-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callbacks?id=job-adp-data$i $DATA_DELIV_JOBS
+ cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i $DATA_DELIV_JOBS
done
# Check received data and order for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-0"}'
- cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-2"}'
- cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-1"}'
- cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-3"}'
- cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------1'
- cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------3'
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-0"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-2"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-1"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-3"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------1'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------3'
done
# Set delay in the callback receiver to slow down callbacks
SEC_DELAY=2
-cr_delay_callback 200 $SEC_DELAY
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_delay_callback 200 $i $SEC_DELAY
+done
# Send small json via message-router to adapter
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-5"}'
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-7"}'
# Wait for data reception, adapter
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
start_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV $(($NUM_JOBS+300))
-print_timer "Data delivery adapter with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
# Send small text via message-router to adapter kafka
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------5'
-mr_api_send_text "/events/unauthenticated.dmaapadp_kafka.text" 'Message-------7'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------5'
+kafkapc_api_post_msg 200 "unauthenticated.dmaapadp_kafka.text" "text/plain" 'Message-------7'
+kafkapc_equal topics/unauthenticated.dmaapadp_kafka.text/counters/sent 9 30
# Wait for data reception, adapter kafka
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
start_timer "Data delivery adapter kafka with $SEC_DELAY seconds delay in consumer, 2 strings per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV $(($NUM_JOBS+300))
-print_timer "Data delivery adapter with kafka $SEC_DELAY seconds delay in consumer, 2 strings per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
# Send small json via message-router to mediator
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-6"}'
# Wait for data reception, mediator
-EXPECTED_DATA_DELIV=$(($NUM_JOBS*2+$EXPECTED_DATA_DELIV))
+EXPECTED_DATA_DELIV=$(($NUM_JOBS*2/$NUM_CR+$EXPECTED_DATA_DELIV))
start_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 json per job"
-cr_equal received_callbacks $EXPECTED_DATA_DELIV 1000
-print_timer "Data delivery mediator with $SEC_DELAY seconds delay in consumer, 2 json per job"
+for ((i=0; i<$NUM_CR; i++))
+do
+ cr_equal $i received_callbacks $EXPECTED_DATA_DELIV 100
+done
+print_timer
# Check received number of messages for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_equal received_callbacks?id=job-med-data$i 9
- cr_equal received_callbacks?id=job-adp-data$i 9
- cr_equal received_callbacks?id=job-adp-kafka-data$i 9
+ cr_index=$(($i%$NUM_CR))
+ cr_equal $cr_index received_callbacks?id=job-med-data$i 9
+ cr_equal $cr_index received_callbacks?id=job-adp-data$i 9
+ cr_equal $cr_index received_callbacks?id=job-adp-kafka-data$i 9
done
# Check received data and order for mediator and adapter callbacks
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-4"}'
- cr_api_check_single_genric_event_md5 200 job-med-data$i '{"msg":"msg-6"}'
- cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-5"}'
- cr_api_check_single_genric_event_md5 200 job-adp-data$i '{"msg":"msg-7"}'
- cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------5'
- cr_api_check_single_genric_event_md5 200 job-adp-kafka-data$i 'Message-------7'
+ cr_index=$(($i%$NUM_CR))
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-4"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-med-data$i '{"msg":"msg-6"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-5"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-data$i '{"msg":"msg-7"}'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------5'
+ cr_api_check_single_genric_event_md5 200 $cr_index job-adp-kafka-data$i 'Message-------7'
done
#### TEST COMPLETE ####
#Supported run modes
SUPPORTED_RUNMODES="DOCKER"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
# Create service to be able to receive events when rics becomes available
# Must use rest towards the agent since dmaap is not configured yet
- api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+ api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
# Start one RIC of each type
start_ric_simulators ricsim_g1 1 OSC_2.1.0
start_mr
- start_cr
+ start_cr 1
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
if [ "$PMS_VERSION" == "V2" ]; then
api_equal json:rics 3 300
- cr_equal received_callbacks 3 120
+ cr_equal 0 received_callbacks 3 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g2_1 ricsim_g3_1
else
api_equal json:rics 2 300
fi
if [ "$PMS_VERSION" == "V2" ]; then
api_equal json:rics 4 120
- cr_equal received_callbacks 4 120
+ cr_equal 0 received_callbacks 4 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g2_2
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g2_2
else
api_equal json:rics 3 120
fi
if [ "$PMS_VERSION" == "V2" ]; then
api_equal json:rics 3 120
- cr_equal received_callbacks 4 120
+ cr_equal 0 received_callbacks 4 120
else
api_equal json:rics 2 120
fi
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_ric_simulators ricsim_g1 $NUM_RICS_2 OSC_2.1.0
- start_cr
+ start_cr 1
start_mr
# Create service to be able to receive events when rics becomes available
# Must use rest towards the agent since dmaap is not configured yet
- api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH/ric-registration"
+ api_put_service 201 "ric-registration" 0 "$CR_SERVICE_APP_PATH_0/ric-registration"
#Load first config
if [ $RUNMODE == "KUBE" ]; then
api_equal json:rics 8 300
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks?id=ric-registration 8 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
+ cr_equal 0 received_callbacks?id=ric-registration 8 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
fi
api_get_rics 200 NOTYPE "ricsim_g1_1:me1_ricsim_g1_1,me2_ricsim_g1_1:NOTYPE:???? \
ricsim_g1_8:me1_ricsim_g1_8,me2_ricsim_g1_8:4,5:???? "
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks?id=ric-registration 16 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
+ cr_equal 0 received_callbacks?id=ric-registration 16 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_1 ricsim_g1_2 ricsim_g1_3 ricsim_g1_4 ricsim_g1_5 ricsim_g1_6 ricsim_g1_7 ricsim_g1_8
fi
#Load config with all rics
api_equal json:rics 10 120
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks?id=ric-registration 18 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9 ricsim_g1_10
+ cr_equal 0 received_callbacks?id=ric-registration 18 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9 ricsim_g1_10
fi
sim_put_policy_type 201 ricsim_g1_9 5 testdata/OSC/sim_5.json
ricsim_g1_10:me1_ricsim_g1_10,me2_ricsim_g1_10:NOTYPE:???? "
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks?id=ric-registration 19 120
- cr_api_check_all_sync_events 200 ric-registration ricsim_g1_9
+ cr_equal 0 received_callbacks?id=ric-registration 19 120
+ cr_api_check_all_sync_events 200 0 ric-registration ricsim_g1_9
fi
#No policy type in sim #10
api_equal json:policy_types 5
fi
- api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/serv1"
+ api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/serv1"
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
api_equal json:rics 8 120
if [ "$PMS_VERSION" == "V2" ]; then
- cr_equal received_callbacks?id=ric-registration 19 120
- cr_api_check_all_sync_events 200 ric-registration EMPTY
+ cr_equal 0 received_callbacks?id=ric-registration 19 120
+ cr_api_check_all_sync_events 200 0 ric-registration EMPTY
fi
if [ "$PMS_VERSION" == "V2" ]; then
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_mr
- start_cr
+ start_cr 1
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
api_equal json:policy_types 2 300 #Wait for the agent to refresh types from the simulators
fi
- api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "serv1" 3600 "$CR_SERVICE_APP_PATH_0/1"
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
generate_policy_uuid
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
consul_config_app ".consul_config.json"
fi
- start_cr
+ start_cr 1
api_get_status 200
api_equal json:policy_types 1 300 #Wait for the agent to refresh types from the simulator
fi
- api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
echo "Check the number of types in the agent for each ric is 1"
for ((i=1; i<=$NUM_RICS; i++))
sim_equal ricsim_g1_$i num_instances $NUM_POLICIES_PER_RIC
done
- api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+ api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
stop_policy_agent
print_timer "Restore $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices after restart over $interface using "$__httpx
- api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+ api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
start_timer "Delete $((NUM_POLICIES_PER_RIC*$NUM_RICS)) polices over $interface using "$__httpx
sleep_wait 200
- api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+ api_get_services 200 "serv1" "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
api_equal json:policies 0
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
start_mr
-start_cr
+start_cr 1
start_control_panel $SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_CONFIG_FILE
done
echo "Register a service"
-api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "serv1" 0 "$CR_SERVICE_APP_PATH_0/1"
TEST_DURATION=$((24*3600*$DAYS))
TEST_START=$SECONDS
MR_MESSAGES=0
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
generate_policy_uuid
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
start_mr # Not used, but removes error messages from the agent log
- start_cr
+ start_cr 1
api_get_status 200
api_equal json:policy_types 1 300 #Wait for the agent to refresh types from the simulator
fi
- api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "serv1" 600 "$CR_SERVICE_APP_PATH_0/1"
echo "Check the number of types in the agent for each ric is 1"
for ((i=1; i<=$NUM_RICS; i++))
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
-. ../common/cr_api_functions.sh
setup_testenvironment
# Create policies
use_agent_rest_http
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
#App names to include in the test when running kubernetes, space separated list
KUBE_INCLUDED_IMAGES=" MR CR PRODSTUB KUBEPROXY"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
-KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ECS RC SDNC DMAAPMED DMAAPADP"
+KUBE_PRESTARTED_IMAGES=" PA RICSIM CP ICS RC SDNC DMAAPMED DMAAPADP"
#Supported test environment profiles
SUPPORTED_PROFILES="ORAN-E-RELEASE"
SUPPORTED_RUNMODES="KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/dmaapmed_api_functions.sh
-. ../common/dmaapadp_api_functions.sh
setup_testenvironment
use_agent_rest_https
use_sdnc_https
use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
use_prod_stub_https
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
use_rapp_catalogue_http # https not yet supported
else
########################################use_rapp_catalogue_https
pms_kube_pvc_reset
-ecs_kube_pvc_reset
+ics_kube_pvc_reset
start_kube_proxy
start_policy_agent
-start_cr
+start_cr 1
start_prod_stub
-start_ecs NOPROXY
+start_ics NOPROXY
-set_ecs_trace
+set_ics_trace
start_rapp_catalogue
api_equal json:policy_ids 0
fi
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/ER-app"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/ER-app"
# Create policies in STD
for ((i=0; i<$STD_NUM_RICS; i++))
do
ricid=$((3+$i))
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD/pi1_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid NOTYPE $((1200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD/pi1_template.json 1
done
#Create policies in STD 2
do
ricid=$((5+$i))
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS_0_2_0 $((2100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos_template.json 1
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"std2" testdata/STD2/pi_qos2_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid STD_QOS2_0.1.0 $((2200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"std2" testdata/STD2/pi_qos2_template.json 1
done
# Create policies in OSC
do
ricid=$((1+$i))
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi1_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid 1 $((3100+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi1_template.json 1
generate_policy_uuid
- api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH/"osc" testdata/OSC/pi2_template.json 1
+ api_put_policy 201 "Emergency-response-app" ric$ricid 2 $((3200+$i)) NOTRANSIENT $CR_SERVICE_APP_PATH_0/"osc" testdata/OSC/pi2_template.json 1
done
FLAT_A1_EI="1"
-ecs_api_admin_reset
+ics_api_admin_reset
CB_JOB="$PROD_STUB_SERVICE_PATH$PROD_STUB_JOB_CALLBACK"
CB_SV="$PROD_STUB_SERVICE_PATH$PROD_STUB_SUPERVISION_CALLBACK"
TARGET1="$RIC_SIM_HTTPX://a1-sim-std2-0.a1-sim:$RIC_SIM_PORT/datadelivery"
TARGET2="$RIC_SIM_HTTPX://a1-sim-std2-1.a1-sim:$RIC_SIM_PORT/datadelivery"
-STATUS1="$CR_SERVICE_APP_PATH/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/job2-status"
prodstub_arm_producer 200 prod-a
prodstub_arm_type 200 prod-a type1
prodstub_arm_job_create 200 prod-a job2
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
else
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
- ecs_api_edp_get_type_2 200 type1
- ecs_api_edp_get_type_ids 200 type1
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
+ ics_api_edp_get_type_2 200 type1
+ ics_api_edp_get_type_ids 200 type1
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
## Create a job for prod-a
## job1 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
fi
fi
## Create a second job for prod-a
## job2 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
- prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
fi
fi
start_dmaapmed NOPROXY $SIM_GROUP/$DMAAP_MED_COMPOSE_DIR/$DMAAP_MED_DATA_FILE
-ecs_equal json:ei-producer/v1/eiproducers 2 60
+ics_equal json:ei-producer/v1/eiproducers 2 60
-ecs_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages
+ics_api_idc_get_type_ids 200 ExampleInformationType STD_Fault_Messages
-ecs_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
+ics_api_edp_get_producer_ids_2 200 NOTYPE DmaapGenericInfoProducer DMaaP_Mediator_Producer
NUM_JOBS=5
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
+ ics_api_idc_put_job 201 jobx$i STD_Fault_Messages $CR_SERVICE_MR_PATH_0/jobx-data$i info-ownerx$i $CR_SERVICE_MR_PATH_0/job_status_info-ownerx$i testdata/dmaap-adapter/job-template.json
done
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
+ ics_api_idc_put_job 201 joby$i ExampleInformationType $CR_SERVICE_MR_PATH_0/joby-data$i info-ownery$i $CR_SERVICE_MR_PATH_0/job_status_info-ownery$i testdata/dmaap-adapter/job-template.json
done
for ((i=1; i<=$NUM_JOBS; i++))
do
- ecs_api_a1_get_job_status 200 jobx$i ENABLED 30
+ ics_api_a1_get_job_status 200 jobx$i ENABLED 30
done
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-0"}'
mr_api_send_json "/events/unauthenticated.dmaapmed.json" '{"msg":"msg-2"}'
mr_api_send_json "/events/unauthenticated.dmaapadp.json" '{"msg":"msg-3"}'
-cr_equal received_callbacks $(($NUM_JOBS*2*2)) 60
+cr_equal 0 received_callbacks $(($NUM_JOBS*2*2)) 60
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_equal received_callbacks?id=jobx-data$i 2
- cr_equal received_callbacks?id=joby-data$i 2
+ cr_equal 0 received_callbacks?id=jobx-data$i 2
+ cr_equal 0 received_callbacks?id=joby-data$i 2
done
for ((i=1; i<=$NUM_JOBS; i++))
do
- cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-0"}'
- cr_api_check_single_genric_json_event 200 jobx-data$i '{"msg":"msg-2"}'
- cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-1"}'
- cr_api_check_single_genric_json_event 200 joby-data$i '{"msg":"msg-3"}'
+ cr_api_check_single_genric_json_event 200 0 jobx-data$i '{"msg":"msg-0"}'
+ cr_api_check_single_genric_json_event 200 0 jobx-data$i '{"msg":"msg-2"}'
+ cr_api_check_single_genric_json_event 200 0 joby-data$i '{"msg":"msg-1"}'
+ cr_api_check_single_genric_json_event 200 0 joby-data$i '{"msg":"msg-3"}'
done
-stop_ecs
+stop_ics
-start_stopped_ecs
+start_stopped_ics
-# Check ECS status after restart
+# Check ICS status after restart
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_get_job_status 200 type1 job1 DISABLED
- ecs_api_a1_get_job_status 200 type1 job2 DISABLED
+ ics_api_a1_get_job_status 200 type1 job1 DISABLED
+ ics_api_a1_get_job_status 200 type1 job2 DISABLED
else
- ecs_api_a1_get_job_status 200 job1 DISABLED
- ecs_api_a1_get_job_status 200 job2 DISABLED
+ ics_api_a1_get_job_status 200 job1 DISABLED
+ ics_api_a1_get_job_status 200 job2 DISABLED
fi
check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
check_sdnc_logs
#### TEST COMPLETE ####
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
setup_testenvironment
use_mr_https
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
echo "Version V2 of PMS is needed, exiting..."
exit 1
# Create policies
use_agent_rest_http
- api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+ api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
# Create policies in OSC
for ((i=1; i<=$OSC_NUM_RICS; i++))
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
setup_testenvironment
use_simulator_https
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
notificationurl=""
fi
# Create policies
use_agent_rest_http
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
# Create policies in OSC
for ((i=1; i<=$OSC_NUM_RICS; i++))
# ============LICENSE_END=================================================
#
-TC_ONELINE_DESCR="Preparation demo setup - policy management and enrichment information"
+TC_ONELINE_DESCR="Preparation demo setup - policy management and information coordination"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC ECS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
+DOCKER_INCLUDED_IMAGES="CBS CONSUL CP CR MR PA RICSIM SDNC ICS PRODSTUB RC HTTPPROXY KUBEPROXY NGW"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES=" MR CR PA RC PRODSTUB RICSIM CP ECS SDNC HTTPPROXY KUBEPROXY NGW"
+KUBE_INCLUDED_IMAGES=" MR CR PA RC PRODSTUB RICSIM CP ICS SDNC HTTPPROXY KUBEPROXY NGW"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
SUPPORTED_RUNMODES="DOCKER KUBE"
. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/ecs_api_functions.sh
-. ../common/prodstub_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/rapp_catalogue_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
setup_testenvironment
use_agent_rest_https
use_sdnc_https
use_simulator_https
-use_ecs_rest_https
+use_ics_rest_https
use_prod_stub_https
-if [ $ECS_VERSION == "V1-1" ]; then
+if [ $ICS_VERSION == "V1-1" ]; then
use_rapp_catalogue_http # https not yet supported
else
use_rapp_catalogue_https
if [ "$PMS_VERSION" == "V2" ]; then
- notificationurl=$CR_SERVICE_APP_PATH"/test"
+ notificationurl=$CR_SERVICE_APP_PATH_0"/test"
else
echo "PMS VERSION 2 (V2) is required"
exit 1
consul_config_app ".consul_config.json"
fi
-start_cr
+start_cr 1
start_prod_stub
-start_ecs PROXY $SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_CONFIG_FILE
+start_ics PROXY $SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_CONFIG_FILE
start_rapp_catalogue
set_agent_trace
-set_ecs_trace
+set_ics_trace
use_info_jobs=false #Set flag if interface supporting info-types is used
-if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
use_info_jobs=true
fi
#Check the number of types
api_equal json:policy-types 2 300
-api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH/1"
+api_put_service 201 "Emergency-response-app" 0 "$CR_SERVICE_APP_PATH_0/1"
# Create policies in STD
for ((i=1; i<=$STD_NUM_RICS; i++))
TARGET1="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
TARGET2="$RIC_SIM_HTTPX://$RIC_G1_1:$RIC_SIM_PORT/datadelivery"
-STATUS1="$CR_SERVICE_APP_PATH/callbacks/job1-status"
-STATUS2="$CR_SERVICE_APP_PATH/callbacks/job2-status"
+STATUS1="$CR_SERVICE_APP_PATH_0/callbacks/job1-status"
+STATUS2="$CR_SERVICE_APP_PATH_0/callbacks/job2-status"
prodstub_arm_producer 200 prod-a
prodstub_arm_type 200 prod-a type1
prodstub_arm_job_create 200 prod-a job1
prodstub_arm_job_create 200 prod-a job2
-### ecs status
-ecs_api_service_status 200
+### ics status
+ics_api_service_status 200
## Setup prod-a
-if [ $ECS_VERSION == "V1-1" ]; then
- ecs_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ ics_api_edp_put_producer 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
- ecs_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_get_producer 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1 testdata/ics/ei-type-1.json
else
- ecs_api_edp_put_type_2 201 type1 testdata/ecs/ei-type-1.json
+ ics_api_edp_put_type_2 201 type1 testdata/ics/ei-type-1.json
- ecs_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_put_producer_2 201 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
- ecs_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
+ ics_api_edp_get_producer_2 200 prod-a $CB_JOB/prod-a $CB_SV/prod-a type1
fi
-ecs_api_edp_get_producer_status 200 prod-a ENABLED
+ics_api_edp_get_producer_status 200 prod-a ENABLED
## Create a job for prod-a
## job1 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job1 type1 $TARGET1 ricsim_g3_1 $STATUS1 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-a job1 type1 $TARGET1 ricsim_g3_1 testdata/ics/job-template.json
fi
fi
## Create a second job for prod-a
## job2 - prod-a
if [ -z "$FLAT_A1_EI" ]; then
- ecs_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 type1 job2 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- ecs_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ecs/job-template.json
+ ics_api_a1_put_job 201 job2 type1 $TARGET2 ricsim_g3_2 $STATUS2 testdata/ics/job-template.json
fi
# Check the job data in the producer
-if [ $ECS_VERSION == "V1-1" ]; then
- prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+if [ $ICS_VERSION == "V1-1" ]; then
+ prodstub_check_jobdata 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
if [ $use_info_jobs ]; then
- prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ prodstub_check_jobdata_3 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
else
- prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ecs/job-template.json
+ prodstub_check_jobdata_2 200 prod-a job2 type1 $TARGET2 ricsim_g3_2 testdata/ics/job-template.json
fi
fi
check_policy_agent_logs
-check_ecs_logs
+check_ics_logs
check_sdnc_logs
#### TEST COMPLETE ####
>```./PM_DEMO.sh remote-remove kube release --env-file ../common/test_env-onap-guilin.sh```
-Note that ECS was not available before oran cherry so a test script without ECS is used.
+Note that ICS was not available before ORAN Cherry so a test script without ICS is used.
ONAP HONOLULU
=============
## Test case categories
-The test script are number using these basic categories where 0-999 are releated to the policy managment and 1000-1999 are related to enrichment management. 2000-2999 are for southbound http proxy. There are also demo test cases that test more or less all components. These test scripts does not use the numbering scheme below.
+The test scripts are numbered using these basic categories where 0-999 are related to the policy management and 1000-1999 are related to information management. 2000-2999 are for southbound http proxy. There are also demo test cases that test more or less all components. These test scripts do not use the numbering scheme below.
The numbering in each series corresponds to the following groupings
1-99 - Basic sanity tests
900-999 - Misc test
-11XX - ECS API Tests
+11XX - ICS API Tests
-18XX - ECS Stability and capacity test
+18XX - ICS Stability and capacity test
2000 - Southbound http proxy tests
CONDITIONALLY_IGNORED_IMAGES=<list of images to exclude if it does not exist in the profile file>
-. ../common/testcase_common.sh $@
-< other scripts need to be sourced for specific interfaces>
+. ../common/testcase_common.sh $@
setup_testenvironment
# ============LICENSE_END=================================================
#
-
TC_ONELINE_DESCR="Starts DMAAP MR"
#App names to include in the test when running docker, space separated list
-DOCKER_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY"
+DOCKER_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY KAFKAPC"
#App names to include in the test when running kubernetes, space separated list
-KUBE_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY"
+KUBE_INCLUDED_IMAGES="MR DMAAPMR KUBEPROXY KAFKAPC"
#Prestarted app (not started by script) to include in the test when running kubernetes, space separated list
KUBE_PRESTARTED_IMAGES=""
CONDITIONALLY_IGNORED_IMAGES=""
#Supported test environment profiles
-SUPPORTED_PROFILES="ONAP-HONOLULU ONAP-ISTANBUL ORAN-D-RELEASE ORAN-E-RELEASE"
+SUPPORTED_PROFILES="ORAN-E-RELEASE"
#Supported run modes
SUPPORTED_RUNMODES="DOCKER KUBE"
-. ../common/testcase_common.sh $@
-. ../common/agent_api_functions.sh
-. ../common/consul_cbs_functions.sh
-. ../common/control_panel_api_functions.sh
-. ../common/controller_api_functions.sh
-. ../common/cr_api_functions.sh
-. ../common/mr_api_functions.sh
-. ../common/ricsimulator_api_functions.sh
-. ../common/http_proxy_api_functions.sh
-. ../common/kube_proxy_api_functions.sh
-. ../common/gateway_api_functions.sh
+. ../common/testcase_common.sh $@
setup_testenvironment
clean_environment
start_kube_proxy
start_mr "$MR_READ_TOPIC" "/events" "users/policy-agent" \
- "$MR_WRITE_TOPIC" "/events" "users/mr-stub" \
- "unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
- "unauthenticated.dmaapmed.json" "/events" "maapmediatorproducer/STD_Fault_Messages"
+ "$MR_WRITE_TOPIC" "/events" "users/mr-stub"
+ #\
+ #"unauthenticated.dmaapadp.json" "/events" "dmaapadapterproducer/msgs" \
+ #"unauthenticated.dmaapmed.json" "/events" "maapmediatorproducer/STD_Fault_Messages"
+
+start_kafkapc
+
+kafkapc_api_reset 200
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapadp.json" "application/json"
+
+kafkapc_api_create_topic 201 "unauthenticated.dmaapmed.json" "application/json"
+
+dmaap_api_print_topics
if [ $RUNMODE == "KUBE" ]; then
:
Contains functions for adapting towards the Policy Management Service (PMS) API, also via dmaap (using a message-router stub interface)
`api_curl.sh` \
-A common curl based function for the agent and ecs apis. Also partly used for the Callback receiver and RAPP Catalogue apis.
+A common curl-based function for the agent and ICS APIs. Also partly used for the Callback receiver and RAPP Catalogue APIs.
`clean-kube.sh` \
Cleans all services, deployments, pods, replica set etc started by the test environment in kubernetes.
`do_curl_function.sh`
A script for executing a curl call with a specific url and optional payload. It also compares the response with an expected result in terms of response code and optional returned payload. Intended to be used by test scripts (for example basic test scripts of other components)
-`ecs_api_functions.sh` \
-Contains functions for adapting towards the ECS API
+`ics_api_functions.sh` \
+Contains functions for adapting towards the ICS API
`extract_sdnc_reply.py` \
A python script to extract the information from an sdnc (A1 Controller) reply json. Helper for the test environment.
| --------- | ----------- |
| `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
-## Function: print_and_reset_timer ##
-
-Print the value of the timer (in seconds) previously started by 'start_timer'. Also reset the timer to 0. The result of the timer as well as the args to the function will also be printed in the test report.
-| arg list |
-|--|
-| `<timer-message-to-print>` |
-
-| parameter | description |
-| --------- | ----------- |
-| `<timer-message-to-print>` | Any text message to be printed along with the timer result.(It is good practice to use same args for this function as for the `start_timer`) |
-
## Function: deviation ##
Mark a test as a deviation from the requirements. The list of deviations will be printed in the test report.
| `EMPTY` | Indicator for an empty list |
| `<ric-id>` | Id of the ric |
-## Function: cr_api_check_all_ecs_events() ##
+## Function: cr_api_check_all_ics_events() ##
-Check the contents of all current status events for one id from ECS
+Check the contents of all current status events for one id from ICS
| arg list |
|--|
| `EMPTY` | Indicator for an empty list |
| `<status>` | Status string |
-## Function: cr_api_check_all_ecs_subscription_events() ##
+## Function: cr_api_check_all_ics_subscription_events() ##
-Check the contents of all current subscription events for one id from ECS
+Check the contents of all current subscription events for one id from ICS
| arg list |
|--|
| - |
-# Description of functions in ecs_api_functions.sh #
+# Description of functions in ics_api_functions.sh #
-## Function: use_ecs_rest_http ##
+## Function: use_ics_rest_http ##
-Use http for all API calls to the ECS. This is the default protocol.
+Use http for all API calls to the ICS. This is the default protocol.
| arg list |
|--|
| None |
-## Function: use_ecs_rest_https ##
+## Function: use_ics_rest_https ##
-Use https for all API calls to the ECS.
+Use https for all API calls to the ICS.
| arg list |
|--|
| None |
-## Function: use_ecs_dmaap_http ##
+## Function: use_ics_dmaap_http ##
-Send and recieve all API calls to the ECS over Dmaap via the MR using http.
+Send and receive all API calls to the ICS over Dmaap via the MR using http.
| arg list |
|--|
| None |
-## Function: use_ecs_dmaap_https ##
+## Function: use_ics_dmaap_https ##
-Send and recieve all API calls to the ECS over Dmaap via the MR using https.
+Send and receive all API calls to the ICS over Dmaap via the MR using https.
| arg list |
|--|
| None |
-## Function: start_ecs ##
+## Function: start_ics ##
-Start the ECS container in docker or kube depending on running mode.
+Start the ICS container in docker or kube depending on running mode.
| arg list |
|--|
| None |
-## Function: stop_ecs ##
+## Function: stop_ics ##
-Stop the ECS container.
+Stop the ICS container.
| arg list |
|--|
| None |
-## Function: start_stopped_ecs ##
+## Function: start_stopped_ics ##
-Start a previously stopped ecs.
+Start a previously stopped ICS.
| arg list |
|--|
| None |
-## Function: set_ecs_debug ##
+## Function: set_ics_debug ##
-Configure the ECS log on debug level. The ECS must be running.
+Configure the ICS log on debug level. The ICS must be running.
| arg list |
|--|
| None |
-## Function: set_ecs_trace ##
+## Function: set_ics_trace ##
-Configure the ECS log on trace level. The ECS must be running.
+Configure the ICS log on trace level. The ICS must be running.
| arg list |
|--|
| None |
-## Function: check_ecs_logs ##
+## Function: check_ics_logs ##
-Check the ECS log for any warnings and errors and print the count of each.
+Check the ICS log for any warnings and errors and print the count of each.
| arg list |
|--|
| None |
-## Function: ecs_equal ##
+## Function: ics_equal ##
-Tests if a variable value in the ECS is equal to a target value.
+Tests if a variable value in the ICS is equal to a target value.
Without the timeout, the test sets pass or fail immediately depending on if the variable is equal to the target or not.
With the timeout, the test waits up to the timeout seconds before setting pass or fail depending on if the variable value becomes equal to the target value or not.
See the 'a1-interface' repo for more details.
| parameter | description |
| --------- | ----------- |
-| `<variable-name>` | Variable name in ecs |
| `<variable-name>` | Variable name in ICS |
| `<target-value>` | Target value for the variable |
| `<timeout-in-sec>` | Max time to wait for the variable to reach the target value |
-## Function: ecs_api_a1_get_job_ids() ##
+## Function: ics_api_a1_get_job_ids() ##
Test of GET '/A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs' and optional check of the array of returned job ids.
To test the response code only, provide the response code parameter as well as a type id and an owner id.
| `<job-id>` | Id of the expected job |
| `EMPTY` | The expected list of job id shall be empty |
-## Function: ecs_api_a1_get_type() ##
+## Function: ics_api_a1_get_type() ##
Test of GET '/A1-EI​/v1​/eitypes​/{eiTypeId}' and optional check of the returned schema.
To test the response code only, provide the response code parameter as well as the type-id.
| `<type-id>` | Id of the EI type |
| `<schema-file>` | Path to a schema file to compare with the returned schema |
-## Function: ecs_api_a1_get_type_ids() ##
+## Function: ics_api_a1_get_type_ids() ##
Test of GET '/A1-EI​/v1​/eitypes' and optional check of returned list of type ids.
To test the response code only, provide the response code.
| `EMPTY` | The expected list of type ids shall be empty |
| `<type-id>` | Id of the EI type |
-## Function: ecs_api_a1_get_job_status() ##
+## Function: ics_api_a1_get_job_status() ##
Test of GET '/A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs​/{eiJobId}​/status' and optional check of the returned status.
To test the response code only, provide the response code, type id and job id.
| `<job-id>` | Id of the job |
| `<status>` | Expected status |
-## Function: ecs_api_a1_get_job() ##
+## Function: ics_api_a1_get_job() ##
Test of GET '/A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs​/{eiJobId}' and optional check of the returned job.
To test the response code only, provide the response code, type id and job id.
| `<owner-id>` | Expected owner for the job |
| `<template-job-file>` | Path to a job template for job parameters of the job |
-## Function: ecs_api_a1_delete_job() ##
+## Function: ics_api_a1_delete_job() ##
Test of DELETE '/A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs​/{eiJobId}'.
To test, provide all the specified parameters.
| `<type-id>` | Id of the EI type |
| `<job-id>` | Id of the job |
-## Function: ecs_api_a1_put_job() ##
+## Function: ics_api_a1_put_job() ##
Test of PUT '/A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs​/{eiJobId}'.
To test, provide all the specified parameters.
| `<owner-id>` | Owner of the job |
| `<template-job-file>` | Path to a job template for job parameters of the job |
-## Function: ecs_api_edp_get_type_ids() ##
+## Function: ics_api_edp_get_type_ids() ##
-Test of GET '/ei-producer/v1/eitypes' or '/data-producer/v1/info-types' depending on ecs version and an optional check of the returned list of type ids.
+Test of GET '/ei-producer/v1/eitypes' or '/data-producer/v1/info-types' depending on ics version and an optional check of the returned list of type ids.
To test the response code only, provide the response code.
To also test the response payload add list of expected type ids (or EMPTY if the list is expected to be empty).
| `<type-id>` | Id of the type |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_get_producer_status() ##
+## Function: ics_api_edp_get_producer_status() ##
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/status' or '/data-producer/v1/info-producers/{infoProducerId}/status' depending on ecs version and optional check of the returned status.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/status' or '/data-producer/v1/info-producers/{infoProducerId}/status' depending on ics version and optional check of the returned status.
To test the response code only, provide the response code and producer id.
To also test the response payload add the expected status.
| `<producer-id>` | Id of the producer |
| `<status>` | The expected status string |
-## Function: ecs_api_edp_get_producer_ids() ##
+## Function: ics_api_edp_get_producer_ids() ##
Test of GET '/ei-producer/v1/eiproducers' and optional check of the returned producer ids.
To test the response code only, provide the response.
| `<producer-id>` | Id of the producer |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_get_producer_ids_2() ##
+## Function: ics_api_edp_get_producer_ids_2() ##
-Test of GET '/ei-producer/v1/eiproducers' or '/data-producer/v1/info-producers' depending on ecs version and optional check of the returned producer ids.
+Test of GET '/ei-producer/v1/eiproducers' or '/data-producer/v1/info-producers' depending on ics version and optional check of the returned producer ids.
To test the response code only, provide the response.
To also test the response payload add the type (if any) and a list of expected producer-ids (or EMPTY if the list of ids is expected to be empty).
| `<producer-id>` | Id of the producer |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_get_type() ##
+## Function: ics_api_edp_get_type() ##
Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' and optional check of the returned type.
To test the response code only, provide the response and the type-id.
| `<producer-id>` | Id of the producer |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_get_type_2() ##
+## Function: ics_api_edp_get_type_2() ##
-Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of GET '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
To test the response code only, provide the response and the type-id.
To also test the response payload add a path to a job schema file.
| `<job-schema-file>` | Path to a job schema file |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_put_type_2() ##
+## Function: ics_api_edp_put_type_2() ##
-Test of PUT '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of PUT '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
| arg list |
|--|
| `<job-schema-file>` | Path to a job schema file |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_edp_delete_type_2() ##
+## Function: ics_api_edp_delete_type_2() ##
-Test of DELETE '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ecs version and optional check of the returned type.
+Test of DELETE '/ei-producer/v1/eitypes/{eiTypeId}' or '/data-producer/v1/info-types/{infoTypeId}' depending on ics version and optional check of the returned type.
| arg list |
|--|
| `<response-code>` | Expected http response code |
| `<type-id>` | Id of the type |
-## Function: ecs_api_edp_get_producer() ##
+## Function: ics_api_edp_get_producer() ##
Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' and optional check of the returned producer.
To test the response code only, provide the response and the producer-id.
| `<schema-file>` | Path to a schema file |
| `EMPTY` | The expected list of type schema pairs shall be empty |
-## Function: ecs_api_edp_get_producer_2() ##
+## Function: ics_api_edp_get_producer_2() ##
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version and optional check of the returned producer.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version and optional check of the returned producer.
To test the response code only, provide the response and the producer-id.
To also test the response payload add the remaining parameters defining the producer.
| `<type-id>` | Id of the type |
| `EMPTY` | The expected list of types shall be empty |
-## Function: ecs_api_edp_delete_producer() ##
+## Function: ics_api_edp_delete_producer() ##
-Test of DELETE '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version.
+Test of DELETE '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version.
To test, provide all parameters.
| arg list |
| `<response-code>` | Expected http response code |
| `<producer-id>` | Id of the producer |
-## Function: ecs_api_edp_put_producer() ##
+## Function: ics_api_edp_put_producer() ##
Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}'.
To test, provide all parameters. The list of type/schema pair may be empty.
| `<schema-file>` | Path to a schema file |
| `EMPTY` | The list of type/schema pairs is empty |
-## Function: ecs_api_edp_put_producer_2() ##
+## Function: ics_api_edp_put_producer_2() ##
-Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ecs version.
+Test of PUT '/ei-producer/v1/eiproducers/{eiProducerId}' or '/data-producer/v1/info-producers/{infoProducerId}' depending on ics version.
To test, provide all parameters. The list of type/schema pair may be empty.
| arg list |
| `<type-id>` | Id of the type |
| `NOTYPE` | The list of types is empty |
-## Function: ecs_api_edp_get_producer_jobs() ##
+## Function: ics_api_edp_get_producer_jobs() ##
Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' and optional check of the returned producer job.
To test the response code only, provide the response and the producer-id.
| `<template-job-file>` | Path to a job template file |
| `EMPTY` | The list of job/type/target/job-file tuples is empty |
-## Function: ecs_api_edp_get_producer_jobs_2() ##
+## Function: ics_api_edp_get_producer_jobs_2() ##
-Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' or '/data-producer/v1/info-producers/{infoProducerId}/info-jobs' depending on ecs version and optional check of the returned producer job.
+Test of GET '/ei-producer/v1/eiproducers/{eiProducerId}/eijobs' or '/data-producer/v1/info-producers/{infoProducerId}/info-jobs' depending on ics version and optional check of the returned producer job.
To test the response code only, provide the response and the producer-id.
To also test the response payload add the remaining parameters.
| `<template-job-file>` | Path to a job template file |
| `EMPTY` | The list of job/type/target/job-file tuples is empty |
-## Function: ecs_api_service_status() ##
+## Function: ics_api_service_status() ##
Test of GET '/status'.
| --------- | ----------- |
| `<response-code>` | Expected http response code |
-## Function: ecs_api_idc_get_type_ids() ##
+## Function: ics_api_idc_get_type_ids() ##
Test of GET '/data-consumer/v1/info-types' and an optional check of the returned list of type ids.
To test the response code only, provide the response code.
| `<type-id>` | Id of the Info type |
| `EMPTY` | The expected list of type ids shall be empty |
-## Function: ecs_api_idc_get_job_ids() ##
+## Function: ics_api_idc_get_job_ids() ##
Test of GET '/data-consumer/v1/info-jobs' and optional check of the array of returned job ids.
To test the response code only, provide the response code parameter as well as a type id and an owner id.
| `<job-id>` | Id of the expected job |
| `EMPTY` | The expected list of job id shall be empty |
-## Function: ecs_api_idc_get_job() ##
+## Function: ics_api_idc_get_job() ##
Test of GET '/data-consumer/v1/info-jobs/{infoJobId}' and optional check of the returned job.
To test the response code only, provide the response code, type id and job id.
| `<owner-id>` | Expected owner for the job |
| `<template-job-file>` | Path to a job template for job parameters of the job |
-## Function: ecs_api_idc_put_job() ##
+## Function: ics_api_idc_put_job() ##
Test of PUT '​/data-consumer/v1/info-jobs/{infoJobId}'.
To test, provide all the specified parameters.
| `<template-job-file>` | Path to a job template for job parameters of the job |
| `VALIIDATE` | Indicator to perform type validation at creation |
-## Function: ecs_api_idc_delete_job() ##
+## Function: ics_api_idc_delete_job() ##
Test of DELETE '/data-consumer/v1/info-jobs/{infoJobId}'.
To test, provide all the specified parameters.
| `<type-id>` | Id of the type |
| `<job-id>` | Id of the job |
-## Function: ecs_api_idc_get_type() ##
+## Function: ics_api_idc_get_type() ##
Test of GET '/data-consumer/v1/info-types/{infoTypeId}' and optional check of the returned schema.
To test the response code only, provide the response code parameter as well as the type-id.
| `<type-id>` | Id of the Info type |
| `<schema-file>` | Path to a schema file to compare with the returned schema |
-## Function: ecs_api_idc_get_job_status() ##
+## Function: ics_api_idc_get_job_status() ##
Test of GET '/data-consumer/v1/info-jobs/{infoJobId}/status' and optional check of the returned status and timeout.
To test the response code only, provide the response code and job id.
| `<status>` | Expected status |
| `<timeout>` | Timeout |
-## Function: ecs_api_idc_get_job_status2() ##
+## Function: ics_api_idc_get_job_status2() ##
Test of GET '/data-consumer/v1/info-jobs/{infoJobId}/status' with returned producers and optional check of the returned status and timeout.
To test the response code only, provide the response code and job id.
| `<timeout>` | Timeout |
-## Function: ecs_api_idc_get_subscription_ids() ##
+## Function: ics_api_idc_get_subscription_ids() ##
Test of GET '/data-consumer/v1/info-type-subscription' with the returned list of subscription ids
| arg list |
| `<EMPTY>` | Indicates an empty list of subscription ids |
| `<subscription-id>` |Id of the subscription |
-## Function: ecs_api_idc_get_subscription() ##
+## Function: ics_api_idc_get_subscription() ##
Test of GET '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the subscription information
| arg list |
| `<status-uri>` | Url for status notifications |
-## Function: ecs_api_idc_put_subscription() ##
+## Function: ics_api_idc_put_subscription() ##
Test of PUT '/data-consumer/v1/info-type-subscription/{subscriptionId}' with the subscription information
| arg list |
| `<owner-id>` | Id of the owner |
| `<status-uri>` | Url for status notifications |
-## Function: ecs_api_idc_delete_subscription() ##
+## Function: ics_api_idc_delete_subscription() ##
Test of DELETE /data-consumer/v1/info-type-subscription/{subscriptionId}
| arg list |
| `<subscription-id>` |Id of the subscription |
-## Function: ecs_api_admin_reset() ##
+## Function: ics_api_admin_reset() ##
Test of GET '/status'.
| --------- | ----------- |
| `<response-code>` | Expected http response code |
-## Function: gateway_ecs_get_types ##
+## Function: gateway_ics_get_types ##
-Sample test of ecs api (get types)
+Sample test of ics api (get types)
Only response code tested - not payload
| arg list |
|--|
# ============LICENSE_END=================================================
#
-# Generic function to query the agent/ECS via the REST or DMAAP interface.
-# Used by all other agent/ECS api test functions
+# Generic function to query the agent/ICS via the REST or DMAAP interface.
+# Used by all other agent/ICS api test functions
# If the operation suffix is '_BATCH' then the send and get of the response are split into two sequences,
# one for sending the requests and one for receiving the response
# but only when using the DMAAP interface
# REST or DMAAP is controlled of the base url of $XX_ADAPTER
-# arg: (PA|ECS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file> [mime-type]]) | (PA|ECS RESPONSE <correlation-id>)
+# arg: (PA|ICS|CR|RC GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url>|<correlation-id> [<file> [mime-type]]) | (PA|ICS RESPONSE <correlation-id>)
# Default mime type for file is application/json unless specified in parameter mime-type
# (Not for test scripts)
__do_curl_to_api() {
if [ $PMS_VERSION != "V1" ]; then
input_url=$PMS_API_PREFIX$3
fi
- elif [ $1 == "ECS" ]; then
- __ADAPTER=$ECS_ADAPTER
- __ADAPTER_TYPE=$ECS_ADAPTER_TYPE
- __RETRY_CODES=$ECS_RETRY_CODES
+ elif [ $1 == "ICS" ]; then
+ __ADAPTER=$ICS_ADAPTER
+ __ADAPTER_TYPE=$ICS_ADAPTER_TYPE
+ __RETRY_CODES=$ICS_RETRY_CODES
elif [ $1 == "CR" ]; then
__ADAPTER=$CR_ADAPTER
__ADAPTER_TYPE=$CR_ADAPTER_TYPE
__ADAPTER=$MR_DMAAP_ADAPTER_HTTP
__ADAPTER_TYPE=$MR_DMAAP_ADAPTER_TYPE
__RETRY_CODES=""
+ elif [ $1 == "KAFKAPC" ]; then
+ __ADAPTER=$KAFKAPC_ADAPTER
+ __ADAPTER_TYPE=$KAFKAPC_ADAPTER_TYPE
+ __RETRY_CODES=""
else
paramError=1
fi
if [ $# -ne 3 ]; then
paramError=1
fi
- #if [ $__ADAPTER == $__RESTBASE ] || [ $__ADAPTER == $__RESTBASE_SECURE ]; then
if [ $__ADAPTER_TYPE == "REST" ]; then
paramError=1
fi
if [ $paramError -eq 1 ]; then
((RES_CONF_FAIL++))
echo "-Incorrect number of parameters to __do_curl_to_api " $@ >> $HTTPLOG
- echo "-Expected: (PA|ECS GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url> [<file>]) | (PA|ECS RESPONSE <correlation-id>)" >> $HTTPLOG
+ echo "-Expected: (PA|ICS GET|PUT|POST|DELETE|GET_BATCH|PUT_BATCH|POST_BATCH|DELETE_BATCH <url> [<file> [mime-type]]) | (PA|ICS RESPONSE <correlation-id>)" >> $HTTPLOG
echo "-Returning response 000" >> $HTTPLOG
echo "-000"
return 1
fi
- #if [ $__ADAPTER == $__RESTBASE ] || [ $__ADAPTER == $__RESTBASE_SECURE ]; then
if [ $__ADAPTER_TYPE == "REST" ]; then
url=" "${__ADAPTER}${input_url}
oper=" -X "$oper
echo " RESP: "$res >> $HTTPLOG
status=${res:${#res}-3}
TS=$SECONDS
- # wait of the reply from the agent/ECS...
+    # wait for the reply from the agent/ICS...
while [ $status -eq 204 ]; do
if [ $(($SECONDS - $TS)) -gt 90 ]; then
echo " RETCODE: (timeout after 90s)" >> $HTTPLOG
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+# Functions for CBS is included in consul_api_functions.sh
+
+
+
if [ $? -eq 0 ] && [ ! -z "$result" ]; then
for resid in $result; do
echo " Deleting $restype $resid in namespace $namespace with label autotest "
- kubectl delete $restype $resid -n $namespace 1> /dev/null 2> /dev/null
+ kubectl delete --grace-period=1 $restype $resid -n $namespace 1> /dev/null 2> /dev/null
done
fi
done
if [ $? -eq 0 ] && [ ! -z "$result" ]; then
for resid in $result; do
echo " Deleting $restype $resid with label autotest "
- kubectl delete $restype $resid 1> /dev/null 2> /dev/null
+ kubectl delete --grace-period=1 $restype $resid 1> /dev/null 2> /dev/null
done
fi
done
if [ $? -eq 0 ] && [ ! -z "$result" ]; then
for resid in $result; do
echo " Deleting $restype $resid in namespace $namespace with label autotest "
- kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+ kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
echo -ne " Waiting for $restype $resid in namespace $namespace with label autotest to be deleted..."$SAMELINE
T_START=$SECONDS
result="dummy"
if [ $? -eq 0 ] && [ ! -z "$result" ]; then
for resid in $result; do
echo " Deleting $restype $resid with label autotest "
- kubectl delete $restype $resid -n $namespace #1> /dev/null 2> /dev/null
+ kubectl delete --grace-period=1 $restype $resid -n $namespace #1> /dev/null 2> /dev/null
echo -ne " Waiting for $restype $resid with label autotest to be deleted..."$SAMELINE
T_START=$SECONDS
result="dummy"
}
-# Function to perpare the consul configuration according to the current simulator configuration
-# args: SDNC|NOSDNC <output-file>
-# (Function for test scripts)
-prepare_consul_config() {
- echo -e $BOLD"Prepare Consul config"$EBOLD
-
- echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
-
- if [ $# != 2 ]; then
- ((RES_CONF_FAIL++))
- __print_err "need two args, SDNC|NOSDNC <output-file>" $@
- exit 1
- fi
-
- if [ $1 == "SDNC" ]; then
- echo -e " Config$BOLD including SDNC$EBOLD configuration"
- elif [ $1 == "NOSDNC" ]; then
- echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
- else
- ((RES_CONF_FAIL++))
- __print_err "need two args, SDNC|NOSDNC <output-file>" $@
- exit 1
- fi
-
- config_json="\n {"
- if [ $1 == "SDNC" ]; then
- config_json=$config_json"\n \"controller\": ["
- config_json=$config_json"\n {"
- config_json=$config_json"\n \"name\": \"$SDNC_APP_NAME\","
- config_json=$config_json"\n \"baseUrl\": \"$SDNC_SERVICE_PATH\","
- config_json=$config_json"\n \"userName\": \"$SDNC_USER\","
- config_json=$config_json"\n \"password\": \"$SDNC_PWD\""
- config_json=$config_json"\n }"
- config_json=$config_json"\n ],"
- fi
-
- config_json=$config_json"\n \"streams_publishes\": {"
- config_json=$config_json"\n \"dmaap_publisher\": {"
- config_json=$config_json"\n \"type\": \"message-router\","
- config_json=$config_json"\n \"dmaap_info\": {"
- config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
- config_json=$config_json"\n }"
- config_json=$config_json"\n }"
- config_json=$config_json"\n },"
- config_json=$config_json"\n \"streams_subscribes\": {"
- config_json=$config_json"\n \"dmaap_subscriber\": {"
- config_json=$config_json"\n \"type\": \"message-router\","
- config_json=$config_json"\n \"dmaap_info\": {"
- config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
- config_json=$config_json"\n }"
- config_json=$config_json"\n }"
- config_json=$config_json"\n },"
-
- config_json=$config_json"\n \"ric\": ["
-
- if [ $RUNMODE == "KUBE" ]; then
- result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
- rics=""
- ric_cntr=0
- if [ $? -eq 0 ] && [ ! -z "$result" ]; then
- for im in $result; do
- if [[ $im != *"-0" ]]; then
- ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
- rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
- let ric_cntr=ric_cntr+1
- fi
- done
- fi
- if [ $ric_cntr -eq 0 ]; then
- echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
- fi
- else
- rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
- if [ $? -ne 0 ] || [ -z "$rics" ]; then
- echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
- ((RES_CONF_FAIL++))
- return 1
- fi
- fi
- cntr=0
- for ric in $rics; do
- if [ $cntr -gt 0 ]; then
- config_json=$config_json"\n ,"
- fi
- config_json=$config_json"\n {"
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=${ric%.*.*} #extract pod id from full hosthame
- ric_id=$(echo "$ric_id" | tr '-' '_')
- else
- ric_id=$ric
- fi
- echo " Found a1 sim: "$ric_id
- config_json=$config_json"\n \"name\": \"$ric_id\","
- config_json=$config_json"\n \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
- if [ $1 == "SDNC" ]; then
- config_json=$config_json"\n \"controller\": \"$SDNC_APP_NAME\","
- fi
- config_json=$config_json"\n \"managedElementIds\": ["
- config_json=$config_json"\n \"me1_$ric_id\","
- config_json=$config_json"\n \"me2_$ric_id\""
- config_json=$config_json"\n ]"
- config_json=$config_json"\n }"
- let cntr=cntr+1
- done
-
- config_json=$config_json"\n ]"
- config_json=$config_json"\n}"
-
- if [ $RUNMODE == "KUBE" ]; then
- config_json="{\"config\":"$config_json"}"
- fi
-
- printf "$config_json">$2
-
- echo ""
-}
-
# Start Consul and CBS
# args: -
# (Function for test scripts)
# args: <protocol> <internal-port> <external-port>
__control_panel_set_protocoll() {
echo -e $BOLD"$CONTROL_PANEL_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $CONTROL_PANEL_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $CONTROL_PANEL_DISPLAY_NAME"
CP_SERVICE_PATH=$1"://"$CONTROL_PANEL_APP_NAME":"$2
if [ $RUNMODE == "KUBE" ]; then
export CP_PROXY_CONFIGMAP_NAME=$CONTROL_PANEL_APP_NAME"-proxy"
export CONTROL_PANEL_PATH_POLICY_PREFIX
- export CONTROL_PANEL_PATH_ECS_PREFIX
- export CONTROL_PANEL_PATH_ECS_PREFIX2
+ export CONTROL_PANEL_PATH_ICS_PREFIX
+ export CONTROL_PANEL_PATH_ICS_PREFIX2
export NRT_GATEWAY_APP_NAME
export NRT_GATEWAY_EXTERNAL_PORT
export POLICY_AGENT_EXTERNAL_SECURE_PORT
- export ECS_EXTERNAL_SECURE_PORT
+ export ICS_EXTERNAL_SECURE_PORT
if [ $RUNMODE == "KUBE" ]; then
export NGW_DOMAIN_NAME=$NRT_GATEWAY_APP_NAME.$KUBE_NONRTRIC_NAMESPACE.svc.cluster.local # suffix needed for nginx name resolution
export CP_NGINX_RESOLVER=$CONTROL_PANEL_NGINX_KUBE_RESOLVER
else
export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME
- export ECS_DOMAIN_NAME=$ECS_APP_NAME
+ export ICS_DOMAIN_NAME=$ICS_APP_NAME
export NGW_DOMAIN_NAME=$NRT_GATEWAY_APP_NAME
export CP_NGINX_RESOLVER=$CONTROL_PANEL_NGINX_DOCKER_RESOLVER
dest_file=$SIM_GROUP/$CONTROL_PANEL_COMPOSE_DIR/$CONTROL_PANEL_HOST_MNT_DIR/$CONTROL_PANEL_CONFIG_FILE
- envsubst '${NGW_DOMAIN_NAME},${CP_NGINX_RESOLVER},${NRT_GATEWAY_EXTERNAL_PORT},${POLICY_AGENT_EXTERNAL_SECURE_PORT},${ECS_EXTERNAL_SECURE_PORT},${POLICY_AGENT_DOMAIN_NAME},${ECS_DOMAIN_NAME},${CONTROL_PANEL_PATH_POLICY_PREFIX},${CONTROL_PANEL_PATH_ECS_PREFIX} ,${CONTROL_PANEL_PATH_ECS_PREFIX2}' < $1 > $dest_file
+ envsubst '${NGW_DOMAIN_NAME},${CP_NGINX_RESOLVER},${NRT_GATEWAY_EXTERNAL_PORT},${POLICY_AGENT_EXTERNAL_SECURE_PORT},${ICS_EXTERNAL_SECURE_PORT},${POLICY_AGENT_DOMAIN_NAME},${ICS_DOMAIN_NAME},${CONTROL_PANEL_PATH_POLICY_PREFIX},${CONTROL_PANEL_PATH_ICS_PREFIX} ,${CONTROL_PANEL_PATH_ICS_PREFIX2}' < $1 > $dest_file
__start_container $CONTROL_PANEL_COMPOSE_DIR "" NODOCKERARGS 1 $CONTROL_PANEL_APP_NAME
# args: <log-dir> <file-prexix>
__CR_store_docker_logs() {
if [ $RUNMODE == "KUBE" ]; then
- kubectl logs -l "autotest=CR" -n $KUBE_SIM_NAMESPACE --tail=-1 > $1$2_cr.log 2>&1
+ for podname in $(kubectl get pods -n $KUBE_SIM_NAMESPACE -l "autotest=CR" -o custom-columns=":metadata.name"); do
+ kubectl logs -n $KUBE_SIM_NAMESPACE $podname --tail=-1 > $1$2_$podname.log 2>&1
+ done
else
- docker logs $CR_APP_NAME > $1$2_cr.log 2>&1
+ crs=$(docker ps --filter "name=$CR_APP_NAME" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+ for crid in $crs; do
+ docker logs $crid > $1$2_$crid.log 2>&1
+ done
fi
}
# This function is called for apps managed by the test script as well as for prestarted apps.
# args: -
__CR_statisics_setup() {
- if [ $RUNMODE == "KUBE" ]; then
- echo "CR $CR_APP_NAME $KUBE_SIM_NAMESPACE"
- else
- echo "CR $CR_APP_NAME"
- fi
+ for ((CR_INSTANCE=MAX_CR_APP_COUNT; CR_INSTANCE>0; CR_INSTANCE-- )); do
+ if [ $RUNMODE == "KUBE" ]; then
+ CR_INSTANCE_KUBE=$(($CR_INSTANCE-1))
+ echo -n " CR-$CR_INSTANCE_KUBE $CR_APP_NAME-$CR_INSTANCE_KUBE $KUBE_SIM_NAMESPACE "
+ else
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}_cr_$CR_INSTANCE "
+ else
+ echo -n " CR_$CR_INSTANCE ${CR_APP_NAME}-cr-$CR_INSTANCE "
+ fi
+ fi
+ done
}
#######################################################
### CR functions
################
+#Var to hold the current number of CR instances
+CR_APP_COUNT=1
+MAX_CR_APP_COUNT=10
+
# Set http as the protocol to use for all communication to the Dmaap adapter
# args: -
# (Function for test scripts)
# Setup paths to svc/container for internal and external access
# args: <protocol> <internal-port> <external-port>
__cr_set_protocoll() {
- echo -e $BOLD"$CR_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $CR_DISPLAY_NAME"
+ echo -e $BOLD"$CR_DISPLAY_NAME protocol setting"$EBOLD
+ echo -e " Using $BOLD $1 $EBOLD towards $CR_DISPLAY_NAME"
## Access to Dmaap adapter
-
- # CR_SERVICE_PATH is the base path to cr
- CR_SERVICE_PATH=$1"://"$CR_APP_NAME":"$2 # docker access, container->container and script->container via proxy
- if [ $RUNMODE == "KUBE" ]; then
- CR_SERVICE_PATH=$1"://"$CR_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
- fi
- # Service paths are used in test script to provide callbacck urls to app
- CR_SERVICE_MR_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK_MR #Only for messages from dmaap adapter/mediator
- CR_SERVICE_TEXT_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK_TEXT #Callbacks for text payload
- CR_SERVICE_APP_PATH=$CR_SERVICE_PATH$CR_APP_CALLBACK #For general callbacks from apps
-
- # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
- CR_ADAPTER_TYPE="REST"
- CR_ADAPTER=$CR_SERVICE_PATH
-
+ for ((CR_INSTANCE=0; CR_INSTANCE<$MAX_CR_APP_COUNT; CR_INSTANCE++ )); do
+ CR_DOCKER_INSTANCE=$(($CR_INSTANCE+1))
+ # CR_SERVICE_PATH is the base path to cr
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"_cr_"${CR_DOCKER_INSTANCE}":"$2 # docker access, container->container and script->container via proxy
+ else
+ __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-cr-"${CR_DOCKER_INSTANCE}":"$2 # docker access, container->container and script->container via proxy
+ fi
+ if [ $RUNMODE == "KUBE" ]; then
+ __CR_SERVICE_PATH=$1"://"$CR_APP_NAME"-"$CR_INSTANCE.$CR_APP_NAME"."$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+ fi
+ export CR_SERVICE_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH
+        # Service paths are used in test script to provide callback urls to app
+ export CR_SERVICE_MR_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_MR #Only for messages from dmaap adapter/mediator
+ export CR_SERVICE_TEXT_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK_TEXT #Callbacks for text payload
+ export CR_SERVICE_APP_PATH"_"${CR_INSTANCE}=$__CR_SERVICE_PATH$CR_APP_CALLBACK #For general callbacks from apps
+
+ if [ $CR_INSTANCE -eq 0 ]; then
+ # CR_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
+            # CR_ADAPTER needs to be set before each call to CR....only set for instance 0 here
+ CR_ADAPTER_TYPE="REST"
+ CR_ADAPTER=$__CR_SERVICE_PATH
+ fi
+ done
echo ""
}
export CR_INTERNAL_SECURE_PORT
export CR_EXTERNAL_PORT
export CR_EXTERNAL_SECURE_PORT
+
+ export CR_APP_COUNT
}
# Start the Callback reciver in the simulator group
-# args: -
+# args: <app-count>
# (Function for test scripts)
start_cr() {
echo -e $BOLD"Starting $CR_DISPLAY_NAME"$EBOLD
+ if [ $# -ne 1 ]; then
+ echo -e $RED" Number of CR instances missing, usage: start_cr <app-count>"$ERED
+ exit 1
+ fi
+ if [ $1 -lt 1 ] || [ $1 -gt 10 ]; then
+ echo -e $RED" Number of CR shall be 1...10, usage: start_cr <app-count>"$ERED
+ exit 1
+ fi
+ export CR_APP_COUNT=$1
+
if [ $RUNMODE == "KUBE" ]; then
# Check if app shall be fully managed by the test script
fi
- __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+ for ((CR_INSTANCE=0; CR_INSTANCE<$CR_APP_COUNT; CR_INSTANCE++ )); do
+ __dynvar="CR_SERVICE_PATH_"$CR_INSTANCE
+ __cr_app_name=$CR_APP_NAME"-"$CR_INSTANCE
+ __check_service_start $__cr_app_name ${!__dynvar}$CR_ALIVE_URL
+ result=$(__do_curl ${!__dynvar}/reset)
+ done
- echo -ne " Service $CR_APP_NAME - reset "$SAMELINE
- result=$(__do_curl CR $CR_SERVICE_PATH/reset)
- if [ $? -ne 0 ]; then
- echo -e " Service $CR_APP_NAME - reset $RED Failed $ERED - will continue"
- else
- echo -e " Service $CR_APP_NAME - reset $GREEN OK $EGREEN"
- fi
else
# Check if docker app shall be fully managed by the test script
__check_included_image 'CR'
__cr_export_vars
- __start_container $CR_COMPOSE_DIR "" NODOCKERARGS 1 $CR_APP_NAME
+ app_data=""
+ cntr=1
+ while [ $cntr -le $CR_APP_COUNT ]; do
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ app=$CR_APP_NAME"_cr_"$cntr
+ else
+ app=$CR_APP_NAME"-cr-"$cntr
+ fi
+ app_data="$app_data $app"
+ let cntr=cntr+1
+ done
+
+ echo "COMPOSE_PROJECT_NAME="$CR_APP_NAME > $SIM_GROUP/$CR_COMPOSE_DIR/.env
+
+ __start_container $CR_COMPOSE_DIR "" NODOCKERARGS $CR_APP_COUNT $app_data
- __check_service_start $CR_APP_NAME $CR_SERVICE_PATH$CR_ALIVE_URL
+ cntr=1 #Counter for docker instance, starts on 1
+ cntr2=0 #Counter for env var name, starts with 0 to be compatible with kube
+ while [ $cntr -le $CR_APP_COUNT ]; do
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ app=$CR_APP_NAME"_cr_"$cntr
+ else
+ app=$CR_APP_NAME"-cr-"$cntr
+ fi
+ __dynvar="CR_SERVICE_PATH_"$cntr2
+ __check_service_start $app ${!__dynvar}$CR_ALIVE_URL
+ let cntr=cntr+1
+ let cntr2=cntr2+1
+ done
fi
echo ""
}
+#Convert a cr path id to the value of the environment var holding the url
+# arg: <cr-path-id>
+# returns: <base-url-to-the-app>
+__cr_get_service_path(){
+ if [ $# -ne 1 ]; then
+ echo "DUMMY"
+ return 1
+ fi
+ if [ $1 -lt 0 ] || [ $1 -ge $MAX_CR_APP_COUNT ]; then
+ echo "DUMMY"
+ return 1
+ fi
+ __dynvar="CR_SERVICE_PATH_"$1
+ echo ${!__dynvar}
+ return 0
+}
# Tests if a variable value in the CR is equal to a target value and and optional timeout.
# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is
# equal to the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
# before setting pass or fail depending on if the variable value becomes equal to the target
# value or not.
# (Function for test scripts)
cr_equal() {
- if [ $# -eq 2 ] || [ $# -eq 3 ]; then
- __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "=" $2 $3
+ if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+ CR_SERVICE_PATH=$(__cr_get_service_path $1)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+ __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "=" $3 $4
else
- __print_err "Wrong args to cr_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+ __print_err "Wrong args to cr_equal, needs three or four args: <cr-path-id> <variable-name> <target-value> [ timeout ]" $@
fi
}
# Tests if a variable value in the CR contains the target string and and optional timeout
# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable contains
# the target or not.
-# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
+# Arg: <cr-path-id> <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
# before setting pass or fail depending on if the variable value contains the target
# value or not.
# (Function for test scripts)
cr_contains_str() {
- if [ $# -eq 2 ] || [ $# -eq 3 ]; then
- __var_test "CR" "$CR_SERVICE_PATH/counter/" $1 "contain_str" $2 $3
+ if [ $# -eq 3 ] || [ $# -eq 4 ]; then
+ CR_SERVICE_PATH=$(__cr_get_service_path $1)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+ __var_test "CR" "$CR_SERVICE_PATH/counter/" $2 "contain_str" $3 $4
return 0
else
- __print_err "needs two or three args: <sim-param> <target-value> [ timeout ]"
+ __print_err "needs two or three args: <cr-path-id> <variable-name> <target-value> [ timeout ]"
return 1
fi
}
# Read a variable value from CR sim and send to stdout. Arg: <variable-name>
cr_read() {
+ CR_SERVICE_PATH=$(__cr_get_service_path $1)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
echo "$(__do_curl $CR_SERVICE_PATH/counter/$1)"
}
# Function to configure write delay on callbacks
# Delay given in seconds.
-# arg <response-code> <delay-in-sec>
+# arg <response-code> <cr-path-id> <delay-in-sec>
# (Function for test scripts)
cr_delay_callback() {
__log_conf_start $@
- if [ $# -ne 2 ]; then
- __print_err "<response-code> <delay-in-sec>]" $@
+ if [ $# -ne 3 ]; then
+ __print_err "<response-code> <cr-path-id> <delay-in-sec>]" $@
return 1
fi
- res="$(__do_curl_to_api CR POST /forcedelay?delay=$2)"
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+
+ res="$(__do_curl_to_api CR POST /forcedelay?delay=$3)"
status=${res:${#res}-3}
if [ $status -ne 200 ]; then
}
# CR API: Check the contents of all current ric sync events for one id from PMS
-# <response-code> <id> [ EMPTY | ( <ric-id> )+ ]
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]
# (Function for test scripts)
cr_api_check_all_sync_events() {
__log_test_start $@
return 1
fi
- if [ $# -lt 2 ]; then
- __print_err "<response-code> <id> [ EMPTY | ( <ric-id> )+ ]" $@
+ if [ $# -lt 3 ]; then
+ __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <ric-id> )+ ]" $@
return 1
fi
- query="/get-all-events/"$2
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+
+ query="/get-all-events/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
- if [ $# -gt 2 ]; then
+ if [ $# -gt 3 ]; then
body=${res:0:${#res}-3}
- if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+ if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
targetJson="["
else
targetJson="["
- arr=(${@:3})
+ arr=(${@:4})
- for ((i=0; i<$(($#-2)); i=i+1)); do
+ for ((i=0; i<$(($#-3)); i=i+1)); do
if [ "$targetJson" != "[" ]; then
targetJson=$targetJson","
return 0
}
-# CR API: Check the contents of all current status events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <status> )+ ]
+# CR API: Check the contents of all current status events for one id from ICS
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]
# (Function for test scripts)
-cr_api_check_all_ecs_events() {
+cr_api_check_all_ics_events() {
__log_test_start $@
- if [ $# -lt 2 ]; then
- __print_err "<response-code> <id> [ EMPTY | ( <status> )+ ]" $@
+ if [ $# -lt 3 ]; then
+ __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <status> )+ ]" $@
return 1
fi
- query="/get-all-events/"$2
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+
+ query="/get-all-events/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
- if [ $# -gt 2 ]; then
+ if [ $# -gt 3 ]; then
body=${res:0:${#res}-3}
- if [ $# -eq 3 ] && [ $3 == "EMPTY" ]; then
+ if [ $# -eq 4 ] && [ $4 == "EMPTY" ]; then
targetJson="["
else
targetJson="["
- arr=(${@:3})
+ arr=(${@:4})
- for ((i=0; i<$(($#-2)); i=i+1)); do
+ for ((i=0; i<$(($#-3)); i=i+1)); do
if [ "$targetJson" != "[" ]; then
targetJson=$targetJson","
return 0
}
-# CR API: Check the contents of all current type subscription events for one id from ECS
-# <response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
+# CR API: Check the contents of all current type subscription events for one id from ICS
+# <response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]
# (Function for test scripts)
-cr_api_check_all_ecs_subscription_events() {
+cr_api_check_all_ics_subscription_events() {
__log_test_start $@
- #Valid number of parameter 2,3,7,11
+ #Valid number of parameters 3,4,8,12
paramError=1
- if [ $# -eq 2 ]; then
+ if [ $# -eq 3 ]; then
paramError=0
fi
- if [ $# -eq 3 ] && [ "$3" == "EMPTY" ]; then
+ if [ $# -eq 4 ] && [ "$4" == "EMPTY" ]; then
paramError=0
fi
- variablecount=$(($#-2))
- if [ $# -gt 3 ] && [ $(($variablecount%3)) -eq 0 ]; then
+ variablecount=$(($#-3))
+ if [ $# -gt 4 ] && [ $(($variablecount%3)) -eq 0 ]; then
paramError=0
fi
if [ $paramError -eq 1 ]; then
- __print_err "<response-code> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
+ __print_err "<response-code> <cr-path-id> <id> [ EMPTY | ( <type-id> <schema> <registration-status> )+ ]" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- query="/get-all-events/"$2
+ query="/get-all-events/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
- if [ $# -gt 2 ]; then
+ if [ $# -gt 3 ]; then
body=${res:0:${#res}-3}
targetJson="["
- if [ $# -gt 3 ]; then
- arr=(${@:3})
- for ((i=0; i<$(($#-3)); i=i+3)); do
+ if [ $# -gt 4 ]; then
+ arr=(${@:4})
+ for ((i=0; i<$(($#-4)); i=i+3)); do
if [ "$targetJson" != "[" ]; then
targetJson=$targetJson","
fi
# CR API: Reset all events and counters
-# Arg: -
+# Arg: <cr-path-id>
# (Function for test scripts)
cr_api_reset() {
__log_conf_start $@
+ if [ $# -ne 1 ]; then
+ __print_err "<cr-path-id>" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $1)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
+ return 1
+ fi
+
res="$(__do_curl_to_api CR GET /reset)"
status=${res:${#res}-3}
# CR API: Check the contents of all json events for path
-# <response-code> <topic-url> (EMPTY | <json-msg>+ )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg>+ )
# (Function for test scripts)
cr_api_check_all_genric_json_events() {
__log_test_start $@
- if [ $# -lt 3 ]; then
- __print_err "<response-code> <topic-url> (EMPTY | <json-msg>+ )" $@
+ if [ $# -lt 4 ]; then
+ __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg>+ )" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- query="/get-all-events/"$2
+ query="/get-all-events/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
body=${res:0:${#res}-3}
targetJson="["
- if [ $3 != "EMPTY" ]; then
+ if [ $4 != "EMPTY" ]; then
+ shift
shift
shift
while [ $# -gt 0 ]; do
}
-
# CR API: Check a single (oldest) json event (or none if empty) for path
-# <response-code> <topic-url> (EMPTY | <json-msg> )
+# <response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg> )
# (Function for test scripts)
cr_api_check_single_genric_json_event() {
__log_test_start $@
- if [ $# -ne 3 ]; then
- __print_err "<response-code> <topic-url> (EMPTY | <json-msg> )" $@
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <json-msg> )" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- query="/get-event/"$2
+ query="/get-event/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
body=${res:0:${#res}-3}
- targetJson=$3
+ targetJson=$4
if [ $targetJson == "EMPTY" ] && [ ${#body} -ne 0 ]; then
__log_test_fail_body
# CR API: Check a single (oldest) json in md5 format (or none if empty) for path.
# Note that if a json message is given, it shall be compact, no ws except inside string.
# The MD5 will generate different hash if ws is present or not in otherwise equivalent json
-# arg: <response-code> <topic-url> (EMPTY | <data-msg> )
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )
# (Function for test scripts)
cr_api_check_single_genric_event_md5() {
__log_test_start $@
- if [ $# -ne 3 ]; then
- __print_err "<response-code> <topic-url> (EMPTY | <data-msg> )" $@
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-msg> )" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- query="/get-event/"$2
+ query="/get-event/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
body=${res:0:${#res}-3}
- if [ $3 == "EMPTY" ]; then
+ if [ $4 == "EMPTY" ]; then
if [ ${#body} -ne 0 ]; then
__log_test_fail_body
return 1
fi
command -v md5 > /dev/null # Mac
if [ $? -eq 0 ]; then
- targetMd5=$(echo -n "$3" | md5)
+ targetMd5=$(echo -n "$4" | md5)
else
command -v md5sum > /dev/null # Linux
if [ $? -eq 0 ]; then
- targetMd5=$(echo -n "$3" | md5sum | cut -d' ' -f 1) # Need to cut additional info printed by cmd
+ targetMd5=$(echo -n "$4" | md5sum | cut -d' ' -f 1) # Need to cut additional info printed by cmd
else
__log_test_fail_general "Command md5 nor md5sum is available"
return 1
# CR API: Check a single (oldest) event in md5 format (or none if empty) for path.
# Note that if a file with json message is given, the json shall be compact, no ws except inside string and not newlines.
# The MD5 will generate different hash if ws/newlines is present or not in otherwise equivalent json
-# arg: <response-code> <topic-url> (EMPTY | <data-file> )
+# arg: <response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )
# (Function for test scripts)
cr_api_check_single_genric_event_md5_file() {
__log_test_start $@
- if [ $# -ne 3 ]; then
- __print_err "<response-code> <topic-url> (EMPTY | <data-file> )" $@
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <cr-path-id> <topic-url> (EMPTY | <data-file> )" $@
+ return 1
+ fi
+
+ CR_SERVICE_PATH=$(__cr_get_service_path $2)
+ CR_ADAPTER=$CR_SERVICE_PATH
+ if [ $? -ne 0 ]; then
+ __print_err "<cr-path-id> missing or incorrect" $@
return 1
fi
- query="/get-event/"$2
+ query="/get-event/"$3
res="$(__do_curl_to_api CR GET $query)"
status=${res:${#res}-3}
return 1
fi
body=${res:0:${#res}-3}
- if [ $3 == "EMPTY" ]; then
+ if [ $4 == "EMPTY" ]; then
if [ ${#body} -ne 0 ]; then
__log_test_fail_body
return 1
fi
fi
- if [ ! -f $3 ]; then
+ if [ ! -f $4 ]; then
__log_test_fail_general "File $3 does not exist"
return 1
fi
- filedata=$(cat $3)
+ filedata=$(cat $4)
command -v md5 > /dev/null # Mac
if [ $? -eq 0 ]; then
# args: <protocol> <internal-port> <external-port>
__dmaapadp_set_protocoll() {
echo -e $BOLD"$DMAAP_ADP_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $DMAAP_ADP_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $DMAAP_ADP_DISPLAY_NAME"
## Access to Dmaap adapter
# paths to other components
- export ECS_SERVICE_PATH
+ export ICS_SERVICE_PATH
export DMAAP_ADP_SERVICE_PATH
export MR_SERVICE_PATH
# args: <protocol> <internal-port> <external-port>
__dmaapmed_set_protocoll() {
echo -e $BOLD"$DMAAP_MED_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $DMAAP_MED_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $DMAAP_MED_DISPLAY_NAME"
## Access to Dmaap mediator
fi
# paths to other components
- export ECS_SERVICE_PATH
+ export ICS_SERVICE_PATH
export DMAAP_MED_CONF_SELF_HOST=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f1-2)
export DMAAP_MED_CONF_SELF_PORT=$(echo $DMAAP_MED_SERVICE_PATH | cut -d: -f3)
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+# Functions for DMAAPMR are included in mr_api_functions.sh
+
+
+
# Function to execute curl towards a container (or process) and compare + print result
# Intended use is for basic test scripts where testing is done with curl and the returned response and payload need to be checked.
-# args: GET|PUT|POST|DELETE <url> <target-response-code> [<json-file>]
+# args: GET|PUT|POST|DELETE <url> <target-response-code> [<payload-file>]
# All calls made to 'localhost:'<port>.
# Expects env PORT set to intended port number
# Expects env RESULT to contain the target response body.
# Optional env HTTPX shall contain protocol 'http' or 'https'. If not set, 'http' is used. For 'https' all cert errors are ignored
-# RESULT="*" means that returned payload is not checked, may container any text
+# RESULT="*" means that returned payload is not checked, may contain any text
# RESULT="<text>" means that the returned payload has to match the <text> exactly
# RESULT="json:<returned-payload>" means that the returned json payload is compared with the expected result (order of json keys and index is irrelevant)
# RESULT="json-array-size:<integer-size>" means that the returned json payload shall contain the number of element given by the <integer-size>
PROT=$HTTPX
fi
- curlstr="curl -X "$1" -skw %{http_code} ${PROT}://localhost:$PORT$2 -H accept:*/*"
+ req_content=""
+ if [ -z "$REQ_CONTENT" ]; then
+ if [ $# -gt 3 ]; then
+ req_content="-H Content-Type:application/json" #Assuming json
+ fi
+ else
+ req_content="-H Content-Type:$REQ_CONTENT"
+ fi
+ resp_content=""
+ if [ -z "$RESP_CONTENT" ]; then
+ if [[ "$RESULT" == "json"* ]]; then
+ resp_content="application/json"
+ elif [[ "$RESULT" == "*" ]]; then
+ resp_content=""
+ else
+ resp_content="text/plain"
+ fi
+ else
+ resp_content=$RESP_CONTENT
+ fi
+ curlstr="curl -X "$1" -skw :%{content_type}:%{http_code} ${PROT}://localhost:$PORT$2 -H accept:*/*"
if [ $# -gt 3 ]; then
- curlstr=$curlstr" -H Content-Type:application/json --data-binary @"$4
+ curlstr=$curlstr" $req_content --data-binary @"$4
fi
echo " CMD:"$curlstr
res=$($curlstr)
status=${res:${#res}-3}
- body=${res:0:${#res}-3}
+ reminder=${res:0:${#res}-4}
+ content_type="${reminder##*:}"
+ body="${reminder%:*}"
+
export body
if [ $status -ne $3 ]; then
echo " Error status:"$status" Expected status: "$3
exit 1
else
echo " OK, code: "$status" (Expected)"
+ if [[ "$content_type" == *"$resp_content"* ]]; then
+ echo " Content type: "$content_type" (Expected)"
+ else
+ echo " Expected content type: "$resp_content
+ echo " Got: "$content_type
+ echo "Exiting....."
+ exit 1
+ fi
echo " Body: "$body
if [ "$RESULT" == "*" ]; then
echo " Body contents not checked"
echo "or"
echo "Usage: genstat.sh KUBE <start-time-seconds> <log-file> <app-short-name> <app-name> <namespace> [ <app-short-name> <app-name> <namespace> ]*"
}
-
STARTTIME=-1
if [ $# -lt 4 ]; then
fi
-echo "Time;Name;PIDS;CPU perc;Mem perc" > $LOGFILE
+echo "Name;Time;PIDS;CPU perc;Mem perc" > $LOGFILE
if [ "$STARTTIME" -ne -1 ]; then
STARTTIME=$(($SECONDS-$STARTTIME))
# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
# This function is called for prestarted apps not managed by the test script.
__HTTPPROXY_kube_scale_zero_and_wait() {
- echo -e $RED" HTTPPROXY replicas kept as is"$ERED
+ echo -e $RED" HTTPPROXY app is not scaled in this state"$ERED
}
# Delete all kube resouces for the app
# This function is called for apps managed by the test script.
# args: -
__HTTPPROXY_initial_setup() {
- :
+ use_http_proxy_http
}
# Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
#######################################################
-
-## Access to Http Proxy Receiver
-# Host name may be changed if app started by kube
-# Direct access from script
-HTTP_PROXY_HTTPX="http"
-HTTP_PROXY_HOST_NAME=$LOCALHOST_NAME
-HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_PORT
-
-#########################
-### Http Proxy functions
-#########################
-
-# All calls to httpproxy will be directed to the http interface
+# Set http as the protocol to use for all communication to the http proxy
# args: -
# (Function for test scripts)
use_http_proxy_http() {
- echo -e $BOLD"$HTTP_PROXY_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD"
- HTTP_PROXY_HTTPX="http"
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_EXTERNAL_PORT
-
- echo ""
+ __http_proxy_set_protocoll "http" $HTTP_PROXY_INTERNAL_PORT $HTTP_PROXY_EXTERNAL_PORT
}
-# All calls to httpproxy will be directed to the https interface
+# Set https as the protocol to use for all communication to the http proxy
# args: -
# (Function for test scripts)
use_http_proxy_https() {
+ __http_proxy_set_protocoll "https" $HTTP_PROXY_INTERNAL_SECURE_PORT $HTTP_PROXY_EXTERNAL_SECURE_PORT
+}
+
+# Setup paths to svc/container for internal and external access
+# args: <protocol> <internal-port> <external-port>
+__http_proxy_set_protocoll() {
echo -e $BOLD"$HTTP_PROXY_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD https $EBOLD"
- HTTP_PROXY_HTTPX="https"
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_EXTERNAL_SECURE_PORT
+ echo -e " Using $BOLD $1 $EBOLD towards $HTTP_PROXY_DISPLAY_NAME"
+
+ ## Access to http proxy
+ ## HTTP_PROXY_CONFIG_HOST_NAME and HTTP_PROXY_CONFIG_PORT used by apps as config for proxy host and port
+
+ HTTP_PROXY_SERVICE_PATH=$1"://"$HTTP_PROXY_APP_NAME":"$2 # docker access, container->container and script->container via proxy
+ HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME
+ HTTP_PROXY_CONFIG_PORT=$2
+ if [ $RUNMODE == "KUBE" ]; then
+ HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
+ HTTP_PROXY_CONFIG_PORT=$3
+ HTTP_PROXY_SERVICE_PATH=$1"://"$HTTP_PROXY_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+ fi
echo ""
}
+# Export env vars for config files, docker compose and kube resources
+# args:
+__http_proxy_export_vars() {
+
+ export HTTP_PROXY_APP_NAME
+ export HTTP_PROXY_DISPLAY_NAME
+
+ export HTTP_PROXY_WEB_EXTERNAL_PORT
+ export HTTP_PROXY_WEB_INTERNAL_PORT
+ export HTTP_PROXY_EXTERNAL_PORT
+ export HTTP_PROXY_INTERNAL_PORT
+
+ export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
+ export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
+ export HTTP_PROXY_EXTERNAL_SECURE_PORT
+ export HTTP_PROXY_INTERNAL_SECURE_PORT
+
+ export KUBE_SIM_NAMESPACE
+ export DOCKER_SIM_NWNAME
+ export HTTP_PROXY_IMAGE
+}
+
# Start the Http Proxy in the simulator group
# args: -
# (Function for test scripts)
if [ $retcode_i -eq 0 ]; then
echo -e " Creating $HTTP_PROXY_APP_NAME deployment and service"
- export HTTP_PROXY_APP_NAME
-
- export HTTP_PROXY_WEB_EXTERNAL_PORT
- export HTTP_PROXY_WEB_INTERNAL_PORT
- export HTTP_PROXY_EXTERNAL_PORT
- export HTTP_PROXY_INTERNAL_PORT
-
- export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
- export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
- export HTTP_PROXY_EXTERNAL_SECURE_PORT
- export HTTP_PROXY_INTERNAL_SECURE_PORT
-
- export KUBE_SIM_NAMESPACE
- export HTTP_PROXY_IMAGE
__kube_create_namespace $KUBE_SIM_NAMESPACE
+ __http_proxy_export_vars
+
# Create service
input_yaml=$SIM_GROUP"/"$HTTP_PROXY_COMPOSE_DIR"/"svc.yaml
output_yaml=$PWD/tmp/proxy_svc.yaml
fi
- echo " Retrieving host and ports for service..."
- HTTP_PROXY_HOST_NAME=$(__kube_get_service_host $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE)
- HTTP_PROXY_WEB_EXTERNAL_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "web")
- HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "webs")
-
- HTTP_PROXY_EXTERNAL_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "http")
- HTTP_PROXY_EXTERNAL_SECURE_PORT=$(__kube_get_service_port $HTTP_PROXY_APP_NAME $KUBE_SIM_NAMESPACE "https")
-
- if [ $HTTP_PROXY_HTTPX == "http" ]; then
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_PORT
- HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_EXTERNAL_PORT
- HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
-
- echo " Host IP, http port: $HTTP_PROXY_HOST_NAME $HTTP_PROXY_WEB_EXTERNAL_PORT"
- else
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
- HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_EXTERNAL_SECURE_PORT
- HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME"."$KUBE_SIM_NAMESPACE
-
- echo " Host IP, https port: $HTTP_PROXY_HOST_NAME $HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT"
- fi
-
- __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_PATH$HTTP_PROXY_ALIVE_URL
+ __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_SERVICE_PATH$HTTP_PROXY_ALIVE_URL
else
# Check if docker app shall be fully managed by the test script
exit
fi
- export HTTP_PROXY_APP_NAME
- export HTTP_PROXY_EXTERNAL_PORT
- export HTTP_PROXY_INTERNAL_PORT
- export HTTP_PROXY_EXTERNAL_SECURE_PORT
- export HTTP_PROXY_INTERNAL_SECURE_PORT
- export HTTP_PROXY_WEB_EXTERNAL_PORT
- export HTTP_PROXY_WEB_INTERNAL_PORT
- export HTTP_PROXY_WEB_EXTERNAL_SECURE_PORT
- export HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
- export DOCKER_SIM_NWNAME
-
- export HTTP_PROXY_DISPLAY_NAME
+ __http_proxy_export_vars
__start_container $HTTP_PROXY_COMPOSE_DIR "" NODOCKERARGS 1 $HTTP_PROXY_APP_NAME
- if [ $HTTP_PROXY_HTTPX == "http" ]; then
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_INTERNAL_PORT
- else
- HTTP_PROXY_PATH=$HTTP_PROXY_HTTPX"://"$HTTP_PROXY_HOST_NAME":"$HTTP_PROXY_WEB_INTERNAL_SECURE_PORT
- fi
- __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_PATH$HTTP_PROXY_ALIVE_URL
-
- if [ $HTTP_PROXY_HTTPX == "http" ]; then
- HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_INTERNAL_PORT
- else
- HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_INTERNAL_SECURE_PORT
- fi
- HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_APP_NAME
-
+ __check_service_start $HTTP_PROXY_APP_NAME $HTTP_PROXY_SERVICE_PATH$HTTP_PROXY_ALIVE_URL
fi
echo ""
}
-
# ============LICENSE_END=================================================
#
-# This is a script that contains container/service management functions and test functions for ECS
+# This is a script that contains container/service management functions and test functions for ICS
################ Test engine functions ################
# Create the image var used during the test
# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
# <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__ECS_imagesetup() {
- __check_and_create_image_var ECS "ECS_IMAGE" "ECS_IMAGE_BASE" "ECS_IMAGE_TAG" $1 "$ECS_DISPLAY_NAME"
+__ICS_imagesetup() {
+ __check_and_create_image_var ICS "ICS_IMAGE" "ICS_IMAGE_BASE" "ICS_IMAGE_TAG" $1 "$ICS_DISPLAY_NAME"
}
# Pull image from remote repo or use locally built image
# <pull-policy-override> Shall be used for images allowing overriding. For example use a local image when test is started to use released images
# <pull-policy-original> Shall be used for images that does not allow overriding
# Both var may contain: 'remote', 'remote-remove' or 'local'
-__ECS_imagepull() {
- __check_and_pull_image $1 "$ECS_DISPLAY_NAME" $ECS_APP_NAME ECS_IMAGE
+__ICS_imagepull() {
+ __check_and_pull_image $1 "$ICS_DISPLAY_NAME" $ICS_APP_NAME ICS_IMAGE
}
# Build image (only for simulator or interfaces stubs owned by the test environment)
# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
# <image-tag-suffix> is present only for images with staging, snapshot,release tags
-__ECS_imagebuild() {
- echo -e $RED" Image for app ECS shall never be built"$ERED
+__ICS_imagebuild() {
+ echo -e $RED" Image for app ICS shall never be built"$ERED
}
# Generate a string for each included image using the app display name and a docker images format string
# If a custom image repo is used then also the source image from the local repo is listed
# arg: <docker-images-format-string> <file-to-append>
-__ECS_image_data() {
- echo -e "$ECS_DISPLAY_NAME\t$(docker images --format $1 $ECS_IMAGE)" >> $2
- if [ ! -z "$ECS_IMAGE_SOURCE" ]; then
- echo -e "-- source image --\t$(docker images --format $1 $ECS_IMAGE_SOURCE)" >> $2
+__ICS_image_data() {
+ echo -e "$ICS_DISPLAY_NAME\t$(docker images --format $1 $ICS_IMAGE)" >> $2
+ if [ ! -z "$ICS_IMAGE_SOURCE" ]; then
+ echo -e "-- source image --\t$(docker images --format $1 $ICS_IMAGE_SOURCE)" >> $2
fi
}
# Scale kubernetes resources to zero
# All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
# This function is called for apps fully managed by the test script
-__ECS_kube_scale_zero() {
- __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+__ICS_kube_scale_zero() {
+ __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
}
# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
# This function is called for prestarted apps not managed by the test script.
-__ECS_kube_scale_zero_and_wait() {
- __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-enrichmentservice
+__ICS_kube_scale_zero_and_wait() {
+ __kube_scale_and_wait_all_resources $KUBE_NONRTRIC_NAMESPACE app "$KUBE_NONRTRIC_NAMESPACE"-informationservice
}
# Delete all kube resouces for the app
# This function is called for apps managed by the test script.
-__ECS_kube_delete_all() {
- __kube_delete_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+__ICS_kube_delete_all() {
+ __kube_delete_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
}
# Store docker logs
# This function is called for apps managed by the test script.
# args: <log-dir> <file-prexix>
-__ECS_store_docker_logs() {
+__ICS_store_docker_logs() {
if [ $RUNMODE == "KUBE" ]; then
- kubectl logs -l "autotest=ECS" -n $KUBE_NONRTRIC_NAMESPACE --tail=-1 > $1$2_ecs.log 2>&1
+ kubectl logs -l "autotest=ICS" -n $KUBE_NONRTRIC_NAMESPACE --tail=-1 > $1$2_ics.log 2>&1
else
- docker logs $ECS_APP_NAME > $1$2_ecs.log 2>&1
+ docker logs $ICS_APP_NAME > $1$2_ics.log 2>&1
fi
}
# Initial setup of protocol, host and ports
# This function is called for apps managed by the test script.
# args: -
-__ECS_initial_setup() {
- use_ecs_rest_http
+__ICS_initial_setup() {
+ use_ics_rest_http
}
# Set app short-name, app name and namespace for logging runtime statistics of kubernets pods or docker containers
# For docker, the namespace shall be excluded
# This function is called for apps managed by the test script as well as for prestarted apps.
# args: -
-__ECS_statisics_setup() {
+__ICS_statisics_setup() {
if [ $RUNMODE == "KUBE" ]; then
- echo "ECS $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
+ echo "ICS $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE"
else
- echo "ECS $ECS_APP_NAME"
+ echo "ICS $ICS_APP_NAME"
fi
}
#######################################################
-# Make curl retries towards ECS for http response codes set in this env var, space separated list of codes
-ECS_RETRY_CODES=""
+# Make curl retries towards ICS for http response codes set in this env var, space separated list of codes
+ICS_RETRY_CODES=""
#Save first worker node the pod is started on
-__ECS_WORKER_NODE=""
+__ICS_WORKER_NODE=""
###########################
-### ECS functions
+### ICS functions
###########################
-# All calls to ECS will be directed to the ECS REST interface from now on
+# All calls to ICS will be directed to the ICS REST interface from now on
# args: -
# (Function for test scripts)
-use_ecs_rest_http() {
- __ecs_set_protocoll "http" $ECS_INTERNAL_PORT $ECS_EXTERNAL_PORT
+use_ics_rest_http() {
+ __ics_set_protocoll "http" $ICS_INTERNAL_PORT $ICS_EXTERNAL_PORT
}
-# All calls to ECS will be directed to the ECS REST interface from now on
+# All calls to ICS will be directed to the ICS REST interface from now on
# args: -
# (Function for test scripts)
-use_ecs_rest_https() {
- __ecs_set_protocoll "https" $ECS_INTERNAL_SECURE_PORT $ECS_EXTERNAL_SECURE_PORT
+use_ics_rest_https() {
+ __ics_set_protocoll "https" $ICS_INTERNAL_SECURE_PORT $ICS_EXTERNAL_SECURE_PORT
}
-# All calls to ECS will be directed to the ECS dmaap interface over http from now on
+# All calls to ICS will be directed to the ICS dmaap interface over http from now on
# args: -
# (Function for test scripts)
-use_ecs_dmaap_http() {
- echo -e $BOLD"ECS dmaap protocol setting"$EBOLD
+use_ics_dmaap_http() {
+ echo -e $BOLD"ICS dmaap protocol setting"$EBOLD
echo -e $RED" - NOT SUPPORTED - "$ERED
- echo -e " Using $BOLD http $EBOLD and $BOLD DMAAP $EBOLD towards ECS"
- ECS_ADAPTER_TYPE="MR-HTTP"
+ echo -e " Using $BOLD http $EBOLD and $BOLD DMAAP $EBOLD towards ICS"
+ ICS_ADAPTER_TYPE="MR-HTTP"
echo ""
}
# Setup paths to svc/container for internal and external access
# args: <protocol> <internal-port> <external-port>
-__ecs_set_protocoll() {
- echo -e $BOLD"$ECS_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $ECS_DISPLAY_NAME"
+__ics_set_protocoll() {
+ echo -e $BOLD"$ICS_DISPLAY_NAME protocol setting"$EBOLD
+ echo -e " Using $BOLD $1 $EBOLD towards $ICS_DISPLAY_NAME"
- ## Access to ECS
+ ## Access to ICS
- ECS_SERVICE_PATH=$1"://"$ECS_APP_NAME":"$2 # docker access, container->container and script->container via proxy
+ ICS_SERVICE_PATH=$1"://"$ICS_APP_NAME":"$2 # docker access, container->container and script->container via proxy
if [ $RUNMODE == "KUBE" ]; then
- ECS_SERVICE_PATH=$1"://"$ECS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+ ICS_SERVICE_PATH=$1"://"$ICS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
fi
- # ECS_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
- ECS_ADAPTER_TYPE="REST"
- ECS_ADAPTER=$ECS_SERVICE_PATH
+ # ICS_ADAPTER used for switching between REST and DMAAP (only REST supported currently)
+ ICS_ADAPTER_TYPE="REST"
+ ICS_ADAPTER=$ICS_SERVICE_PATH
echo ""
}
# Export env vars for config files, docker compose and kube resources
# args: PROXY|NOPROXY
-__ecs_export_vars() {
- export ECS_APP_NAME
- export ECS_APP_NAME_ALIAS
+__ics_export_vars() {
+ export ICS_APP_NAME
+ export ICS_APP_NAME_ALIAS
export KUBE_NONRTRIC_NAMESPACE
- export ECS_IMAGE
- export ECS_INTERNAL_PORT
- export ECS_INTERNAL_SECURE_PORT
- export ECS_EXTERNAL_PORT
- export ECS_EXTERNAL_SECURE_PORT
- export ECS_CONFIG_MOUNT_PATH
- export ECS_CONFIG_CONFIGMAP_NAME=$ECS_APP_NAME"-config"
- export ECS_DATA_CONFIGMAP_NAME=$ECS_APP_NAME"-data"
- export ECS_CONTAINER_MNT_DIR
- export ECS_HOST_MNT_DIR
- export ECS_CONFIG_FILE
+ export ICS_IMAGE
+ export ICS_INTERNAL_PORT
+ export ICS_INTERNAL_SECURE_PORT
+ export ICS_EXTERNAL_PORT
+ export ICS_EXTERNAL_SECURE_PORT
+ export ICS_CONFIG_MOUNT_PATH
+ export ICS_CONFIG_CONFIGMAP_NAME=$ICS_APP_NAME"-config"
+ export ICS_DATA_CONFIGMAP_NAME=$ICS_APP_NAME"-data"
+ export ICS_CONTAINER_MNT_DIR
+ export ICS_HOST_MNT_DIR
+ export ICS_CONFIG_FILE
export DOCKER_SIM_NWNAME
- export ECS_DISPLAY_NAME
+ export ICS_DISPLAY_NAME
+ export ICS_LOGPATH
-
- export ECS_DATA_PV_NAME=$ECS_APP_NAME"-pv"
- export ECS_DATA_PVC_NAME=$ECS_APP_NAME"-pvc"
+ export ICS_DATA_PV_NAME=$ICS_APP_NAME"-pv"
+ export ICS_DATA_PVC_NAME=$ICS_APP_NAME"-pvc"
#Create a unique path for the pv each time to prevent a previous volume to be reused
- export ECS_PV_PATH="ecsdata-"$(date +%s)
+ export ICS_PV_PATH="icsdata-"$(date +%s)
if [ $1 == "PROXY" ]; then
- export ECS_HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_CONFIG_PORT #Set if proxy is started
- export ECS_HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_CONFIG_HOST_NAME #Set if proxy is started
- if [ $ECS_HTTP_PROXY_CONFIG_PORT -eq 0 ] || [ -z "$ECS_HTTP_PROXY_CONFIG_HOST_NAME" ]; then
+ export ICS_HTTP_PROXY_CONFIG_PORT=$HTTP_PROXY_CONFIG_PORT #Set if proxy is started
+ export ICS_HTTP_PROXY_CONFIG_HOST_NAME=$HTTP_PROXY_CONFIG_HOST_NAME #Set if proxy is started
+ if [ $ICS_HTTP_PROXY_CONFIG_PORT -eq 0 ] || [ -z "$ICS_HTTP_PROXY_CONFIG_HOST_NAME" ]; then
echo -e $YELLOW" Warning: HTTP PROXY will not be configured, proxy app not started"$EYELLOW
else
echo " Configured with http proxy"
fi
else
- export ECS_HTTP_PROXY_CONFIG_PORT=0
- export ECS_HTTP_PROXY_CONFIG_HOST_NAME=""
+ export ICS_HTTP_PROXY_CONFIG_PORT=0
+ export ICS_HTTP_PROXY_CONFIG_HOST_NAME=""
echo " Configured without http proxy"
fi
}
-# Start the ECS
+# Start the ICS
# args: PROXY|NOPROXY <config-file>
# (Function for test scripts)
-start_ecs() {
+start_ics() {
- echo -e $BOLD"Starting $ECS_DISPLAY_NAME"$EBOLD
+ echo -e $BOLD"Starting $ICS_DISPLAY_NAME"$EBOLD
if [ $RUNMODE == "KUBE" ]; then
# Check if app shall be fully managed by the test script
- __check_included_image "ECS"
+ __check_included_image "ICS"
retcode_i=$?
# Check if app shall only be used by the testscipt
- __check_prestarted_image "ECS"
+ __check_prestarted_image "ICS"
retcode_p=$?
if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
- echo -e $RED"The $ECS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
- echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+ echo -e $RED"The $ICS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+ echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
exit
fi
if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
- echo -e $RED"The $ECS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
- echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+ echo -e $RED"The $ICS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+ echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
exit
fi
-
if [ $retcode_p -eq 0 ]; then
- echo -e " Using existing $ECS_APP_NAME deployment and service"
- echo " Setting ECS replicas=1"
- res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
- __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+ echo -e " Using existing $ICS_APP_NAME deployment and service"
+ echo " Setting ICS replicas=1"
+ res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+ __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
fi
# Check if app shall be fully managed by the test script
if [ $retcode_i -eq 0 ]; then
- echo -e " Creating $ECS_APP_NAME app and expose service"
+ echo -e " Creating $ICS_APP_NAME app and expose service"
#Check if nonrtric namespace exists, if not create it
__kube_create_namespace $KUBE_NONRTRIC_NAMESPACE
- __ecs_export_vars $1
+ __ics_export_vars $1
# Create config map for config
- datafile=$PWD/tmp/$ECS_CONFIG_FILE
+ datafile=$PWD/tmp/$ICS_CONFIG_FILE
cp $2 $datafile
- output_yaml=$PWD/tmp/ecs_cfc.yaml
- __kube_create_configmap $ECS_CONFIG_CONFIGMAP_NAME $KUBE_NONRTRIC_NAMESPACE autotest ECS $datafile $output_yaml
+ output_yaml=$PWD/tmp/ics_cfc.yaml
+ __kube_create_configmap $ICS_CONFIG_CONFIGMAP_NAME $KUBE_NONRTRIC_NAMESPACE autotest ICS $datafile $output_yaml
# Create pv
- input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"pv.yaml
- output_yaml=$PWD/tmp/ecs_pv.yaml
- __kube_create_instance pv $ECS_APP_NAME $input_yaml $output_yaml
+ input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"pv.yaml
+ output_yaml=$PWD/tmp/ics_pv.yaml
+ __kube_create_instance pv $ICS_APP_NAME $input_yaml $output_yaml
# Create pvc
- input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"pvc.yaml
- output_yaml=$PWD/tmp/ecs_pvc.yaml
- __kube_create_instance pvc $ECS_APP_NAME $input_yaml $output_yaml
+ input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"pvc.yaml
+ output_yaml=$PWD/tmp/ics_pvc.yaml
+ __kube_create_instance pvc $ICS_APP_NAME $input_yaml $output_yaml
# Create service
- input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"svc.yaml
- output_yaml=$PWD/tmp/ecs_svc.yaml
- __kube_create_instance service $ECS_APP_NAME $input_yaml $output_yaml
+ input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"svc.yaml
+ output_yaml=$PWD/tmp/ics_svc.yaml
+ __kube_create_instance service $ICS_APP_NAME $input_yaml $output_yaml
# Create app
- input_yaml=$SIM_GROUP"/"$ECS_COMPOSE_DIR"/"app.yaml
- output_yaml=$PWD/tmp/ecs_app.yaml
- __kube_create_instance app $ECS_APP_NAME $input_yaml $output_yaml
+ input_yaml=$SIM_GROUP"/"$ICS_COMPOSE_DIR"/"app.yaml
+ output_yaml=$PWD/tmp/ics_app.yaml
+ __kube_create_instance app $ICS_APP_NAME $input_yaml $output_yaml
fi
- # Tie the ECS to a worker node so that ECS will always be scheduled to the same worker node if the ECS pod is restarted
- # A PVC of type hostPath is mounted to ECS, for persistent storage, so the ECS must always be on the node which mounted the volume
+ # Tie the ICS to a worker node so that ICS will always be scheduled to the same worker node if the ICS pod is restarted
+ # A PVC of type hostPath is mounted to ICS, for persistent storage, so the ICS must always be on the node which mounted the volume
# Keep the initial worker node in case the pod need to be "restarted" - must be made to the same node due to a volume mounted on the host
if [ $retcode_i -eq 0 ]; then
- __ECS_WORKER_NODE=$(kubectl get pod -l "autotest=ECS" -n $KUBE_NONRTRIC_NAMESPACE -o jsonpath='{.items[*].spec.nodeName}')
- if [ -z "$__ECS_WORKER_NODE" ]; then
- echo -e $YELLOW" Cannot find worker node for pod for $ECS_APP_NAME, persistency may not work"$EYELLOW
+ __ICS_WORKER_NODE=$(kubectl get pod -l "autotest=ICS" -n $KUBE_NONRTRIC_NAMESPACE -o jsonpath='{.items[*].spec.nodeName}')
+ if [ -z "$__ICS_WORKER_NODE" ]; then
+ echo -e $YELLOW" Cannot find worker node for pod for $ICS_APP_NAME, persistency may not work"$EYELLOW
fi
else
- echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+ echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
fi
- __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+ __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
else
- __check_included_image 'ECS'
+ __check_included_image 'ICS'
if [ $? -eq 1 ]; then
- echo -e $RED"The ECS app is not included in this test script"$ERED
- echo -e $RED"ECS will not be started"$ERED
+ echo -e $RED"The ICS app is not included in this test script"$ERED
+ echo -e $RED"ICS will not be started"$ERED
exit 1
fi
curdir=$PWD
cd $SIM_GROUP
- cd ecs
- cd $ECS_HOST_MNT_DIR
+ cd ics
+ cd $ICS_HOST_MNT_DIR
#cd ..
if [ -d db ]; then
if [ "$(ls -A $DIR)" ]; then
cd $curdir
- __ecs_export_vars $1
+ __ics_export_vars $1
- dest_file=$SIM_GROUP/$ECS_COMPOSE_DIR/$ECS_HOST_MNT_DIR/$ECS_CONFIG_FILE
+ dest_file=$SIM_GROUP/$ICS_COMPOSE_DIR/$ICS_HOST_MNT_DIR/$ICS_CONFIG_FILE
envsubst < $2 > $dest_file
- __start_container $ECS_COMPOSE_DIR "" NODOCKERARGS 1 $ECS_APP_NAME
+ __start_container $ICS_COMPOSE_DIR "" NODOCKERARGS 1 $ICS_APP_NAME
- __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+ __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
fi
echo ""
return 0
}
-# Stop the ecs
+# Stop the ics
# args: -
# args: -
# (Function for test scripts)
-stop_ecs() {
- echo -e $BOLD"Stopping $ECS_DISPLAY_NAME"$EBOLD
+stop_ics() {
+ echo -e $BOLD"Stopping $ICS_DISPLAY_NAME"$EBOLD
if [ $RUNMODE == "KUBE" ]; then
- __check_prestarted_image "ECS"
+ __check_prestarted_image "ICS"
if [ $? -eq 0 ]; then
- echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
- res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
- __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 0
+ echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+ res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+ __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 0
return 0
fi
- __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ECS
+ __kube_scale_all_resources $KUBE_NONRTRIC_NAMESPACE autotest ICS
echo " Deleting the replica set - a new will be started when the app is started"
- tmp=$(kubectl delete rs -n $KUBE_NONRTRIC_NAMESPACE -l "autotest=ECS")
+ tmp=$(kubectl delete rs -n $KUBE_NONRTRIC_NAMESPACE -l "autotest=ICS")
if [ $? -ne 0 ]; then
echo -e $RED" Could not delete replica set "$RED
((RES_CONF_FAIL++))
return 1
fi
else
- docker stop $ECS_APP_NAME &> ./tmp/.dockererr
+ docker stop $ICS_APP_NAME &> ./tmp/.dockererr
if [ $? -ne 0 ]; then
- __print_err "Could not stop $ECS_APP_NAME" $@
+ __print_err "Could not stop $ICS_APP_NAME" $@
cat ./tmp/.dockererr
((RES_CONF_FAIL++))
return 1
return 0
}
-# Start a previously stopped ecs
+# Start a previously stopped ics
# args: -
# (Function for test scripts)
-start_stopped_ecs() {
- echo -e $BOLD"Starting (the previously stopped) $ECS_DISPLAY_NAME"$EBOLD
+start_stopped_ics() {
+ echo -e $BOLD"Starting (the previously stopped) $ICS_DISPLAY_NAME"$EBOLD
if [ $RUNMODE == "KUBE" ]; then
- __check_prestarted_image "ECS"
+ __check_prestarted_image "ICS"
if [ $? -eq 0 ]; then
- echo -e $YELLOW" Persistency may not work for app $ECS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
- res_type=$(__kube_get_resource_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
- __kube_scale $res_type $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
- __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+ echo -e $YELLOW" Persistency may not work for app $ICS_APP_NAME in multi-worker node config when running it as a prestarted app"$EYELLOW
+ res_type=$(__kube_get_resource_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE)
+ __kube_scale $res_type $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+ __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
return 0
fi
# Tie the PMS to the same worker node it was initially started on
# A PVC of type hostPath is mounted to PMS, for persistent storage, so the PMS must always be on the node which mounted the volume
- if [ -z "$__ECS_WORKER_NODE" ]; then
+ if [ -z "$__ICS_WORKER_NODE" ]; then
echo -e $RED" No initial worker node found for pod "$RED
((RES_CONF_FAIL++))
return 1
else
- echo -e $BOLD" Setting nodeSelector kubernetes.io/hostname=$__ECS_WORKER_NODE to deployment for $ECS_APP_NAME. Pod will always run on this worker node: $__PA_WORKER_NODE"$BOLD
+ echo -e $BOLD" Setting nodeSelector kubernetes.io/hostname=$__ICS_WORKER_NODE to deployment for $ICS_APP_NAME. Pod will always run on this worker node: $__ICS_WORKER_NODE"$BOLD
echo -e $BOLD" The mounted volume is mounted as hostPath and only available on that worker node."$BOLD
- tmp=$(kubectl patch deployment $ECS_APP_NAME -n $KUBE_NONRTRIC_NAMESPACE --patch '{"spec": {"template": {"spec": {"nodeSelector": {"kubernetes.io/hostname": "'$__ECS_WORKER_NODE'"}}}}}')
+ tmp=$(kubectl patch deployment $ICS_APP_NAME -n $KUBE_NONRTRIC_NAMESPACE --patch '{"spec": {"template": {"spec": {"nodeSelector": {"kubernetes.io/hostname": "'$__ICS_WORKER_NODE'"}}}}}')
if [ $? -ne 0 ]; then
- echo -e $YELLOW" Cannot set nodeSelector to deployment for $ECS_APP_NAME, persistency may not work"$EYELLOW
+ echo -e $YELLOW" Cannot set nodeSelector to deployment for $ICS_APP_NAME, persistency may not work"$EYELLOW
fi
- __kube_scale deployment $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
+ __kube_scale deployment $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE 1
fi
else
- docker start $ECS_APP_NAME &> ./tmp/.dockererr
+ docker start $ICS_APP_NAME &> ./tmp/.dockererr
if [ $? -ne 0 ]; then
- __print_err "Could not start (the stopped) $ECS_APP_NAME" $@
+ __print_err "Could not start (the stopped) $ICS_APP_NAME" $@
cat ./tmp/.dockererr
((RES_CONF_FAIL++))
return 1
fi
fi
- __check_service_start $ECS_APP_NAME $ECS_SERVICE_PATH$ECS_ALIVE_URL
+ __check_service_start $ICS_APP_NAME $ICS_SERVICE_PATH$ICS_ALIVE_URL
if [ $? -ne 0 ]; then
return 1
fi
return 0
}
-# Turn on debug level tracing in ECS
+# Turn on debug level tracing in ICS
# args: -
# (Function for test scripts)
-set_ecs_debug() {
- echo -e $BOLD"Setting ecs debug logging"$EBOLD
- curlString="$ECS_SERVICE_PATH$ECS_ACTUATOR -X POST -H Content-Type:application/json -d {\"configuredLevel\":\"debug\"}"
+set_ics_debug() {
+ echo -e $BOLD"Setting ics debug logging"$EBOLD
+ curlString="$ICS_SERVICE_PATH$ICS_ACTUATOR -X POST -H Content-Type:application/json -d {\"configuredLevel\":\"debug\"}"
result=$(__do_curl "$curlString")
if [ $? -ne 0 ]; then
__print_err "Could not set debug mode" $@
return 0
}
-# Turn on trace level tracing in ECS
+# Turn on trace level tracing in ICS
# args: -
# (Function for test scripts)
-set_ecs_trace() {
- echo -e $BOLD"Setting ecs trace logging"$EBOLD
- curlString="$ECS_SERVICE_PATH/actuator/loggers/org.oransc.enrichment -X POST -H Content-Type:application/json -d {\"configuredLevel\":\"trace\"}"
+set_ics_trace() {
+ echo -e $BOLD"Setting ics trace logging"$EBOLD
+ curlString="$ICS_SERVICE_PATH/actuator/loggers/org.oransc.information -X POST -H Content-Type:application/json -d {\"configuredLevel\":\"trace\"}"
result=$(__do_curl "$curlString")
if [ $? -ne 0 ]; then
__print_err "Could not set trace mode" $@
return 0
}
-# Perform curl retries when making direct call to ECS for the specified http response codes
+# Perform curl retries when making direct call to ICS for the specified http response codes
# Speace separated list of http response codes
# args: [<response-code>]*
-use_ecs_retries() {
- echo -e $BOLD"Do curl retries to the ECS REST inteface for these response codes:$@"$EBOLD
- ECS_RETRY_CODES=$@
+use_ics_retries() {
+ echo -e $BOLD"Do curl retries to the ICS REST interface for these response codes:$@"$EBOLD
+ ICS_RETRY_CODES=$@
echo ""
return 0
}
-# Check the ecs logs for WARNINGs and ERRORs
+# Check the ics logs for WARNINGs and ERRORs
# args: -
# (Function for test scripts)
-check_ecs_logs() {
- __check_container_logs "ECS" $ECS_APP_NAME $ECS_LOGPATH WARN ERR
+check_ics_logs() {
+ __check_container_logs "ICS" $ICS_APP_NAME $ICS_LOGPATH WARN ERR
}
-# Tests if a variable value in the ECS is equal to a target value and and optional timeout.
+# Tests if a variable value in the ICS is equal to a target value, with an optional timeout.
# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is
# equal to the target or not.
# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
# before setting pass or fail depending on if the variable value becomes equal to the target
# value or not.
# (Function for test scripts)
-ecs_equal() {
+ics_equal() {
if [ $# -eq 2 ] || [ $# -eq 3 ]; then
- __var_test ECS "$ECS_SERVICE_PATH/" $1 "=" $2 $3
+ __var_test ICS "$ICS_SERVICE_PATH/" $1 "=" $2 $3
else
- __print_err "Wrong args to ecs_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+ __print_err "Wrong args to ics_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
fi
}
##########################################
-######### A1-E Enrichment API ##########
+######### A1-E Information API ##########
##########################################
-#Function prefix: ecs_api_a1
+#Function prefix: ics_api_a1
# API Test function: GET /A1-EI​/v1​/eitypes​/{eiTypeId}​/eijobs
# args: <response-code> <type-id> <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
# args (flat uri structure): <response-code> <type-id>|NOTYPE <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
# (Function for test scripts)
-ecs_api_a1_get_job_ids() {
+ics_api_a1_get_job_ids() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
fi
query="/A1-EI/v1/eijobs$search"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET ​/A1-EI​/v1​/eitypes​/{eiTypeId}
# args: <response-code> <type-id> [<schema-file>]
# (Function for test scripts)
-ecs_api_a1_get_type() {
+ics_api_a1_get_type() {
__log_test_start $@
if [ $# -lt 2 ] || [ $# -gt 3 ]; then
fi
query="/A1-EI/v1/eitypes/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /A1-EI/v1/eitypes
# args: <response-code> [ (EMPTY | [<type-id>]+) ]
# (Function for test scripts)
-ecs_api_a1_get_type_ids() {
+ics_api_a1_get_type_ids() {
__log_test_start $@
if [ $# -lt 1 ]; then
fi
query="/A1-EI/v1/eitypes"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# args: <response-code> <type-id> <job-id> [<status>]
# args (flat uri structure): <response-code> <job-id> [<status> [<timeout>]]
# (Function for test scripts)
-ecs_api_a1_get_job_status() {
+ics_api_a1_get_job_status() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
query="/A1-EI/v1/eitypes/$2/eijobs/$3/status"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
start=$SECONDS
for (( ; ; )); do
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $# -eq 4 ]; then
# args: <response-code> <type-id> <job-id> [<target-url> <owner-id> <template-job-file>]
# args (flat uri structure): <response-code> <job-id> [<type-id> <target-url> <owner-id> <template-job-file>]
# (Function for test scripts)
-ecs_api_a1_get_job() {
+ics_api_a1_get_job() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
fi
query="/A1-EI/v1/eijobs/$2"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# args: <response-code> <type-id> <job-id>
# args (flat uri structure): <response-code> <job-id>
# (Function for test scripts)
-ecs_api_a1_delete_job() {
+ics_api_a1_delete_job() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
fi
query="/A1-EI/v1/eijobs/$2"
fi
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# args: <response-code> <type-id> <job-id> <target-url> <owner-id> <template-job-file>
# args (flat uri structure): <response-code> <job-id> <type-id> <target-url> <owner-id> <notification-url> <template-job-file>
# (Function for test scripts)
-ecs_api_a1_put_job() {
+ics_api_a1_put_job() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
query="/A1-EI/v1/eijobs/$2"
fi
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
##########################################
-#### Enrichment Data Producer API ####
+#### Information Data Producer API ####
##########################################
-# Function prefix: ecs_api_edp
+# Function prefix: ics_api_edp
# API Test function: GET /ei-producer/v1/eitypes
# API Test function: GET /data-producer/v1/info-types
# args: <response-code> [ EMPTY | <type-id>+]
# (Function for test scripts)
-ecs_api_edp_get_type_ids() {
+ics_api_edp_get_type_ids() {
__log_test_start $@
if [ $# -lt 1 ]; then
__print_err "<response-code> [ EMPTY | <type-id>+]" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-types"
else
query="/ei-producer/v1/eitypes"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-producer/v1/info-producers/{infoProducerId}/status
# args: <response-code> <producer-id> [<status> [<timeout>]]
# (Function for test scripts)
-ecs_api_edp_get_producer_status() {
+ics_api_edp_get_producer_status() {
__log_test_start $@
if [ $# -lt 2 ] || [ $# -gt 4 ]; then
__print_err "<response-code> <producer-id> [<status> [<timeout>]]" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-producers/$2/status"
else
query="/ei-producer/v1/eiproducers/$2/status"
fi
start=$SECONDS
for (( ; ; )); do
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $# -eq 4 ]; then
# API Test function: GET /ei-producer/v1/eiproducers
# args (v1_1): <response-code> [ EMPTY | <producer-id>+]
# (Function for test scripts)
-ecs_api_edp_get_producer_ids() {
+ics_api_edp_get_producer_ids() {
__log_test_start $@
if [ $# -lt 1 ]; then
fi
query="/ei-producer/v1/eiproducers"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-producer/v1/info-producers
# args (v1_2): <response-code> [ ( NOTYPE | <type-id> ) [ EMPTY | <producer-id>+] ]
# (Function for test scripts)
-ecs_api_edp_get_producer_ids_2() {
+ics_api_edp_get_producer_ids_2() {
__log_test_start $@
if [ $# -lt 1 ]; then
__print_err "<response-code> [ ( NOTYPE | <type-id> ) [ EMPTY | <producer-id>+] ]" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-producers"
if [ $# -gt 1 ] && [ $2 != "NOTYPE" ]; then
query=$query"?info_type_id=$2"
query=$query"?ei_type_id=$2"
fi
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /ei-producer/v1/eitypes/{eiTypeId}
# args: (v1_1) <response-code> <type-id> [<job-schema-file> (EMPTY | [<producer-id>]+)]
# (Function for test scripts)
-ecs_api_edp_get_type() {
+ics_api_edp_get_type() {
__log_test_start $@
paramError=1
fi
query="/ei-producer/v1/eitypes/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-producer/v1/info-types/{infoTypeId}
# args: (v1_2) <response-code> <type-id> [<job-schema-file> [ <info-type-info> ]]
# (Function for test scripts)
-ecs_api_edp_get_type_2() {
+ics_api_edp_get_type_2() {
__log_test_start $@
paramError=1
if [ $# -eq 3 ]; then
paramError=0
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
if [ $# -eq 4 ]; then
paramError=0
fi
__print_err "<response-code> <type-id> [<job-schema-file> [ <info-type-info> ]]" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-types/$2"
else
query="/ei-producer/v1/eitypes/$2"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
fi
info_data=",\"info_type_information\":$info_data"
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
targetJson="{\"info_job_data_schema\":$schema $info_data}"
else
targetJson="{\"ei_job_data_schema\":$schema}"
# API Test function: PUT /data-producer/v1/info-types/{infoTypeId}
# args: (v1_2) <response-code> <type-id> <job-schema-file> [ <info-type-info> ]
# (Function for test scripts)
-ecs_api_edp_put_type_2() {
+ics_api_edp_put_type_2() {
__log_test_start $@
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPE-INFO"* ]]; then
if [ $# -lt 3 ] || [ $# -gt 4 ]; then
__print_err "<response-code> <type-id> <job-schema-file> [ <info-type-info> ]" $@
return 1
info_data=",\"info_type_information\":$info_data"
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
schema=$(cat $3)
input_json="{\"info_job_data_schema\":$schema $info_data}"
file="./tmp/put_type.json"
query="/ei-producer/v1/eitypes/$2"
fi
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: DELETE /data-producer/v1/info-types/{infoTypeId}
# args: (v1_2) <response-code> <type-id>
# (Function for test scripts)
-ecs_api_edp_delete_type_2() {
+ics_api_edp_delete_type_2() {
__log_test_start $@
if [ $# -ne 2 ]; then
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-types/$2"
else
query="/ei-producer/v1/eitypes/$2"
fi
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /ei-producer/v1/eiproducers/{eiProducerId}
# args: (v1_1) <response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | [<type-id> <schema-file>]+) ]
# (Function for test scripts)
-ecs_api_edp_get_producer() {
+ics_api_edp_get_producer() {
__log_test_start $@
#Possible arg count: 2, 5 6, 8, 10 etc
fi
query="/ei-producer/v1/eiproducers/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-producer/v1/info-producers/{infoProducerId}
# args (v1_2): <response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | <type-id>+) ]
# (Function for test scripts)
-ecs_api_edp_get_producer_2() {
+ics_api_edp_get_producer_2() {
__log_test_start $@
#Possible arg count: 2, 5, 6, 7, 8 etc
__print_err "<response-code> <producer-id> [<job-callback> <supervision-callback> (EMPTY | <type-id>+) ]" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-producers/$2"
else
query="/ei-producer/v1/eiproducers/$2"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
fi
targetJson=$targetJson"]"
if [ $# -gt 4 ]; then
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
targetJson="{\"supported_info_types\":$targetJson,\"info_job_callback_url\": \"$3\",\"info_producer_supervision_callback_url\": \"$4\"}"
else
targetJson="{\"supported_ei_types\":$targetJson,\"ei_job_callback_url\": \"$3\",\"ei_producer_supervision_callback_url\": \"$4\"}"
# API Test function: DELETE /data-producer/v1/info-producers/{infoProducerId}
# args: <response-code> <producer-id>
# (Function for test scripts)
-ecs_api_edp_delete_producer() {
+ics_api_edp_delete_producer() {
__log_test_start $@
if [ $# -lt 2 ]; then
__print_err "<response-code> <producer-id>" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-producers/$2"
else
query="/ei-producer/v1/eiproducers/$2"
fi
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: PUT /ei-producer/v1/eiproducers/{eiProducerId}
# args: (v1_1) <response-code> <producer-id> <job-callback> <supervision-callback> NOTYPE|[<type-id> <schema-file>]+
# (Function for test scripts)
-ecs_api_edp_put_producer() {
+ics_api_edp_put_producer() {
__log_test_start $@
#Valid number of parametrer 5,6,8,10,
file="./tmp/.p.json"
echo "$inputJson" > $file
query="/ei-producer/v1/eiproducers/$2"
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: PUT /data-producer/v1/info-producers/{infoProducerId}
# args: (v1_2) <response-code> <producer-id> <job-callback> <supervision-callback> NOTYPE|[<type-id>+]
# (Function for test scripts)
-ecs_api_edp_put_producer_2() {
+ics_api_edp_put_producer_2() {
__log_test_start $@
#Valid number of parametrer 5,6,8,10,
inputJson=$inputJson"\""${arr[$i]}"\""
done
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
inputJson="\"supported_info_types\":"$inputJson"]"
inputJson=$inputJson",\"info_job_callback_url\": \"$3\",\"info_producer_supervision_callback_url\": \"$4\""
echo "$inputJson" > $file
query="/ei-producer/v1/eiproducers/$2"
fi
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /ei-producer/v1/eiproducers/{eiProducerId}/eijobs
# args: (V1-1) <response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)
# (Function for test scripts)
-ecs_api_edp_get_producer_jobs() {
+ics_api_edp_get_producer_jobs() {
__log_test_start $@
#Valid number of parameter 2,3,7,11
fi
query="/ei-producer/v1/eiproducers/$2/eijobs"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
__log_test_fail_status_code $1 $status
# API Test function: GET /data-producer/v1/info-producers/{infoProducerId}/info-jobs
# args: (V1-2) <response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)
# (Function for test scripts)
-ecs_api_edp_get_producer_jobs_2() {
+ics_api_edp_get_producer_jobs_2() {
__log_test_start $@
#Valid number of parameter 2,3,7,11
__print_err "<response-code> <producer-id> (EMPTY | [<job-id> <type-id> <target-url> <job-owner> <template-job-file>]+)" $@
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
query="/data-producer/v1/info-producers/$2/info-jobs"
else
query="/ei-producer/v1/eiproducers/$2/eijobs"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
__log_test_fail_status_code $1 $status
__log_test_fail_general "Job template file "${arr[$i+4]}", does not exist"
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
targetJson=$targetJson"{\"info_job_identity\":\"${arr[$i]}\",\"info_type_identity\":\"${arr[$i+1]}\",\"target_uri\":\"${arr[$i+2]}\",\"owner\":\"${arr[$i+3]}\",\"info_job_data\":$jobfile, \"last_updated\":\"????\"}"
else
targetJson=$targetJson"{\"ei_job_identity\":\"${arr[$i]}\",\"ei_type_identity\":\"${arr[$i+1]}\",\"target_uri\":\"${arr[$i+2]}\",\"owner\":\"${arr[$i+3]}\",\"ei_job_data\":$jobfile, \"last_updated\":\"????\"}"
##########################################
#### Service status ####
##########################################
-# Function prefix: ecs_api_service
+# Function prefix: ics_api_service
# API Test function: GET ​/status
# args: <response-code>
# (Function for test scripts)
-ecs_api_service_status() {
+ics_api_service_status() {
__log_test_start $@
if [ $# -lt 1 ]; then
__print_err "<response-code>" $@
return 1
fi
- res="$(__do_curl_to_api ECS GET /status)"
+ res="$(__do_curl_to_api ICS GET /status)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
__log_test_fail_status_code $1 $status
###########################################
######### Info data consumer API ##########
###########################################
-#Function prefix: ecs_api_idc
+#Function prefix: ics_api_idc
# API Test function: GET /data-consumer/v1/info-types
# args: <response-code> [ (EMPTY | [<type-id>]+) ]
# (Function for test scripts)
-ecs_api_idc_get_type_ids() {
+ics_api_idc_get_type_ids() {
__log_test_start $@
if [ $# -lt 1 ]; then
fi
query="/data-consumer/v1/info-types"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-consumer/v1/info-jobs
# args: <response-code> <type-id>|NOTYPE <owner-id>|NOOWNER [ EMPTY | <job-id>+ ]
# (Function for test scripts)
-ecs_api_idc_get_job_ids() {
+ics_api_idc_get_job_ids() {
__log_test_start $@
# Valid number of parameters 4,5,6 etc
fi
query="/data-consumer/v1/info-jobs$search"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-consumer/v1/info-jobs/{infoJobId}
# args: <response-code> <job-id> [<type-id> <target-url> <owner-id> <template-job-file>]
# (Function for test scripts)
-ecs_api_idc_get_job() {
+ics_api_idc_get_job() {
__log_test_start $@
if [ $# -ne 2 ] && [ $# -ne 7 ]; then
return 1
fi
query="/data-consumer/v1/info-jobs/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: PUT ​/data-consumer/v1/info-jobs/{infoJobId}
# args: <response-code> <job-id> <type-id> <target-url> <owner-id> <notification-url> <template-job-file> [ VALIDATE ]
# (Function for test scripts)
-ecs_api_idc_put_job() {
+ics_api_idc_put_job() {
__log_test_start $@
if [ $# -lt 7 ] || [ $# -gt 8 ]; then
fi
fi
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: DELETE ​/data-consumer/v1/info-jobs/{infoJobId}
# args: <response-code> <job-id>
# (Function for test scripts)
-ecs_api_idc_delete_job() {
+ics_api_idc_delete_job() {
__log_test_start $@
if [ $# -ne 2 ]; then
return 1
fi
query="/data-consumer/v1/info-jobs/$2"
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET ​/data-consumer/v1/info-types/{infoTypeId}
# args: <response-code> <type-id> [<schema-file> [<type-status> <producers-count]]
# (Function for test scripts)
-ecs_api_idc_get_type() {
+ics_api_idc_get_type() {
__log_test_start $@
if [ $# -lt 2 ] || [ $# -gt 5 ]; then
fi
query="/data-consumer/v1/info-types/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# This test only status during an optional timeout. No test of the list of producers
# args: <response-code> <job-id> [<status> [<timeout>]]
# (Function for test scripts)
-ecs_api_idc_get_job_status() {
+ics_api_idc_get_job_status() {
__log_test_start $@
if [ $# -lt 2 ] && [ $# -gt 4 ]; then
start=$SECONDS
for (( ; ; )); do
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $# -eq 4 ]; then
# This function test status and the list of producers with and optional timeout
# args: <response-code> <job-id> [<status> EMPTYPROD|( <prod-count> <producer-id>+ ) [<timeout>]]
# (Function for test scripts)
-ecs_api_idc_get_job_status2() {
+ics_api_idc_get_job_status2() {
__log_test_start $@
param_error=0
idx=$(($4+4))
timeout=${args[$idx]}
fi
- for ((ecs_i = 0 ; ecs_i < $4 ; ecs_i++)); do
- idx=$(($ecs_i+4))
- if [ $ecs_i -gt 0 ]; then
+ for ((ics_i = 0 ; ics_i < $4 ; ics_i++)); do
+ idx=$(($ics_i+4))
+ if [ $ics_i -gt 0 ]; then
targetJson=$targetJson","
fi
targetJson=$targetJson"\""${args[$idx]}"\""
start=$SECONDS
for (( ; ; )); do
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $# -gt 2 ]; then
# API Test function: GET /data-consumer/v1/info-type-subscription
# args: <response-code> <owner-id>|NOOWNER [ EMPTY | <subscription-id>+]
# (Function for test scripts)
-ecs_api_idc_get_subscription_ids() {
+ics_api_idc_get_subscription_ids() {
__log_test_start $@
if [ $# -lt 3 ]; then
search="?owner="$2
fi
- res="$(__do_curl_to_api ECS GET $query$search)"
+ res="$(__do_curl_to_api ICS GET $query$search)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: GET /data-consumer/v1/info-type-subscription/{subscriptionId}
# args: <response-code> <subscription-id> [ <owner-id> <status-uri> ]
# (Function for test scripts)
-ecs_api_idc_get_subscription() {
+ics_api_idc_get_subscription() {
__log_test_start $@
if [ $# -ne 2 ] && [ $# -ne 4 ]; then
fi
query="/data-consumer/v1/info-type-subscription/$2"
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: PUT /data-consumer/v1/info-type-subscription/{subscriptionId}
# args: <response-code> <subscription-id> <owner-id> <status-uri>
# (Function for test scripts)
-ecs_api_idc_put_subscription() {
+ics_api_idc_put_subscription() {
__log_test_start $@
if [ $# -ne 4 ]; then
echo "$inputJson" > $file
query="/data-consumer/v1/info-type-subscription/$2"
- res="$(__do_curl_to_api ECS PUT $query $file)"
+ res="$(__do_curl_to_api ICS PUT $query $file)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
# API Test function: DELETE /data-consumer/v1/info-type-subscription/{subscriptionId}
# args: <response-code> <subscription-id>
# (Function for test scripts)
-ecs_api_idc_delete_subscription() {
+ics_api_idc_delete_subscription() {
__log_test_start $@
if [ $# -ne 2 ]; then
fi
query="/data-consumer/v1/info-type-subscription/$2"
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne $1 ]; then
##########################################
#### Reset jobs ####
##########################################
-# Function prefix: ecs_api_admin
+# Function prefix: ics_api_admin
# Admin to remove all jobs
# args: <response-code> [ <type> ]
# (Function for test scripts)
-ecs_api_admin_reset() {
+ics_api_admin_reset() {
__log_test_start $@
if [ -z "$FLAT_A1_EI" ]; then
else
query="/A1-EI/v1/eijobs"
fi
- res="$(__do_curl_to_api ECS GET $query)"
+ res="$(__do_curl_to_api ICS GET $query)"
status=${res:${#res}-3}
if [ $status -ne 200 ]; then
echo "Not supported for non-flat EI api"
else
query="/A1-EI/v1/eijobs/$job"
- res="$(__do_curl_to_api ECS DELETE $query)"
+ res="$(__do_curl_to_api ICS DELETE $query)"
status=${res:${#res}-3}
if [ $status -ne 204 ]; then
__log_test_fail_status_code $1 $status
##########################################
-# Admin reset to remove all data in ecs; jobs, producers etc
+# Admin reset to remove all data in ics; jobs, producers etc
# NOTE - only works in kubernetes and the pod should not be running
# args: -
# (Function for test scripts)
-ecs_kube_pvc_reset() {
+ics_kube_pvc_reset() {
__log_test_start $@
- pvc_name=$(kubectl get pvc -n $KUBE_NONRTRIC_NAMESPACE --no-headers -o custom-columns=":metadata.name" | grep enrichment)
+ pvc_name=$(kubectl get pvc -n $KUBE_NONRTRIC_NAMESPACE --no-headers -o custom-columns=":metadata.name" | grep information)
if [ -z "$pvc_name" ]; then
- pvc_name=enrichmentservice-pvc
+ pvc_name=informationservice-pvc
fi
echo " Trying to reset pvc: "$pvc_name
- __kube_clean_pvc $ECS_APP_NAME $KUBE_NONRTRIC_NAMESPACE $pvc_name $ECS_CONTAINER_MNT_DIR
+ __kube_clean_pvc $ICS_APP_NAME $KUBE_NONRTRIC_NAMESPACE $pvc_name $ICS_CONTAINER_MNT_DIR
__log_test_pass
return 0
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+# This is a script that contains container/service management functions for Kafka producer/consumer
+
+################ Test engine functions ################
+
+# Create the image var used during the test
+# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
+# <image-tag-suffix> is present only for images with staging, snapshot,release tags
+__KAFKAPC_imagesetup() {
+ # LOCAL: KAFKAPC is built by the test environment itself (see __KAFKAPC_imagebuild), so no remote tag suffix applies
+ __check_and_create_image_var KAFKAPC "KAFKAPC_IMAGE" "KAFKAPC_IMAGE_BASE" "KAFKAPC_IMAGE_TAG" LOCAL "$KAFKAPC_DISPLAY_NAME"
+}
+
+# Pull image from remote repo or use locally built image
+# arg: <pull-policy-override> <pull-policy-original>
+# <pull-policy-override> Shall be used for images allowing overriding. For example use a local image when test is started to use released images
+# <pull-policy-original> Shall be used for images that does not allow overriding
+# Both var may contain: 'remote', 'remote-remove' or 'local'
+__KAFKAPC_imagepull() {
+ # Uses $2, the non-overridable pull policy - KAFKAPC does not allow pull-policy override
+ __check_and_pull_image $2 "$KAFKAPC_DISPLAY_NAME" $KAFKAPC_APP_NAME KAFKAPC_IMAGE
+}
+
+# Build image (only for simulator or interfaces stubs owned by the test environment)
+# arg: <image-tag-suffix> (selects staging, snapshot, release etc)
+# <image-tag-suffix> is present only for images with staging, snapshot,release tags
+__KAFKAPC_imagebuild() {
+
+ cd ../$KAFKAPC_BUILD_DIR
+ echo " Building KAFKAPC - $KAFKAPC_DISPLAY_NAME - image: $KAFKAPC_IMAGE"
+ # Build output is captured in .dockererr so it can be dumped on failure
+ docker build --build-arg NEXUS_PROXY_REPO=$NEXUS_PROXY_REPO -t $KAFKAPC_IMAGE . &> .dockererr
+ if [ $? -eq 0 ]; then
+ echo -e $GREEN" Build Ok"$EGREEN
+ # Retag/push the image - presumably only effective when a custom image repo is configured
+ __retag_and_push_image KAFKAPC_IMAGE
+ if [ $? -ne 0 ]; then
+ exit 1
+ fi
+ else
+ # Build failure is fatal for the whole test run: count it, show the docker output and exit
+ echo -e $RED" Build Failed"$ERED
+ ((RES_CONF_FAIL++))
+ cat .dockererr
+ echo -e $RED"Exiting...."$ERED
+ exit 1
+ fi
+}
+
+# Generate a string for each included image using the app display name and a docker images format string
+# If a custom image repo is used then also the source image from the local repo is listed
+# arg: <docker-images-format-string> <file-to-append>
+__KAFKAPC_image_data() {
+ echo -e "$KAFKAPC_DISPLAY_NAME\t$(docker images --format $1 $KAFKAPC_IMAGE)" >> $2
+ # Also list the source image when a custom image repo is in use (KAFKAPC_IMAGE_SOURCE set)
+ if [ ! -z "$KAFKAPC_IMAGE_SOURCE" ]; then
+ echo -e "-- source image --\t$(docker images --format $1 $KAFKAPC_IMAGE_SOURCE)" >> $2
+ fi
+}
+
+# Scale kubernetes resources to zero
+# All resources shall be ordered to be scaled to 0, if relevant. If not relevant to scale, then do no action.
+# This function is called for apps fully managed by the test script
+__KAFKAPC_kube_scale_zero() {
+ # Scale all resources labelled autotest=KAFKAPC in the sim namespace down to 0 replicas
+ __kube_scale_all_resources $KUBE_SIM_NAMESPACE autotest KAFKAPC
+}
+
+# Scale kubernetes resources to zero and wait until this has been accomplished, if relevant. If not relevant to scale, then do no action.
+# This function is called for prestarted apps not managed by the test script.
+__KAFKAPC_kube_scale_zero_and_wait() {
+ # Prestarted KAFKAPC is never scaled by the test script - just log that fact
+ echo -e $RED" KAFKAPC app is not scaled in this state"$ERED
+}
+
+# Delete all kube resources for the app
+# This function is called for apps managed by the test script.
+__KAFKAPC_kube_delete_all() {
+ # Delete all resources labelled autotest=KAFKAPC in the sim namespace
+ __kube_delete_all_resources $KUBE_SIM_NAMESPACE autotest KAFKAPC
+}
+
+# Store docker logs
+# This function is called for apps managed by the test script.
+# args: <log-dir> <file-prefix>
+__KAFKAPC_store_docker_logs() {
+ if [ $RUNMODE == "KUBE" ]; then
+ # Kube: collect full logs (--tail=-1) from all pods carrying the autotest=KAFKAPC label
+ kubectl logs -l "autotest=KAFKAPC" -n $KUBE_SIM_NAMESPACE --tail=-1 > $1$2_kafkapc.log 2>&1
+ else
+ # Docker: single container, addressed by app name
+ docker logs $KAFKAPC_APP_NAME > $1$2_kafkapc.log 2>&1
+ fi
+}
+
+# Initial setup of protocol, host and ports
+# This function is called for apps managed by the test script.
+# args: -
+__KAFKAPC_initial_setup() {
+ # Default to plain http; test scripts may switch later with use_kafkapc_https
+ use_kafkapc_http
+}
+
+# Set app short-name, app name and namespace for logging runtime statistics of kubernetes pods or docker containers
+# For docker, the namespace shall be excluded
+# This function is called for apps managed by the test script as well as for prestarted apps.
+# args: -
+__KAFKAPC_statisics_setup() {
+ if [ $RUNMODE == "KUBE" ]; then
+ # Kube: <short-name> <app-name> <namespace>
+ echo "KAFKAPC $KAFKAPC_APP_NAME $KUBE_SIM_NAMESPACE"
+ else
+ # Docker: namespace is excluded per the hook contract
+ echo "KAFKAPC $KAFKAPC_APP_NAME"
+ fi
+}
+
+#######################################################
+
+#######################################################
+
+# Set http as the protocol to use for all communication to the Kafka procon
+# args: -
+# (Function for test scripts)
+use_kafkapc_http() {
+ # Plain http on the non-secure internal/external ports
+ __kafkapc_set_protocoll "http" $KAFKAPC_INTERNAL_PORT $KAFKAPC_EXTERNAL_PORT
+}
+
+# Set httpS as the protocol to use for all communication to the Kafka procon
+# args: -
+# (Function for test scripts)
+use_kafkapc_https() {
+ # https on the secure internal/external ports
+ __kafkapc_set_protocoll "https" $KAFKAPC_INTERNAL_SECURE_PORT $KAFKAPC_EXTERNAL_SECURE_PORT
+}
+
+# Setup paths to svc/container for internal and external access
+# args: <protocol> <internal-port> <external-port>
+__kafkapc_set_protocoll() {
+ echo -e $BOLD"$KAFKAPC_DISPLAY_NAME protocol setting"$EBOLD
+ echo -e " Using $BOLD $1 $EBOLD towards $KAFKAPC_DISPLAY_NAME"
+
+ ## Access to Kafka procon
+
+ KAFKAPC_SERVICE_PATH=$1"://"$KAFKAPC_APP_NAME":"$2 # docker access, container->container and script->container via proxy
+ if [ $RUNMODE == "KUBE" ]; then
+ KAFKAPC_SERVICE_PATH=$1"://"$KAFKAPC_APP_NAME.$KUBE_SIM_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
+ fi
+
+ # REST adapter variables used by __do_curl_to_api to reach this app
+ KAFKAPC_ADAPTER_TYPE="REST"
+ KAFKAPC_ADAPTER=$KAFKAPC_SERVICE_PATH
+
+ echo ""
+}
+
+### Admin API functions Kafka procon
+
+###########################
+### Kafka Procon functions
+###########################
+
+# Export env vars for config files, docker compose and kube resources
+# args:
+__kafkapc_export_vars() {
+ # Exported vars are substituted into the compose files and kube yaml templates at start
+ export KAFKAPC_APP_NAME
+ export KAFKAPC_DISPLAY_NAME
+
+ export DOCKER_SIM_NWNAME
+ export KUBE_SIM_NAMESPACE
+
+ export KAFKAPC_IMAGE
+ export KAFKAPC_INTERNAL_PORT
+ export KAFKAPC_INTERNAL_SECURE_PORT
+ export KAFKAPC_EXTERNAL_PORT
+ export KAFKAPC_EXTERNAL_SECURE_PORT
+
+ # Kafka bootstrap address the procon connects to (set by the MR/kafka script)
+ export MR_KAFKA_SERVICE_PATH
+}
+
+
+# Start the Kafka procon in the simulator group
+# args: -
+# (Function for test scripts)
+start_kafkapc() {
+
+ echo -e $BOLD"Starting $KAFKAPC_DISPLAY_NAME"$EBOLD
+
+ if [ $RUNMODE == "KUBE" ]; then
+
+ # Check if app shall be fully managed by the test script
+ __check_included_image "KAFKAPC"
+ retcode_i=$?
+
+ # Check if app shall only be used by the test script
+ __check_prestarted_image "KAFKAPC"
+ retcode_p=$?
+
+ # The app must be exactly one of managed or prestarted.
+ # Fix: messages referenced $ICS_APP_NAME (copy-paste from the ICS script) - use $KAFKAPC_APP_NAME
+ if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
+ echo -e $RED"The $KAFKAPC_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+ echo -e $RED"The $KAFKAPC_APP_NAME will not be started"$ERED
+ exit
+ fi
+ if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
+ echo -e $RED"The $KAFKAPC_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+ echo -e $RED"The $KAFKAPC_APP_NAME will not be started"$ERED
+ exit
+ fi
+
+ if [ $retcode_p -eq 0 ]; then
+ # Prestarted: just make sure the existing deployment is scaled up
+ echo -e " Using existing $KAFKAPC_APP_NAME deployment and service"
+ echo " Setting RC replicas=1"
+ __kube_scale deployment $KAFKAPC_APP_NAME $KUBE_SIM_NAMESPACE 1
+ fi
+
+ if [ $retcode_i -eq 0 ]; then
+ # Managed: create namespace, service and app from the yaml templates
+ echo -e " Creating $KAFKAPC_APP_NAME deployment and service"
+
+ __kube_create_namespace $KUBE_SIM_NAMESPACE
+
+ __kafkapc_export_vars
+
+ # Create service
+ input_yaml=$SIM_GROUP"/"$KAFKAPC_COMPOSE_DIR"/"svc.yaml
+ output_yaml=$PWD/tmp/kafkapc_svc.yaml
+ __kube_create_instance service $KAFKAPC_APP_NAME $input_yaml $output_yaml
+
+ # Create app
+ input_yaml=$SIM_GROUP"/"$KAFKAPC_COMPOSE_DIR"/"app.yaml
+ output_yaml=$PWD/tmp/kafkapc_app.yaml
+ __kube_create_instance app $KAFKAPC_APP_NAME $input_yaml $output_yaml
+ fi
+
+ # Wait until the alive url answers
+ __check_service_start $KAFKAPC_APP_NAME $KAFKAPC_SERVICE_PATH$KAFKAPC_ALIVE_URL
+
+ else
+
+ # Check if docker app shall be fully managed by the test script
+ __check_included_image 'KAFKAPC'
+ if [ $? -eq 1 ]; then
+ echo -e $RED"The Kafka procon app is not included as managed in this test script"$ERED
+ echo -e $RED"The Kafka procon will not be started"$ERED
+ exit
+ fi
+
+ __kafkapc_export_vars
+
+ __start_container $KAFKAPC_COMPOSE_DIR "" NODOCKERARGS 1 $KAFKAPC_APP_NAME
+
+ __check_service_start $KAFKAPC_APP_NAME $KAFKAPC_SERVICE_PATH$KAFKAPC_ALIVE_URL
+ fi
+ echo ""
+ return 0
+}
+
+# Tests if a variable value in the KAFKAPC is equal to a target value, with an optional timeout.
+# Arg: <variable-name> <target-value> - This test set pass or fail depending on if the variable is
+# equal to the target or not.
+# Arg: <variable-name> <target-value> <timeout-in-sec> - This test waits up to the timeout seconds
+# before setting pass or fail depending on if the variable value becomes equal to the target
+# value or not.
+# (Function for test scripts)
+kafkapc_equal() {
+ if [ $# -eq 2 ] || [ $# -eq 3 ]; then
+ # Fix: app label passed to __var_test was misspelled 'KAFPAPC' - it appears in test logs
+ __var_test KAFKAPC "$KAFKAPC_SERVICE_PATH/" $1 "=" $2 $3
+ else
+ __print_err "Wrong args to kafkapc_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+ fi
+}
+
+# KAFKA PC API: Reset all, POST /reset
+# Arg: <response-code>
+# (Function for test scripts)
+kafkapc_api_reset() {
+ __log_conf_start $@
+
+ if [ $# -ne 1 ]; then
+ __print_err "<response-code>" $@
+ return 1
+ fi
+
+ res="$(__do_curl_to_api KAFKAPC POST /reset)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_conf_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_conf_ok
+ return 0
+}
+
+# KAFKA PC API: Create a topic of a data-type, PUT /topics/<topic>
+# Arg: <response-code> <topic-name> <mime-type>
+# (Function for test scripts)
+kafkapc_api_create_topic() {
+ __log_conf_start $@
+
+ if [ $# -ne 3 ]; then
+ __print_err "<response-code> <topic-name> <mime-type>" $@
+ return 1
+ fi
+
+ # Mime type of the topic's data is passed as a query parameter
+ res="$(__do_curl_to_api KAFKAPC PUT /topics/$2?type=$3)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_conf_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_conf_ok
+ return 0
+}
+
+# KAFKA PC API: Get topics, GET /topics
+# args: <response-code> [ EMPTY | [<topic>]+ ]
+# (Function for test scripts)
+kafkapc_api_get_topics() {
+ __log_test_start $@
+
+ if [ $# -lt 1 ]; then
+ __print_err "<response-code> EMPTY | [<policy-type-id>]*" $@
+ return 1
+ fi
+
+ res="$(__do_curl_to_api KAFKAPC GET /topics)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+ if [ $# -gt 1 ]; then
+ # Build the expected json array from the remaining args and compare with the response body
+ body=${res:0:${#res}-3}
+ targetJson="["
+
+ for pid in ${@:2} ; do
+ if [ "$targetJson" != "[" ]; then
+ targetJson=$targetJson","
+ fi
+ # 'EMPTY' is a placeholder arg meaning an empty expected list
+ if [ $pid != "EMPTY" ]; then
+ targetJson=$targetJson"\"$pid\""
+ fi
+ done
+ targetJson=$targetJson"]"
+ echo " TARGET JSON: $targetJson" >> $HTTPLOG
+ res=$(python3 ../common/compare_json.py "$targetJson" "$body")
+
+ if [ $res -ne 0 ]; then
+ __log_test_fail_body
+ return 1
+ fi
+ fi
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Get a topic, GET /topic/<topic>
+# args: <response-code> <topic> <mime-type>
+# (Function for test scripts)
+kafkapc_api_get_topic() {
+ __log_test_start $@
+
+ if [ $# -ne 3 ]; then
+ __print_err "<response-code> <topic> <mime-type>" $@
+ return 1
+ fi
+
+ res="$(__do_curl_to_api KAFKAPC GET /topics/$2)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ # The response body is expected to be exactly the topic's mime-type
+ body=${res:0:${#res}-3}
+ if [ "$body" != $3 ]; then
+ __log_test_fail_body
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Start sending on a topic, POST /topic/<topic>/startsend
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_start_sending() {
+ __log_test_start $@
+
+ if [ $# -ne 2 ]; then
+ __print_err "<response-code> <topic>" $@
+ return 1
+ fi
+
+ # Tell the procon to start producing on the given topic
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/startsend)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Start receiving on a topic, POST /topic/<topic>/startreceive
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_start_receiving() {
+ __log_test_start $@
+
+ if [ $# -ne 2 ]; then
+ __print_err "<response-code> <topic>" $@
+ return 1
+ fi
+
+ # Tell the procon to start consuming from the given topic
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/startreceive)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Stop sending on a topic, POST /topic/<topic>/stopsend
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_stop_sending() {
+ __log_test_start $@
+
+ if [ $# -ne 2 ]; then
+ __print_err "<response-code> <topic>" $@
+ return 1
+ fi
+
+ # Tell the procon to stop producing on the given topic
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/stopsend)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Stop receiving on a topic, POST /topic/<topic>/stopreceive
+# args: <response-code> <topic>
+# (Function for test scripts)
+kafkapc_api_stop_receiving() {
+ __log_test_start $@
+
+ if [ $# -ne 2 ]; then
+ __print_err "<response-code> <topic>" $@
+ return 1
+ fi
+
+ # Tell the procon to stop consuming from the given topic
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/stopreceive)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Send a message on a topic, POST /topic/<topic>/msg
+# args: <response-code> <topic> <mime-type> <msg>
+# (Function for test scripts)
+kafkapc_api_post_msg() {
+ __log_test_start $@
+
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <topic> <mime-type> <msg>" $@
+ return 1
+ fi
+ # Message is written to a temp file and posted with the given mime-type ($3)
+ payload="tmp/.kafkapayload"
+ echo -n $4 > $payload #-n prevent a newline to be added...
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/msg $payload $3)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+
+# KAFKA PC API: Get a msg on a topic, GET /topic/<topic>/msg
+# args: <response-code> <topic> ([ <mime-type> <msg> ] | NOMSG )
+# (Function for test scripts)
+kafkapc_api_get_msg() {
+ __log_test_start $@
+
+ if [ $# -lt 3 ]; then
+ __print_err "<response-code> <topic> ([ <mime-type> <msg> ] | NOMSG )" $@
+ return 1
+ fi
+ # NOTE(review): mime_type is computed below but never used in the request or the
+ # body comparison - looks like leftover code; confirm before removing
+ mime_type="text/plain"
+ if [ ! -z "$3" ]; then
+ mime_type=$3
+ fi
+ res="$(__do_curl_to_api KAFKAPC GET /topics/$2/msg)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+ # Only when a <msg> arg is given is the body compared (the NOMSG form skips this)
+ if [ $# -eq 4 ]; then
+ body=${res:0:${#res}-3}
+ if [ "$body" != "$4" ]; then
+ __log_test_fail_body
+ return 1
+ fi
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Send a message from a file on a topic, POST /topic/<topic>/msg
+# args: <response-code> <topic> <mime-type> <file>
+# (Function for test scripts)
+kafkapc_api_post_msg_from_file() {
+ __log_test_start $@
+
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <topic> <mime-type> <file>" $@
+ return 1
+ fi
+ # File ($4) is posted as-is with the given mime-type ($3)
+ res="$(__do_curl_to_api KAFKAPC POST /topics/$2/msg $4 $3)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+# KAFKA PC API: Get a msg on a topic and compare with file, GET /topic/<topic>/msg
+# args: <response-code> <topic> <mime-type> <file>
+# (Function for test scripts)
+kafkapc_api_get_msg_from_file() {
+ __log_test_start $@
+
+ if [ $# -ne 4 ]; then
+ __print_err "<response-code> <topic> <mime-type> <file> " $@
+ return 1
+ fi
+
+ # Read the expected message content up front so a missing file fails early
+ if [ -f $4 ]; then
+ msgfile=$(cat $4)
+ else
+ __log_test_fail_general "Message file "$4", does not exist"
+ return 1
+ fi
+
+ # NOTE(review): mime_type is set but never used ($3 is ignored as well) - confirm before removing
+ mime_type="text/plain"
+
+ res="$(__do_curl_to_api KAFKAPC GET /topics/$2/msg)"
+ # Last 3 chars of the curl result hold the HTTP status code
+ status=${res:${#res}-3}
+
+ if [ $status -ne $1 ]; then
+ __log_test_fail_status_code $1 $status
+ return 1
+ fi
+
+ # Body must match the file content exactly
+ body=${res:0:${#res}-3}
+ if [ "$body" != "$msgfile" ]; then
+ __log_test_fail_body
+ return 1
+ fi
+
+ __log_test_pass
+ return 0
+}
+
+
+# Create json file for payload
+# arg: <size-in-kb> <filename>
+kafkapc_api_generate_json_payload_file() {
+ __log_conf_start $@
+ if [ $# -ne 2 ]; then
+ # Fix: usage text said '<topic-url>' but the documented args are <size-in-kb> <filename>
+ __print_err "<size-in-kb> <json-file>" $@
+ return 1
+ fi
+ if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+ __log_conf_fail_general "Only size between 1k and 10000k supported"
+ return 1
+ fi
+ # Emit {"abcdefghijklmno":["ABCDEFG","ABCDEFG",...]} - each ',"ABCDEFG"' item is
+ # 10 bytes, so $1*100 items yields roughly $1 kb of json payload
+ echo -n "{\"abcdefghijklmno\":[" > $2
+ LEN=$(($1*100-2))
+ echo -n "\""ABCDEFG"\"" >> $2
+ for ((idx=1; idx<$LEN; idx++))
+ do
+ echo -n ",\"ABCDEFG\"" >> $2
+ done
+ echo -n "]}" >> $2
+
+ __log_conf_ok
+ return 0
+}
+
+# Create text file for payload
+# arg: <size-in-kb> <filename>
+kafkapc_api_generate_text_payload_file() {
+ __log_conf_start $@
+ if [ $# -ne 2 ]; then
+ # Fix: usage text said '<topic-url>' but the documented args are <size-in-kb> <filename>
+ __print_err "<size-in-kb> <text-file>" $@
+ return 1
+ fi
+ if [ $1 -lt 1 ] || [ $1 -gt 10000 ]; then
+ __log_conf_fail_general "Only size between 1k and 10000k supported"
+ return 1
+ fi
+ # Emit $1*100 chunks of 10 chars -> roughly $1 kb of plain text
+ echo -n "" > $2
+ LEN=$(($1*100))
+ for ((idx=0; idx<$LEN; idx++))
+ do
+ echo -n "ABCDEFGHIJ" >> $2
+ done
+
+ __log_conf_ok
+ return 0
+}
\ No newline at end of file
kubectl logs -n $KUBE_ONAP_NAMESPACE $podname --tail=-1 > $1$2_$podname.log 2>&1
done
else
- docker logs $MR_DMAAP_APP_NAME > $1$2mr.log 2>&1
+ docker logs $MR_DMAAP_APP_NAME > $1$2_mr.log 2>&1
docker logs $MR_KAFKA_APP_NAME > $1$2_mr_kafka.log 2>&1
docker logs $MR_ZOOKEEPER_APP_NAME > $1$2_mr_zookeeper.log 2>&1
fi
# args: -
__MR_statisics_setup() {
if [ $RUNMODE == "KUBE" ]; then
- echo "MR $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE"
+ echo "MR-STUB $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE"
else
- echo "MR $MR_STUB_APP_NAME"
+ echo "MR-STUB $MR_STUB_APP_NAME"
fi
}
# args: -
__DMAAPMR_statisics_setup() {
if [ $RUNMODE == "KUBE" ]; then
- echo ""
+ echo "KAFKA $MR_KAFKA_APP_NAME $KUBE_ONAP_NAMESPACE MESSAGE-ROUTER $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE ZOOKEEPER $MR_ZOOKEEPER_APP_NAME $KUBE_ONAP_NAMESPACE"
else
- echo ""
+ echo "KAFKA $MR_KAFKA_APP_NAME MESSAGE-ROUTER $MR_DMAAP_APP_NAME ZOOKEEPER $MR_ZOOKEEPER_APP_NAME"
fi
}
# args: <protocol> <internal-port> <external-port> <internal-secure-port> <external-secure-port>
__mr_set_protocoll() {
echo -e $BOLD"$MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
## Access to Dmaap mediator
MR_SERVICE_PATH=$MR_STUB_PATH # access container->container, docker - access pod->svc, kube
MR_KAFKA_SERVICE_PATH=""
+ MR_ZOOKEEPER_SERVICE_PATH=""
__check_included_image "DMAAPMR"
if [ $? -eq 0 ]; then
MR_SERVICE_PATH=$MR_DMAAP_PATH # access container->container, docker - access pod->svc, kube
MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME":"$MR_KAFKA_PORT
+ MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME":"$MR_ZOOKEEPER_PORT
fi
# For directing calls from script to e.g.PMS via message rounter
MR_SERVICE_PATH=$MR_DMAAP_PATH
MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+ MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
fi
__check_prestarted_image "DMAAPMR"
if [ $? -eq 0 ]; then
MR_SERVICE_PATH=$MR_DMAAP_PATH
MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
MR_KAFKA_SERVICE_PATH=$MR_KAFKA_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_KAFKA_PORT
+ MR_ZOOKEEPER_SERVICE_PATH=$MR_ZOOKEEPER_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_ZOOKEEPER_PORT
fi
# For directing calls from script to e.g.PMS, via message rounter
}
-
-# use_mr_http() { 2 3 4 5 6 7
-# __mr_set_protocoll "http" $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_PORT $MR_EXTERNAL_PORT $MR_INTERNAL_SECURE_PORT $MR_EXT_SECURE_PORT
-# }
-
-# use_mr_https() {
-# __mr_set_protocoll "https" $MR_INTERNAL_SECURE_PORT $MR_EXTERNAL_SECURE_PORT
-# }
-
-# # Setup paths to svc/container for internal and external access
-# # args: <protocol> <internal-port> <external-port> <mr-stub-internal-port> <mr-stub-external-port> <mr-stub-internal-secure-port> <mr-stub-external-secure-port>
-# __mr_set_protocoll() {
-# echo -e $BOLD"$MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME protocol setting"$EBOLD
-# echo -e " Using $BOLD http $EBOLD towards $MR_STUB_DISPLAY_NAME and $MR_DMAAP_DISPLAY_NAME"
-
-# ## Access to Dmaap mediator
-
-# MR_HTTPX=$1
-
-# # Access via test script
-# MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME":"$2 # access from script via proxy, docker
-# MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME":"$2 # access from script via proxy, docker
-# MR_DMAAP_ADAPTER_HTTP="" # Access to dmaap mr via proyx - set only if app is included
-
-# MR_SERVICE_PATH=$MR_STUB_PATH # access container->container, docker - access pod->svc, kube
-# __check_included_image "DMAAPMR"
-# if [ $? -eq 0 ]; then
-# MR_SERVICE_PATH=$MR_DMAAP_PATH # access container->container, docker - access pod->svc, kube
-# MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-# fi
-
-# # For directing calls from script to e.g.PMS via message rounter
-# # These cases shall always go though the mr-stub
-# MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$4
-# MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$6
-
-# MR_DMAAP_ADAPTER_TYPE="REST"
-
-# if [ $RUNMODE == "KUBE" ]; then
-# MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
-# MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME.$KUBE_ONAP_NAMESPACE":"$3 # access from script via proxy, kube
-
-# MR_SERVICE_PATH=$MR_STUB_PATH
-# __check_included_image "DMAAPMR"
-# if [ $? -eq 0 ]; then
-# MR_SERVICE_PATH=$MR_DMAAP_PATH
-# MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-# fi
-# __check_prestarted_image "DMAAPMR"
-# if [ $? -eq 0 ]; then
-# MR_SERVICE_PATH=$MR_DMAAP_PATH
-# MR_DMAAP_ADAPTER_HTTP=$MR_DMAAP_PATH
-# fi
-
-# # For directing calls from script to e.g.PMS, via message rounter
-# # These calls shall always go though the mr-stub
-# MR_ADAPTER_HTTP="http://"$MR_STUB_APP_NAME":"$5
-# MR_ADAPTER_HTTPS="https://"$MR_STUB_APP_NAME":"$7
-# fi
-
-# # For calls from script to the mr-stub
-# MR_STUB_ADAPTER=$MR_STUB_PATH
-# MR_STUB_ADAPTER_TYPE="REST"
-
-# echo ""
-
-# }
-
# Export env vars for config files, docker compose and kube resources
# args: -
__dmaapmr_export_vars() {
export MR_ZOOKEEPER_PORT
export MR_KAFKA_SERVICE_PATH
+ export MR_ZOOKEEPER_SERVICE_PATH
+
+ export MR_KAFKA_KUBE_NODE_PORT
+ export MR_KAFKA_DOCKER_LOCALHOST_PORT
}
# Export env vars for config files, docker compose and kube resources
export MR_EXTERNAL_PORT
export MR_KAFKA_SERVICE_PATH
+ export MR_ZOOKEEPER_SERVICE_PATH
}
__kube_create_instance app $MR_DMAAP_APP_NAME $input_yaml $output_yaml
- # echo " Retrieving host and ports for service..."
- # MR_DMAAP_HOST_NAME=$(__kube_get_service_host $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE)
-
- # MR_EXT_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "http")
- # MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_DMAAP_APP_NAME $KUBE_ONAP_NAMESPACE "https")
-
- # echo " Host IP, http port, https port: $MR_DMAAP_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
- # MR_SERVICE_PATH=""
- # if [ $MR_HTTPX == "http" ]; then
- # MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_PORT
- # MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
- # else
- # MR_DMAAP_PATH=$MR_HTTPX"://"$MR_DMAAP_HOST_NAME":"$MR_EXT_SECURE_PORT
- # MR_SERVICE_PATH=$MR_HTTPX"://"$MR_DMAAP_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
- # fi
-
__check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
- # Cannot create topics, returns 400 forever.....topics will be created during pipeclean below
- #__create_topic $MR_READ_TOPIC "Topic for reading policy messages"
-
- #__create_topic $MR_WRITE_TOPIC "Topic for writing policy messages"
+ echo " Kafka TCP node port $MR_KAFKA_KUBE_NODE_PORT"
-# __dmaap_pipeclean $MR_READ_TOPIC "/events/$MR_READ_TOPIC" "/events/$MR_READ_TOPIC/users/policy-agent?timeout=1000&limit=100"
-#
-# __dmaap_pipeclean $MR_WRITE_TOPIC "/events/$MR_WRITE_TOPIC" "/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=1000&limit=100"
-
-
- #__dmaap_pipeclean "unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=1000&limit=100"
- #__dmaap_pipeclean "unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs?timeout=1000&limit=100"
if [ $# -gt 0 ]; then
if [ $(($#%3)) -eq 0 ]; then
fi
- # echo " Retrieving host and ports for service..."
- # MR_STUB_HOST_NAME=$(__kube_get_service_host $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE)
-
- # MR_EXT_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "http")
- # MR_EXT_SECURE_PORT=$(__kube_get_service_port $MR_STUB_APP_NAME $KUBE_ONAP_NAMESPACE "https")
-
- # echo " Host IP, http port, https port: $MR_STUB_APP_NAME $MR_EXT_PORT $MR_EXT_SECURE_PORT"
- # if [ $MR_HTTPX == "http" ]; then
- # MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
- # if [ -z "$MR_SERVICE_PATH" ]; then
- # MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_PORT
- # fi
- # else
- # MR_STUB_PATH=$MR_HTTPX"://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
- # if [ -z "$MR_SERVICE_PATH" ]; then
- # MR_SERVICE_PATH=$MR_HTTPX"://"$MR_STUB_APP_NAME"."$KUBE_ONAP_NAMESPACE":"$MR_EXT_SECURE_PORT
- # fi
- # fi
- # MR_ADAPTER_HTTP="http://"$MR_STUB_HOST_NAME":"$MR_EXT_PORT
- # MR_ADAPTER_HTTPS="https://"$MR_STUB_HOST_NAME":"$MR_EXT_SECURE_PORT
-
- # MR_STUB_ADAPTER=$MR_STUB_PATH
- # MR_STUB_ADAPTER_TYPE="REST"
-
__check_service_start $MR_STUB_APP_NAME $MR_STUB_PATH$MR_STUB_ALIVE_URL
- echo -ne " Service $MR_STUB_APP_NAME - reset "$SAMELINE
- result=$(__do_curl $MR_STUB_PATH/reset)
- if [ $? -ne 0 ]; then
- echo -e " Service $MR_STUB_APP_NAME - reset $RED Failed $ERED - will continue"
- else
- echo -e " Service $MR_STUB_APP_NAME - reset $GREEN OK $EGREEN"
- fi
-
-
else
__check_included_image 'DMAAPMR'
__check_service_start $MR_DMAAP_APP_NAME $MR_DMAAP_PATH$MR_DMAAP_ALIVE_URL
-
- # Cannot create topics, returns 400 forever.....topics will be created during pipeclean below
- #__create_topic $MR_READ_TOPIC "Topic for reading policy messages"
-
- #__create_topic $MR_WRITE_TOPIC "Topic for writing policy messages"
-
- #__dmaap_pipeclean $MR_READ_TOPIC "/events/$MR_READ_TOPIC" "/events/$MR_READ_TOPIC/users/policy-agent?timeout=1000&limit=100"
-
- #__dmaap_pipeclean $MR_WRITE_TOPIC "/events/$MR_WRITE_TOPIC" "/events/$MR_WRITE_TOPIC/users/mr-stub?timeout=1000&limit=100"
+ echo " Kafka TCP node port $MR_KAFKA_DOCKER_LOCALHOST_PORT"
if [ $# -gt 0 ]; then
if [ $(($#%3)) -eq 0 ]; then
fi
fi
- #__dmaap_pipeclean "unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json" "/events/unauthenticated.dmaapmed.json/dmaapmediatorproducer/STD_Fault_Messages?timeout=1000&limit=100"
- #__dmaap_pipeclean "unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json" "/events/unauthenticated.dmaapadp.json/dmaapadapterproducer/msgs?timeout=1000&limit=100"
-
- echo " Current topics:"
- curlString="$MR_DMAAP_PATH/topics"
- result=$(__do_curl "$curlString")
- echo $result | indent2
+ dmaap_api_print_topics
fi
__mr_export_vars
return 1
}
+# Helper function to list the current topics in DMAAP MR
+# args: -
+dmaap_api_print_topics() {
+ echo " Current topics:"
+ curlString="$MR_DMAAP_PATH/topics"
+ result=$(__do_curl "$curlString")
+ echo $result | indent2
+}
+
### Generic test cases for varaible checking
__log_conf_fail_general "Only size between 1k and 10000k supported"
return 1
fi
- echo -n "{\"a\":[" > $2
- LEN=$(($1*150))
- echo -n "\"a0\"" >> $2
+ echo -n "{\"abcdefghijklmno\":[" > $2
+ LEN=$(($1*100-2))
+ echo -n "\""ABCDEFG"\"" >> $2
for ((idx=1; idx<$LEN; idx++))
do
- echo -n ",\"a$idx\"" >> $2
+ echo -n ",\"ABCDEFG\"" >> $2
done
echo -n "]}" >> $2
return 0
}
-# Create tet file for payload
+# Create text file for payload
# arg: <size-in-kb> <filename>
mr_api_generate_text_payload_file() {
__log_conf_start $@
# args: <protocol> <internal-port> <external-port>
__gateway_set_protocoll() {
echo -e $BOLD"$NRT_GATEWAY_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $NRT_GATEWAY_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $NRT_GATEWAY_DISPLAY_NAME"
## Access to nonrtric gateway
if [ $RUNMODE == "KUBE" ]; then
export POLICY_AGENT_EXTERNAL_SECURE_PORT
- export ECS_EXTERNAL_SECURE_PORT
+ export ICS_EXTERNAL_SECURE_PORT
export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
- export ECS_DOMAIN_NAME=$ECS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
+ export ICS_DOMAIN_NAME=$ICS_APP_NAME.$KUBE_NONRTRIC_NAMESPACE
else
export POLICY_AGENT_DOMAIN_NAME=$POLICY_AGENT_APP_NAME
- export ECS_DOMAIN_NAME=$ECS_APP_NAME
+ export ICS_DOMAIN_NAME=$ICS_APP_NAME
fi
}
return 0
}
-# API Test function: GET /ei-producer/v1/eitypes towards ECS
+# API Test function: GET /ei-producer/v1/eitypes towards ICS
# Note: This is just to test service response
# args: <response-code>
# (Function for test scripts)
-gateway_ecs_get_types() {
+gateway_ics_get_types() {
__log_test_start $@
if [ $# -ne 1 ]; then
__print_err "<response-code>" $@
# args: <protocol> <internal-port> <external-port>
__agent_set_protocoll() {
echo -e $BOLD"$POLICY_AGENT_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $POLICY_AGENT_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $POLICY_AGENT_DISPLAY_NAME"
## Access to Dmaap adapter
}
+# Function to prepare the consul configuration according to the current simulator configuration
+# args: SDNC|NOSDNC <output-file>
+# (Function for test scripts)
+prepare_consul_config() {
+ echo -e $BOLD"Prepare Consul config"$EBOLD
+
+ echo " Writing consul config for "$POLICY_AGENT_APP_NAME" to file: "$2
+
+ if [ $# != 2 ]; then
+ ((RES_CONF_FAIL++))
+ __print_err "need two args, SDNC|NOSDNC <output-file>" $@
+ exit 1
+ fi
+
+ if [ $1 == "SDNC" ]; then
+ echo -e " Config$BOLD including SDNC$EBOLD configuration"
+ elif [ $1 == "NOSDNC" ]; then
+ echo -e " Config$BOLD excluding SDNC$EBOLD configuration"
+ else
+ ((RES_CONF_FAIL++))
+ __print_err "need two args, SDNC|NOSDNC <output-file>" $@
+ exit 1
+ fi
+
+ config_json="\n {"
+ if [ $1 == "SDNC" ]; then
+ config_json=$config_json"\n \"controller\": ["
+ config_json=$config_json"\n {"
+ config_json=$config_json"\n \"name\": \"$SDNC_APP_NAME\","
+ config_json=$config_json"\n \"baseUrl\": \"$SDNC_SERVICE_PATH\","
+ config_json=$config_json"\n \"userName\": \"$SDNC_USER\","
+ config_json=$config_json"\n \"password\": \"$SDNC_PWD\""
+ config_json=$config_json"\n }"
+ config_json=$config_json"\n ],"
+ fi
+
+ config_json=$config_json"\n \"streams_publishes\": {"
+ config_json=$config_json"\n \"dmaap_publisher\": {"
+ config_json=$config_json"\n \"type\": \"message-router\","
+ config_json=$config_json"\n \"dmaap_info\": {"
+ config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_WRITE_URL\""
+ config_json=$config_json"\n }"
+ config_json=$config_json"\n }"
+ config_json=$config_json"\n },"
+ config_json=$config_json"\n \"streams_subscribes\": {"
+ config_json=$config_json"\n \"dmaap_subscriber\": {"
+ config_json=$config_json"\n \"type\": \"message-router\","
+ config_json=$config_json"\n \"dmaap_info\": {"
+ config_json=$config_json"\n \"topic_url\": \"$MR_SERVICE_PATH$MR_READ_URL\""
+ config_json=$config_json"\n }"
+ config_json=$config_json"\n }"
+ config_json=$config_json"\n },"
+
+ config_json=$config_json"\n \"ric\": ["
+
+ if [ $RUNMODE == "KUBE" ]; then
+ result=$(kubectl get pods -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.items[?(@.metadata.labels.autotest=="RICSIM")].metadata.name}')
+ rics=""
+ ric_cntr=0
+ if [ $? -eq 0 ] && [ ! -z "$result" ]; then
+ for im in $result; do
+ if [[ $im != *"-0" ]]; then
+ ric_subdomain=$(kubectl get pod $im -n $KUBE_A1SIM_NAMESPACE -o jsonpath='{.spec.subdomain}')
+ rics=$rics" "$im"."$ric_subdomain"."$KUBE_A1SIM_NAMESPACE
+ let ric_cntr=ric_cntr+1
+ fi
+ done
+ fi
+ if [ $ric_cntr -eq 0 ]; then
+ echo $YELLOW"Warning: No rics found for the configuration"$EYELLOW
+ fi
+ else
+ rics=$(docker ps --filter "name=$RIC_SIM_PREFIX" --filter "network=$DOCKER_SIM_NWNAME" --filter "status=running" --format {{.Names}})
+ if [ $? -ne 0 ] || [ -z "$rics" ]; then
+ echo -e $RED" FAIL - the names of the running RIC Simulator cannot be retrieved." $ERED
+ ((RES_CONF_FAIL++))
+ return 1
+ fi
+ fi
+ cntr=0
+ for ric in $rics; do
+ if [ $cntr -gt 0 ]; then
+ config_json=$config_json"\n ,"
+ fi
+ config_json=$config_json"\n {"
+ if [ $RUNMODE == "KUBE" ]; then
+            ric_id=${ric%.*.*} #extract pod id from full hostname
+ ric_id=$(echo "$ric_id" | tr '-' '_')
+ else
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ ric_id=$ric
+ else
+ ric_id=$(echo "$ric" | tr '-' '_') #ric id still needs underscore as it is different from the container name
+ fi
+ fi
+ echo " Found a1 sim: "$ric_id
+ config_json=$config_json"\n \"name\": \"$ric_id\","
+ config_json=$config_json"\n \"baseUrl\": \"$RIC_SIM_HTTPX://$ric:$RIC_SIM_PORT\","
+ if [ $1 == "SDNC" ]; then
+ config_json=$config_json"\n \"controller\": \"$SDNC_APP_NAME\","
+ fi
+ config_json=$config_json"\n \"managedElementIds\": ["
+ config_json=$config_json"\n \"me1_$ric_id\","
+ config_json=$config_json"\n \"me2_$ric_id\""
+ config_json=$config_json"\n ]"
+ config_json=$config_json"\n }"
+ let cntr=cntr+1
+ done
+
+ config_json=$config_json"\n ]"
+ config_json=$config_json"\n}"
+
+ if [ $RUNMODE == "KUBE" ]; then
+ config_json="{\"config\":"$config_json"}"
+ fi
+
+ printf "$config_json">$2
+
+ echo ""
+}
# Load the the appl config for the agent into a config map
agent_load_config() {
# args: <protocol> <internal-port> <external-port>
__prod_stub_set_protocoll() {
echo -e $BOLD"$PROD_STUB_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $PROD_STUB_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $PROD_STUB_DISPLAY_NAME"
## Access to Prod stub sim
# args:
__prodstub_export_vars() {
export PROD_STUB_APP_NAME
- export PROD_STUB_APP_NAME_ALIAS
export PROD_STUB_DISPLAY_NAME
export DOCKER_SIM_NWNAME
retcode_p=$?
if [ $retcode_i -ne 0 ] && [ $retcode_p -ne 0 ]; then
- echo -e $RED"The $ECS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
- echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+ echo -e $RED"The $ICS_APP_NAME app is not included as managed nor prestarted in this test script"$ERED
+ echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
exit
fi
if [ $retcode_i -eq 0 ] && [ $retcode_p -eq 0 ]; then
- echo -e $RED"The $ECS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
- echo -e $RED"The $ECS_APP_NAME will not be started"$ERED
+ echo -e $RED"The $ICS_APP_NAME app is included both as managed and prestarted in this test script"$ERED
+ echo -e $RED"The $ICS_APP_NAME will not be started"$ERED
exit
fi
__log_test_fail_general "Template file "$7" for jobdata, does not exist"
return 1
fi
- if [[ "$ECS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
+ if [[ "$ICS_FEATURE_LEVEL" == *"INFO-TYPES"* ]]; then
targetJson="{\"info_job_identity\":\"$3\",\"info_type_identity\":\"$4\",\"target_uri\":\"$5\",\"owner\":\"$6\", \"info_job_data\":$jobfile,\"last_updated\":\"????\"}"
else
targetJson="{\"ei_job_identity\":\"$3\",\"ei_type_identity\":\"$4\",\"target_uri\":\"$5\",\"owner\":\"$6\", \"ei_job_data\":$jobfile,\"last_updated\":\"????\"}"
# args: <protocol> <internal-port> <external-port>
__rapp_catalogue_set_protocoll() {
echo -e $BOLD"$RAPP_CAT_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $RAPP_CAT_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $RAPP_CAT_DISPLAY_NAME"
## Access to Rapp catalogue
#__var_test RC "$LOCALHOST_HTTP:$RC_EXTERNAL_PORT/" $1 "=" $2 $3
__var_test RC "$RC_SERVICE_PATH/" $1 "=" $2 $3
else
- __print_err "Wrong args to ecs_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
+ __print_err "Wrong args to ics_equal, needs two or three args: <sim-param> <target-value> [ timeout ]" $@
fi
}
# This function is called for apps managed by the test script as well as for prestarted apps.
# args: -
__RICSIM_statisics_setup() {
- if [ $RUNMODE == "KUBE" ]; then
- echo ""
- else
- echo ""
- fi
+ for ((RICSIM_INSTANCE=10; RICSIM_INSTANCE>0; RICSIM_INSTANCE-- )); do
+ if [ $RUNMODE == "KUBE" ]; then
+ RICSIM_INSTANCE_KUBE=$(($RICSIM_INSTANCE-1))
+ echo -n " RICSIM_G1_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+ echo -n " RICSIM_G2_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+ echo -n " RICSIM_G3_$RICSIM_INSTANCE_KUBE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE_KUBE $KUBE_A1SIM_NAMESPACE "
+ else
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g1_$RICSIM_INSTANCE "
+ echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g2_$RICSIM_INSTANCE "
+ echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}_g3_$RICSIM_INSTANCE "
+ else
+ echo -n " RICSIM_G1_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g1-$RICSIM_INSTANCE "
+ echo -n " RICSIM_G2_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g2-$RICSIM_INSTANCE "
+ echo -n " RICSIM_G3_$RICSIM_INSTANCE ${RIC_SIM_PREFIX}-g3-$RICSIM_INSTANCE "
+ fi
+ fi
+ done
}
#######################################################
RIC_SIM_HTTPX="http"
-RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
-#Vars for A1 interface version and container count
-G1_A1_VERSION=""
-G2_A1_VERSION=""
-G3_A1_VERSION=""
-G4_A1_VERSION=""
-G5_A1_VERSION=""
+#Vars for container count
G1_COUNT=0
G2_COUNT=0
G3_COUNT=0
echo -e $BOLD"RICSIM protocol setting"$EBOLD
echo -e " Using $BOLD http $EBOLD towards the simulators"
RIC_SIM_HTTPX="http"
- RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
RIC_SIM_PORT=$RIC_SIM_INTERNAL_PORT
echo ""
}
echo -e $BOLD"RICSIM protocol setting"$EBOLD
echo -e " Using $BOLD https $EBOLD towards the simulators"
RIC_SIM_HTTPX="https"
- RIC_SIM_HOST=$RIC_SIM_HTTPX"://"$LOCALHOST_NAME
RIC_SIM_PORT=$RIC_SIM_INTERNAL_SECURE_PORT
echo ""
}
#Set env var for simulator count and A1 interface vesion for the given group
if [ $1 == "$RIC1" ]; then
G1_COUNT=$2
- G1_A1_VERSION=$3
elif [ $1 == "$RIC2" ]; then
G2_COUNT=$2
- G2_A1_VERSION=$3
elif [ $1 == "$RIC3" ]; then
G3_COUNT=$2
- G3_A1_VERSION=$3
elif [ $1 == "$RIC4" ]; then
G4_COUNT=$2
- G4_A1_VERSION=$3
elif [ $1 == "$RIC5" ]; then
G5_COUNT=$2
- G5_A1_VERSION=$3
else
((RES_CONF_FAIL++))
__print_err "need three args, $RIC1|$RIC2|$RIC3|$RIC4|$RIC5 <count> <interface-id>" $@
# Create .env file to compose project, all ric container will get this prefix
echo "COMPOSE_PROJECT_NAME="$RIC_SIM_PREFIX > $SIM_GROUP/$RIC_SIM_COMPOSE_DIR/.env
- export G1_A1_VERSION
- export G2_A1_VERSION
- export G3_A1_VERSION
- export G4_A1_VERSION
- export G5_A1_VERSION
+ #extract service name (group), g1, g2, g3, g4 or g5 from var $1
+ #E.g. ricsim_g1 -> g1 is the service name
+ TMP_GRP=$1
+ RICSIM_COMPOSE_SERVICE_NAME=$(echo "${TMP_GRP##*_}")
+
+ export RICSIM_COMPOSE_A1_VERSION=$3
+ export RICSIM_COMPOSE_SERVICE_NAME
export RIC_SIM_INTERNAL_PORT
export RIC_SIM_INTERNAL_SECURE_PORT
export RIC_SIM_CERT_MOUNT_DIR
export DOCKER_SIM_NWNAME
export RIC_SIM_DISPLAY_NAME
- docker_args="--scale g1=$G1_COUNT --scale g2=$G2_COUNT --scale g3=$G3_COUNT --scale g4=$G4_COUNT --scale g5=$G5_COUNT"
+ docker_args="--no-recreate --scale $RICSIM_COMPOSE_SERVICE_NAME=$2"
+
+	#Create a list of container names
+ #Will be <ricsim-prefix>_<service-name>_<index>
+ # or
+ # <ricsim-prefix>-<service-name>-<index>
app_data=""
cntr=1
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ app_name_prefix=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"
+ else
+ app_name_prefix=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"
+ fi
while [ $cntr -le $2 ]; do
- app=$1"_"$cntr
+ app=$app_name_prefix$cntr
app_data="$app_data $app"
let cntr=cntr+1
done
cntr=1
while [ $cntr -le $2 ]; do
- app=$1"_"$cntr
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ app=$RIC_SIM_PREFIX"_"$RICSIM_COMPOSE_SERVICE_NAME"_"$cntr
+ else
+ app=$RIC_SIM_PREFIX"-"$RICSIM_COMPOSE_SERVICE_NAME"-"$cntr
+ fi
__check_service_start $app $RIC_SIM_HTTPX"://"$app:$RIC_SIM_PORT$RIC_SIM_ALIVE_URL
let cntr=cntr+1
done
ric_setname="${ricname%-*}" #Extract the stateful set name
echo $RIC_SIM_HTTPX"://"$ricname.$ric_setname.$KUBE_A1SIM_NAMESPACE":"$RIC_SIM_PORT
else
- echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+ if [ $DOCKER_COMPOSE_VERION == "V1" ]; then
+ echo $RIC_SIM_HTTPX"://"$1":"$RIC_SIM_PORT
+ else
+ ricname=$(echo "$1" | tr '_' '-')
+ echo $RIC_SIM_HTTPX"://"$ricname":"$RIC_SIM_PORT
+ fi
fi
}
# args: <protocol> <internal-port> <external-port>
__sdnc_set_protocoll() {
echo -e $BOLD"$SDNC_DISPLAY_NAME protocol setting"$EBOLD
- echo -e " Using $BOLD http $EBOLD towards $SDNC_DISPLAY_NAME"
+ echo -e " Using $BOLD $1 $EBOLD towards $SDNC_DISPLAY_NAME"
## Access to SDNC
SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME":"$2 # docker access, container->container and script->container via proxy
- SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME":"$1$SDNC_API_URL
+ SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME":"$2$SDNC_API_URL
if [ $RUNMODE == "KUBE" ]; then
SDNC_SERVICE_PATH=$1"://"$SDNC_APP_NAME.$KUBE_SDNC_NAMESPACE":"$3 # kube access, pod->svc and script->svc via proxy
- SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.KUBE_SDNC_NAMESPACE":"$1$SDNC_API_URL
+ SDNC_SERVICE_API_PATH=$1"://"$SDNC_USER":"$SDNC_PWD"@"$SDNC_APP_NAME.$KUBE_SDNC_NAMESPACE":"$3$SDNC_API_URL
fi
echo ""
controller_api_get_A1_policy_ids() {
__log_test_start $@
- ric_id=$3
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=$(get_kube_sim_host $3)
- fi
+ ric_id=$(__find_sim_host $3)
paramError=1
if [ $# -gt 3 ] && [ $2 == "OSC" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies"
+ url="$ric_id/a1-p/policytypes/$4/policies"
paramError=0
elif [ $# -gt 2 ] && [ $2 == "STD" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies"
+ url="$ric_id/A1-P/v1/policies"
paramError=0
fi
controller_api_get_A1_policy_type() {
__log_test_start $@
- ric_id=$3
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=$(get_kube_sim_host $3)
- fi
+ ric_id=$(__find_sim_host $3)
paramError=1
if [ $# -gt 3 ] && [ $2 == "OSC" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4"
+ url="$ric_id/a1-p/policytypes/$4"
paramError=0
fi
controller_api_delete_A1_policy() {
__log_test_start $@
- ric_id=$3
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=$(get_kube_sim_host $3)
- fi
+ ric_id=$(__find_sim_host $3)
paramError=1
if [ $# -eq 5 ] && [ $2 == "OSC" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5"
+ url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5"
paramError=0
elif [ $# -eq 4 ] && [ $2 == "STD" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4"
+ url="$ric_id/A1-P/v1/policies/$UUID$4"
paramError=0
fi
controller_api_put_A1_policy() {
__log_test_start $@
- ric_id=$3
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=$(get_kube_sim_host $3)
- fi
+ ric_id=$(__find_sim_host $3)
paramError=1
if [ $# -eq 6 ] && [ $2 == "OSC" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5"
+ url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5"
body=$(sed 's/XXX/'${5}'/g' $6)
paramError=0
elif [ $# -eq 5 ] && [ $2 == "STD" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4"
+ url="$ric_id/A1-P/v1/policies/$UUID$4"
body=$(sed 's/XXX/'${4}'/g' $5)
paramError=0
fi
controller_api_get_A1_policy_status() {
__log_test_start $@
- ric_id=$3
- if [ $RUNMODE == "KUBE" ]; then
- ric_id=$(get_kube_sim_host $3)
- fi
+ ric_id=$(__find_sim_host $3)
targetJson=""
paramError=1
if [ $# -ge 5 ] && [ $2 == "OSC" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/a1-p/policytypes/$4/policies/$UUID$5/status"
+ url="$ric_id/a1-p/policytypes/$4/policies/$UUID$5/status"
if [ $# -gt 5 ]; then
targetJson="{\"instance_status\":\"$6\""
targetJson=$targetJson",\"has_been_deleted\":\"$7\""
fi
paramError=0
elif [ $# -ge 4 ] && [ $2 == "STD" ]; then
- url="$RIC_SIM_HTTPX://$ric_id:$RIC_SIM_PORT/A1-P/v1/policies/$UUID$4/status"
+ url="$ric_id/A1-P/v1/policies/$UUID$4/status"
if [ $# -gt 4 ]; then
targetJson="{\"enforceStatus\":\"$5\""
if [ $# -eq 6 ]; then
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback Reciever"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
CONSUL_HOST="consul-server" # Host name of consul
SDNC_DB_IMAGE_BASE="mariadb"
SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-# ECS image and tag - uses cherry release
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tag - uses cherry release
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.0.1"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
# Control Panel image and tag - uses cherry release
CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
PROJECT_IMAGES_APP_NAMES="PA SDNC"
# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ECS RICSIM RC"
+ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC"
# List of app short names which images pulled from ONAP
ONAP_IMAGES_APP_NAMES="" # Not used
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-ECS_APP_NAME="enrichmentservice" # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ECS container
-ECS_EXTERNAL_PORT=8083 # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083 # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434 # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434 # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container" # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status" # Base path for alive check
-ECS_COMPOSE_DIR="ecs" # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml # Config file name
-ECS_VERSION="V1-2" # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL="" # Space separated list of features
+ICS_APP_NAME="informationservice" # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
+ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status" # Base path for alive check
+ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml # Config file name
+ICS_VERSION="V1-2" # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL="" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback Reciever"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
SDNC_DB_IMAGE_BASE="mariadb"
SDNC_DB_IMAGE_TAG_REMOTE_PROXY="10.5"
-# ECS image and tag - uses d release
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.1.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tag - uses d release
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_REMOTE_RELEASE_ORAN="1.1.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
# Control Panel image and tag - uses d release
CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
PROJECT_IMAGES_APP_NAMES="PA SDNC"
# List of app short names which images pulled from ORAN
-ORAN_IMAGES_APP_NAMES="CP ECS RICSIM RC NGW"
+ORAN_IMAGES_APP_NAMES="CP ICS RICSIM RC NGW"
# List of app short names which images pulled from ONAP
ONAP_IMAGES_APP_NAMES="" # Not used
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-ECS_APP_NAME="enrichmentservice" # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ECS container
-ECS_EXTERNAL_PORT=8083 # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083 # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434 # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434 # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container" # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status" # Base path for alive check
-ECS_COMPOSE_DIR="ecs" # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml # Config file name
-ECS_VERSION="V1-2" # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
+ICS_APP_NAME="informationservice" # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
+ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status" # Base path for alive check
+ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml # Config file name
+ICS_VERSION="V1-2" # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs1" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback Reciever"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
POLICY_AGENT_IMAGE_TAG_REMOTE="2.1.1"
POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.1.1"
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.0.1"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.0.1-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.0.1-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.0.1"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.0.1"
# Control Panel image and tags
#No local image for pvc cleaner, remote image always used
# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP SDNC RC RICSIM"
+PROJECT_IMAGES_APP_NAMES="PA ICS CP SDNC RC RICSIM"
# List of app short names which images pulled from ORAN
ORAN_IMAGES_APP_NAMES="" # Not used
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-ECS_APP_NAME="enrichmentservice" # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ECS container
-ECS_EXTERNAL_PORT=8083 # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083 # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434 # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434 # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container" # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status" # Base path for alive check
-ECS_COMPOSE_DIR="ecs" # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml # Config file name
-ECS_VERSION="V1-2" # Version where the types are added in the producer registration
-ECS_FEATURE_LEVEL="" # Space separated list of features
+ICS_APP_NAME="informationservice" # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
+ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt" # Mounted dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status" # Base path for alive check
+ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml # Config file name
+ICS_VERSION="V1-2" # Version where the types are added in the producer registration
+ICS_FEATURE_LEVEL="" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback Reciever"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
POLICY_AGENT_IMAGE_TAG_REMOTE="2.2.1"
POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.2.1"
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.1.0"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.1.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.1.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.1.0"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.1.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
#Control Panel image and tags
CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
#No local image for pvc cleaner, remote image always used
# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP RC RICSIM NGW" # Add SDNC here if oran image is used
+PROJECT_IMAGES_APP_NAMES="PA ICS CP RC RICSIM NGW" # Add SDNC here if oran image is used
# List of app short names which images pulled from ORAN
ORAN_IMAGES_APP_NAMES="" # Not used
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-ECS_APP_NAME="enrichmentservice" # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ECS container
-ECS_EXTERNAL_PORT=8083 # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083 # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434 # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434 # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container" # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status" # Base path for alive check
-ECS_COMPOSE_DIR="ecs" # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml # Config file name
-ECS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
+ICS_APP_NAME="informationservice" # Name for ICS container
+ICS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ICS container
+ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status" # Base path for alive check
+ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml # Config file name
+ICS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs0" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback receiver"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s" #nginx resolver for kube
CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11" # nginx resolver for docker
CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/" # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX="/data-producer/" # Path prefix for forwarding ecs calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX2="/data-consumer/" # Path prefix for forwarding ecs calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/" # Path prefix for forwarding ics calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/" # Path prefix for forwarding ics calls to NGW
NRT_GATEWAY_APP_NAME="nonrtricgateway" # Name of the Gateway container
NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
POLICY_AGENT_IMAGE_TAG_REMOTE="2.3.0"
POLICY_AGENT_IMAGE_TAG_REMOTE_RELEASE="2.3.0"
-# ECS image and tags
-ECS_IMAGE_BASE="o-ran-sc/nonrtric-enrichment-coordinator-service"
-ECS_IMAGE_TAG_LOCAL="1.2.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.0-SNAPSHOT"
-ECS_IMAGE_TAG_REMOTE="1.2.0"
-ECS_IMAGE_TAG_REMOTE_RELEASE="1.2.0"
-#Note: Update var ECS_FEATURE_LEVEL if image version is changed
+# ICS image and tags
+ICS_IMAGE_BASE="o-ran-sc/nonrtric-information-coordinator-service"
+ICS_IMAGE_TAG_LOCAL="1.2.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE_SNAPSHOT="1.2.0-SNAPSHOT"
+ICS_IMAGE_TAG_REMOTE="1.2.0"
+ICS_IMAGE_TAG_REMOTE_RELEASE="1.2.0"
+#Note: Update var ICS_FEATURE_LEVEL if image version is changed
#Control Panel image and tags
CONTROL_PANEL_IMAGE_BASE="o-ran-sc/nonrtric-controlpanel"
KUBE_PROXY_IMAGE_TAG_LOCAL="latest"
#No remote image for kube proxy, local image always used
-#Kube proxy remote image and tag
+#PVC Cleaner remote image and tag
PVC_CLEANER_IMAGE_BASE="ubuntu"
PVC_CLEANER_IMAGE_TAG_REMOTE_PROXY="20.10"
#No local image for pvc cleaner, remote image always used
+#Kafka Procon image and tag
+KAFKAPC_IMAGE_BASE="kafka-procon"
+KAFKAPC_IMAGE_TAG_LOCAL="latest"
+#No remote image for kafka-procon, local image always used
+
# List of app short names produced by the project
-PROJECT_IMAGES_APP_NAMES="PA ECS CP RC RICSIM NGW DMAAPADP DMAAPMED" # Add SDNC here if oran image is used
+PROJECT_IMAGES_APP_NAMES="PA ICS CP RC RICSIM NGW DMAAPADP DMAAPMED" # Add SDNC here if oran image is used
# List of app short names which images pulled from ORAN
ORAN_IMAGES_APP_NAMES="" # Not used
POLICY_AGENT_DATA_FILE="application_configuration.json" # Container data file name
POLICY_AGENT_CONTAINER_MNT_DIR="/var/policy-management-service" # Mounted dir in the container
-ECS_APP_NAME="enrichmentservice" # Name for ECS container
-ECS_DISPLAY_NAME="Enrichment Coordinator Service" # Display name for ECS container
-ECS_EXTERNAL_PORT=8083 # ECS container external port (host -> container)
-ECS_INTERNAL_PORT=8083 # ECS container internal port (container -> container)
-ECS_EXTERNAL_SECURE_PORT=8434 # ECS container external secure port (host -> container)
-ECS_INTERNAL_SECURE_PORT=8434 # ECS container internal secure port (container -> container)
-
-ECS_LOGPATH="/var/log/enrichment-coordinator-service/application.log" # Path the application log in the ECS container
-ECS_APP_NAME_ALIAS="enrichment-service-container" # Alias name, name used by the control panel
-ECS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
-ECS_CONTAINER_MNT_DIR="/var/enrichment-coordinator-service" # Mounted dir in the container
-ECS_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
-ECS_CERT_MOUNT_DIR="./cert"
-ECS_ALIVE_URL="/status" # Base path for alive check
-ECS_COMPOSE_DIR="ecs" # Dir in simulator_group for docker-compose
-ECS_CONFIG_MOUNT_PATH=/opt/app/enrichment-coordinator-service/config # Internal container path for configuration
-ECS_CONFIG_FILE=application.yaml # Config file name
-ECS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
-ECS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO" # Space separated list of features
+ICS_APP_NAME="informationservice" # Name for ICS container
+ICS_DISPLAY_NAME="Information Coordinator Service" # Display name for ICS container
+ICS_EXTERNAL_PORT=8083 # ICS container external port (host -> container)
+ICS_INTERNAL_PORT=8083 # ICS container internal port (container -> container)
+ICS_EXTERNAL_SECURE_PORT=8434 # ICS container external secure port (host -> container)
+ICS_INTERNAL_SECURE_PORT=8434 # ICS container internal secure port (container -> container)
+
+ICS_LOGPATH="/var/log/information-coordinator-service/application.log" # Path the application log in the ICS container
+ICS_APP_NAME_ALIAS="information-service-container" # Alias name, name used by the control panel
+ICS_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
+ICS_CONTAINER_MNT_DIR="/var/information-coordinator-service" # Mounted dir in the container
+ICS_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
+ICS_CERT_MOUNT_DIR="./cert"
+ICS_ALIVE_URL="/status" # Base path for alive check
+ICS_COMPOSE_DIR="ics" # Dir in simulator_group for docker-compose
+ICS_CONFIG_MOUNT_PATH=/opt/app/information-coordinator-service/config # Internal container path for configuration
+ICS_CONFIG_FILE=application.yaml # Config file name
+ICS_VERSION="V1-2" # Version where the types are decoupled from the producer registration
+ICS_FEATURE_LEVEL="INFO-TYPES TYPE-SUBSCRIPTIONS INFO-TYPE-INFO RESP_CODE_CHANGE_1" # Space separated list of features
MR_DMAAP_APP_NAME="message-router" # Name for the Dmaap MR
MR_STUB_APP_NAME="mr-stub" # Name of the MR stub
MR_STUB_COMPOSE_DIR="mrstub" # Dir in simulator_group for mr stub for - docker-compose
MR_KAFKA_APP_NAME="message-router-kafka" # Kafka app name, if just named "kafka" the image will not start...
MR_KAFKA_PORT=9092 # Kafka port number
+MR_KAFKA_DOCKER_LOCALHOST_PORT=30098 # Kafka port number for docker localhost
+MR_KAFKA_KUBE_NODE_PORT=30099 # Kafka node port number for kube
MR_ZOOKEEPER_APP_NAME="zookeeper" # Zookeeper app name
MR_ZOOKEEPER_PORT="2181" # Zookeeper port number
MR_DMAAP_HOST_MNT_DIR="/mnt" # Basedir localhost for mounted files
-MR_DMAAP_HOST_CONFIG_DIR="/configs" # Config files dir on localhost
+MR_DMAAP_HOST_CONFIG_DIR="/configs1" # Config files dir on localhost
CR_APP_NAME="callback-receiver" # Name for the Callback receiver
CR_DISPLAY_NAME="Callback receiver"
CR_APP_CALLBACK="/callbacks" # Url for callbacks
CR_APP_CALLBACK_MR="/callbacks-mr" # Url for callbacks (data from mr which contains string encoded jsons in a json arr)
CR_APP_CALLBACK_TEXT="/callbacks-text" # Url for callbacks (data containing text data)
-CR_ALIVE_URL="/" # Base path for alive check
+CR_ALIVE_URL="/reset" # Base path for alive check
CR_COMPOSE_DIR="cr" # Dir in simulator_group for docker-compose
PROD_STUB_APP_NAME="producer-stub" # Name for the Producer stub
CONTROL_PANEL_NGINX_KUBE_RESOLVER="kube-dns.kube-system.svc.cluster.local valid=5s" #nginx resolver for kube
CONTROL_PANEL_NGINX_DOCKER_RESOLVER="127.0.0.11" # nginx resolver for docker
CONTROL_PANEL_PATH_POLICY_PREFIX="/a1-policy/" # Path prefix for forwarding policy calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX="/data-producer/" # Path prefix for forwarding ecs calls to NGW
-CONTROL_PANEL_PATH_ECS_PREFIX2="/data-consumer/" # Path prefix for forwarding ecs calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX="/data-producer/" # Path prefix for forwarding ics calls to NGW
+CONTROL_PANEL_PATH_ICS_PREFIX2="/data-consumer/" # Path prefix for forwarding ics calls to NGW
NRT_GATEWAY_APP_NAME="nonrtricgateway" # Name of the Gateway container
NRT_GATEWAY_DISPLAY_NAME="NonRT-RIC Gateway"
DMAAP_MED_LOGPATH="/var/log/dmaap-adaptor-service/application.log" # Path the application log in the Dmaap Mediator container
DMAAP_MED_HOST_MNT_DIR="./mnt" # Mounted db dir, relative to compose file, on the host
#DMAAP_ADP_CONTAINER_MNT_DIR="/var/dmaap-adaptor-service" # Mounted dir in the container
-#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.enrichment" # Url for trace/debug
+#DMAAP_MED_ACTUATOR="/actuator/loggers/org.oransc.information" # Url for trace/debug
#DMAAP_MED_CERT_MOUNT_DIR="./cert"
DMAAP_MED_ALIVE_URL="/status" # Base path for alive check
DMAAP_MED_COMPOSE_DIR="dmaapmed" # Dir in simulator_group for docker-compose
DMAAP_MED_DATA_MOUNT_PATH="/configs" # Path in container for data file
DMAAP_MED_DATA_FILE="type_config.json" # Container data file name
+KAFKAPC_APP_NAME="kafka-procon" # Name for the Kafka procon
+KAFKAPC_DISPLAY_NAME="Kafaka Producer/Consumer"
+KAFKAPC_EXTERNAL_PORT=8096 # Kafka procon container external port (host -> container)
+KAFKAPC_INTERNAL_PORT=8090 # Kafka procon container internal port (container -> container)
+KAFKAPC_EXTERNAL_SECURE_PORT=8097 # Kafka procon container external secure port (host -> container)
+KAFKAPC_INTERNAL_SECURE_PORT=8091 # Kafka procon container internal secure port (container -> container)
+KAFKAPC_ALIVE_URL="/" # Base path for alive check
+KAFKAPC_COMPOSE_DIR="kafka-procon" # Dir in simulator_group for docker-compose
+KAFKAPC_BUILD_DIR="kafka-procon" # Build dir
########################################
# Setting for common curl-base function
########################################
# Applies only to images defined in the test-env files with image names and tags defined as XXXX_RELEASE
IMAGE_CATEGORY="DEV"
+#Var to indicate docker-compose version, V1 or V2
+#V1 names replicated containers <proj-name>_<service-name>_<index>
+#V2 names replicated containers <proj-name>-<service-name>-<index>
+DOCKER_COMPOSE_VERION="V1"
+
# Function to indent cmd output with one space
indent1() { sed 's/^/ /'; }
#Var for measuring execution time
TCTEST_START=$SECONDS
+#Vars to hold the start time and timer text for a custom timer
+TC_TIMER_STARTTIME=""
+TC_TIMER_TIMER_TEXT=""
+TC_TIMER_CURRENT_FAILS="" # The number of failed tests when the timer starts.
+ # Compared with the current number of fails at timer stop
+ # to judge the measurement reliability
+
#File to save timer measurement results
TIMER_MEASUREMENTS=".timer_measurement.txt"
-echo -e "Activity \t Duration" > $TIMER_MEASUREMENTS
+echo -e "Activity \t Duration \t Info" > $TIMER_MEASUREMENTS
# If this is set, some images (control by the parameter repo-polcy) will be re-tagged and pushed to this repo before any
IMAGE_REPO_ADR=""
if [ $paramerror -eq 0 ]; then
if [ "$1" == "--print-stats" ]; then
PRINT_CURRENT_STATS=1
- echo "Option set - Print stats"
+ echo "Option set - Print stats after every test-case and config"
shift;
foundparm=0
fi
else
echo " None"
fi
+
+echo -e $BOLD"Auto adding included apps"$EBOLD
+ for iapp in $INCLUDED_IMAGES; do
+ file_pointer=$(echo $iapp | tr '[:upper:]' '[:lower:]')
+ file_pointer="../common/"$file_pointer"_api_functions.sh"
+ padded_iapp=$iapp
+ while [ ${#padded_iapp} -lt 16 ]; do
+ padded_iapp=$padded_iapp" "
+ done
+ echo " Auto-adding included app $padded_iapp Sourcing $file_pointer"
+ . $file_pointer
+ if [ ! -f "$file_pointer" ]; then
+ echo " Include file $file_pointer for app $iapp does not exist"
+ exit 1
+ fi
+ done
echo ""
+echo -e $BOLD"Test environment info"$EBOLD
+
# Check needed installed sw
+
+tmp=$(which bash)
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
+ echo -e $RED"bash is required to run the test environment, pls install"$ERED
+ exit 1
+fi
+echo " bash is installed and using version:"
+echo "$(bash --version)" | indent2
+
tmp=$(which python3)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
echo -e $RED"python3 is required to run the test environment, pls install"$ERED
exit 1
fi
+echo " python3 is installed and using version: $(python3 --version)"
+
tmp=$(which docker)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
echo -e $RED"docker is required to run the test environment, pls install"$ERED
exit 1
fi
+echo " docker is installed and using versions:"
+echo " $(docker version --format 'Client version {{.Client.Version}} Server version {{.Server.Version}}')"
tmp=$(which docker-compose)
-if [ $? -ne 0 ] || [ -z tmp ]; then
+if [ $? -ne 0 ] || [ -z "$tmp" ]; then
if [ $RUNMODE == "DOCKER" ]; then
echo -e $RED"docker-compose is required to run the test environment, pls install"$ERED
exit 1
fi
fi
-if [ $RUNMODE == "DOCKER" ]; then
- tmp=$(docker-compose version | grep -i 'docker' | grep -i 'compose' | grep -i 'version')
- if [[ "$tmp" == *'v2'* ]]; then
- echo -e $RED"docker-compose is using docker-compose version 2"$ERED
- echo -e $RED"The test environment only support version 1"$ERED
- echo -e $RED"Disable version 2 by cmd 'docker-compose disable-v2' and re-run the script "$ERED
- exit 1
- fi
+tmp=$(docker-compose version --short)
+echo " docker-compose installed and using version $tmp"
+if [[ "$tmp" == *'v2'* ]]; then
+ DOCKER_COMPOSE_VERION="V2"
fi
tmp=$(which kubectl)
fi
else
if [ $RUNMODE == "KUBE" ]; then
+ echo " kubectl is installed and using versions:"
+ echo $(kubectl version --short=true) | indent2
res=$(kubectl cluster-info 2>&1)
if [ $? -ne 0 ]; then
echo -e "$BOLD$RED############################################# $ERED$EBOLD"
fi
fi
+echo ""
+
echo -e $BOLD"Checking configured image setting for this test case"$EBOLD
#Temp var to check for image variable name errors
IMAGE_SUFFIX="none"
fi
# A function name is created from the app short name
- # for example app short name 'ECS' -> produce the function
- # name __ECS_imagesetup
+ # for example app short name 'ICS' -> produce the function
+ # name __ICS_imagesetup
# This function is called and is expected to exist in the imported
- # file for the ecs test functions
+ # file for the ics test functions
# The resulting function impl will call '__check_and_create_image_var' function
# with appropriate parameters
# If the image suffix is none, then the component decides the suffix
echo "===================================="
column -t -s $'\t' $TIMER_MEASUREMENTS
if [ $RES_PASS != $RES_TEST ]; then
- echo -e $RED"Measurement may not be reliable when there are failed test - script timeouts may cause long measurement values"$ERED
+ echo -e $RED"Measurement may not be reliable when there are failed test - failures may cause long measurement values due to timeouts etc."$ERED
fi
echo ""
#####################################################################
# Start timer for time measurement
-# args - (any args will be printed though)
+# args: <timer message to print> - timer value and message will be printed both on screen
+# and in the timer measurement report - if "print_timer" is called at least once
start_timer() {
echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
- TC_TIMER=$SECONDS
+ TC_TIMER_STARTTIME=$SECONDS
+ TC_TIMER_TIMER_TEXT="${@:1}"
+ if [ $# -ne 1 ]; then
+ __print_err "need 1 arg, <timer message to print>" $@
+ TC_TIMER_TIMER_TEXT=${FUNCNAME[0]}":"${BASH_LINENO[0]}
+ echo " Assigning timer name: "$TC_TIMER_TIMER_TEXT
+ fi
+ TC_TIMER_CURRENT_FAILS=$(($RES_FAIL+$RES_CONF_FAIL))
echo " Timer started: $(date)"
}
-# Print the value of the time (in seconds)
-# args - <timer message to print> - timer value and message will be printed both on screen
-# and in the timer measurement report
+# Print the value (in seconds) of the currently running timer
+# Timer value and message will be printed both on screen and in the timer measurement report
print_timer() {
- echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
- if [ $# -lt 1 ]; then
- ((RES_CONF_FAIL++))
- __print_err "need 1 or more args, <timer message to print>" $@
- exit 1
+ echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $TC_TIMER_TIMER_TEXT $EBOLD
+ if [ -z "$TC_TIMER_STARTTIME" ]; then
+ __print_err "timer not started" $@
+ return 1
fi
- duration=$(($SECONDS-$TC_TIMER))
+ duration=$(($SECONDS-$TC_TIMER_STARTTIME))
if [ $duration -eq 0 ]; then
duration="<1 second"
else
duration=$duration" seconds"
fi
echo " Timer duration :" $duration
-
- echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
-}
-
-# Print the value of the time (in seconds) and reset the timer
-# args - <timer message to print> - timer value and message will be printed both on screen
-# and in the timer measurement report
-print_and_reset_timer() {
- echo -e $BOLD"INFO(${BASH_LINENO[0]}): "${FUNCNAME[0]}"," $@ $EBOLD
- if [ $# -lt 1 ]; then
- ((RES_CONF_FAIL++))
- __print_err "need 1 or more args, <timer message to print>" $@
- exit 1
- fi
- duration=$(($SECONDS-$TC_TIMER))" seconds"
- if [ $duration -eq 0 ]; then
- duration="<1 second"
- else
- duration=$duration" seconds"
+ res="-"
+ if [ $(($RES_FAIL+$RES_CONF_FAIL)) -ne $TC_TIMER_CURRENT_FAILS ]; then
+ res="Failures occured during test - timer not reliabled"
fi
- echo " Timer duration :" $duration
- TC_TIMER=$SECONDS
- echo " Timer reset"
-
- echo -e "${@:1} \t $duration" >> $TIMER_MEASUREMENTS
+ echo -e "$TC_TIMER_TIMER_TEXT \t $duration \t $res" >> $TIMER_MEASUREMENTS
}
+
# Print info about deviations from intended tests
# Each deviation counted is also printed in the testreport
# args <deviation message to print>
for imagename in $APP_SHORT_NAMES; do
docker ps -a --filter "label=nrttest_app=$imagename" --filter "network=$DOCKER_SIM_NWNAME" --format ' {{.Label "nrttest_dp"}}\n{{.Label "nrttest_app"}}\n{{.Names}}' >> $running_contr_file
done
+ running_contr_file_empty="No docker containers running, started by previous test execution"
+ if [ -s $running_contr_file ]; then
+ running_contr_file_empty=""
+ fi
# Kill all containers started by the test env - to speed up shut down
docker kill $(docker ps -a --filter "label=nrttest_app" --format '{{.Names}}') &> /dev/null
tab_heading3="$tab_heading3"" "
done
- echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
- cntr=0
- while read p; do
- if (( $cntr % 3 == 0 ));then
- row=""
- heading=$p
- heading_len=$tab_heading1_len
- fi
- if (( $cntr % 3 == 1));then
- heading=$p
- heading_len=$tab_heading2_len
- fi
- if (( $cntr % 3 == 2));then
- contr=$p
- heading=$p
- heading_len=$tab_heading3_len
- fi
- while (( ${#heading} < $heading_len)); do
- heading="$heading"" "
- done
- row=$row$heading
- if (( $cntr % 3 == 2));then
- echo -ne $row$SAMELINE
- echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
- docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
- echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
- docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
- echo -e " $row ${GREEN}stopped removed ${EGREEN}"
- fi
- let cntr=cntr+1
- done <$running_contr_file
+ if [ ! -z "$running_contr_file_empty" ]; then
+ echo $running_contr_file_empty | indent1
+ else
+ echo " $tab_heading1$tab_heading2$tab_heading3"" Actions"
+ cntr=0
+ while read p; do
+ if (( $cntr % 3 == 0 ));then
+ row=""
+ heading=$p
+ heading_len=$tab_heading1_len
+ fi
+ if (( $cntr % 3 == 1));then
+ heading=$p
+ heading_len=$tab_heading2_len
+ fi
+ if (( $cntr % 3 == 2));then
+ contr=$p
+ heading=$p
+ heading_len=$tab_heading3_len
+ fi
+ while (( ${#heading} < $heading_len)); do
+ heading="$heading"" "
+ done
+ row=$row$heading
+ if (( $cntr % 3 == 2));then
+ echo -ne $row$SAMELINE
+ echo -ne " $row ${GREEN}stopping...${EGREEN}${SAMELINE}"
+ docker stop $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+ echo -ne " $row ${GREEN}stopped removing...${EGREEN}${SAMELINE}"
+ docker rm --force $(docker ps -qa --filter name=${contr} --filter network=$DOCKER_SIM_NWNAME) &> /dev/null
+ echo -e " $row ${GREEN}stopped removed ${EGREEN}"
+ fi
+ let cntr=cntr+1
+ done <$running_contr_file
+ fi
echo ""
echo -e " Scaled $restype $resid $ns_text with label $labelname=$labelid to 0, current count=$count $GREEN OK $EGREEN"
fi
echo -ne " Deleting $restype $resid $ns_text with label $labelname=$labelid "$SAMELINE
- kubectl delete $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
+ kubectl delete --grace-period=1 $restype $resid $ns_flag 1> /dev/null 2> ./tmp/kubeerr
if [ $? -eq 0 ]; then
echo -e " Deleted $restype $resid $ns_text with label $labelname=$labelid $GREEN OK $EGREEN"
else
if [ $RUNMODE == "KUBE" ]; then
__clean_kube
if [ $PRE_CLEAN -eq 1 ]; then
- echo " Clean docker resouces to free up resources, may take time..."
+ echo " Cleaning docker resouces to free up resources, may take time..."
../common/clean_docker.sh 2&>1 /dev/null
echo ""
fi
appcount=$1
shift
+ envsubst < $compose_file > "gen_"$compose_file
+ compose_file="gen_"$compose_file
+
if [ "$compose_args" == "NODOCKERARGS" ]; then
docker-compose -f $compose_file up -d &> .dockererr
if [ $? -ne 0 ]; then
#
# List of short names for all supported apps, including simulators etc
-APP_SHORT_NAMES="PA ECS SDNC CP NGW RC RICSIM HTTPPROXY CBS CONSUL DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER"
+APP_SHORT_NAMES="PA ICS SDNC CP NGW RC RICSIM HTTPPROXY CBS CONSUL DMAAPMR MR CR PRODSTUB KUBEPROXY DMAAPMED DMAAPADP PVCCLEANER KAFKAPC"
# List of available apps that built and released of the project
-PROJECT_IMAGES="PA ECS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
+PROJECT_IMAGES="PA ICS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
# List of available apps to override with local or remote staging/snapshot/release image
-AVAILABLE_IMAGES_OVERRIDE="PA ECS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
+AVAILABLE_IMAGES_OVERRIDE="PA ICS SDNC CP NGW RICSIM RC DMAAPMED DMAAPADP"
# List of available apps where the image is built by the test environment
-LOCAL_IMAGE_BUILD="MR CR PRODSTUB KUBEPROXY HTTPPROXY"
+LOCAL_IMAGE_BUILD="MR CR PRODSTUB KUBEPROXY HTTPPROXY KAFKAPC"
# List of system app used only by the test env - kubernetes
TESTENV_KUBE_SYSTEM_APPS="PVCCLEANER"
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+.env
\ No newline at end of file
RUN chmod +x start.sh
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+
+## add permissions for appuser user
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+ chown -R appuser:appuser /var/log/nginx && \
+ chown -R appuser:appuser /var/lib/nginx && \
+ chown -R appuser:appuser /etc/nginx/conf.d
+RUN touch /var/run/nginx.pid && \
+ chown -R appuser:appuser /var/run/nginx.pid
+
+USER appuser
+
CMD [ "./start.sh" ]
-user www-data;
+# user www-data;
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
WORKDIR /usr/src/app
COPY http_proxy.js .
+USER node
+
CMD [ "node", "http_proxy.js" ]
\ No newline at end of file
--- /dev/null
+.tmp.json
+.dockererr
+.env
+.payload
--- /dev/null
+#==================================================================================
+# Copyright (C) 2021: Nordix Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This source code is part of the near-RT RIC (RAN Intelligent Controller)
+# platform project (RICP).
+#==================================================================================
+
+ARG NEXUS_PROXY_REPO
+
+##
+## Build
+##
+
+FROM ${NEXUS_PROXY_REPO}golang:1.17-bullseye AS build
+WORKDIR /app
+COPY go.mod .
+COPY go.sum .
+RUN go mod download
+COPY main.go .
+RUN go build -o /kafkaprocon
+
+##
+## Deploy
+##
+
+FROM gcr.io/distroless/base-debian11
+WORKDIR /
+## Copy from "build" stage
+COPY --from=build /kafkaprocon .
+USER nonroot:nonroot
+ENTRYPOINT ["/kafkaprocon"]
\ No newline at end of file
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+# Automated test script for Kafka procon container
+
+# NOTE: Need a running instance of kafka
+
+
+export PORT=8096
+export HTTPX="http"
+export REQ_CONTENT=""
+export RESP_CONTENT="text/plain"
+
+# source function to do curl and check result
+. ../common/do_curl_function.sh
+
+echo "Requires a running kafka"
+
+payload=".payload"
+
+echo "=== hello world ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="OK"
+do_curl GET / 200
+
+echo "=== reset ==="
+REQ_CONTENT=""
+RESP_CONTENT=""
+RESULT="*"
+do_curl POST /reset 200
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[]"
+do_curl GET /topics 200
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/received 200
+
+echo "=== get topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic 404
+
+echo "=== get topic counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/counters/sent 404
+
+echo "=== get topic counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/counters/received 404
+
+echo "=== create a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT=""
+RESULT="*"
+do_curl PUT /topics/test-topic 405
+
+echo "=== start to send on a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/startsend 404
+
+echo "=== start to receive from a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/startreceive 404
+
+echo "=== send a msg on a topic ==="
+echo "TEST1" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/msg 404 $payload
+
+echo "=== receive a msg from a topic ==="
+echo "TEST1" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/test-topic/msg 404 $payload
+
+echo "=== stop to send on a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/stopsend 404
+
+echo "=== stop to receive from a topic ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/test-topic/stopreceive 404
+
+# Create 4 topics
+
+echo "=== create topic1 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic1?type=text/plain 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\"]"
+do_curl GET /topics 200
+
+echo "=== create topic2 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic2?type=text/plain 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\"]"
+do_curl GET /topics 200
+
+echo "=== create topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic3?type=application/json 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\",\"topic3\"]"
+do_curl GET /topics 200
+
+echo "=== create topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl PUT /topics/topic4?type=application/json 201
+
+echo "=== get topics ==="
+REQ_CONTENT=""
+RESP_CONTENT="application/json"
+RESULT="json:[\"topic1\",\"topic2\",\"topic3\",\"topic4\"]"
+do_curl GET /topics 200
+
+echo "=== get topic1 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="text/plain"
+do_curl GET /topics/topic1 200
+
+echo "=== get topic2 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="text/plain"
+do_curl GET /topics/topic2 200
+
+echo "=== get topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="application/json"
+do_curl GET /topics/topic3 200
+
+echo "=== get topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="application/json"
+do_curl GET /topics/topic4 200
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 400 $payload
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic1/msg 400
+
+echo "=== send a msg on topic2 ==="
+echo "TEST22" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/msg 400 $payload
+
+echo "=== receive a msg from topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic2/msg 400
+
+
+
+echo "=== send a msg on topic3 ==="
+echo "{\"test\":\"33\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/msg 400 $payload
+
+echo "=== receive a msg from topic3 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic3/msg 400
+
+echo "=== send a msg on topic4 ==="
+echo "{\"test\":\"44\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/msg 400 $payload
+
+echo "=== receive a msg from topic4 ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic4/msg 400
+
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /counters/received 200
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== get topic2 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic2/counters/sent 200
+
+echo "=== get topic2 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic2/counters/received 200
+
+echo "=== get topic3 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic3/counters/sent 200
+
+echo "=== get topic3 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic3/counters/received 200
+
+echo "=== get topic4 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic4/counters/sent 200
+
+echo "=== get topic4 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic4/counters/received 200
+
+# Begins send and receive
+
+echo "=== set topic1 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startsend 200
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 400 $payload
+
+echo "=== send a msg on topic1 ==="
+echo "TEST11" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 200 $payload
+
+echo "sleep 2 to allow sending the msg to kafka"
+sleep 2
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl GET /topics/topic1/msg 400
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="0"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== set topic1 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startreceive 200
+
+echo "sleep 60 to allow kafka to process the msg, unclear why first message takes a long time..."
+sleep 60
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /topics/topic1/counters/received 200
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="1"
+do_curl GET /counters/received 200
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST11"
+do_curl GET /topics/topic1/msg 200
+
+echo "=== receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic1/msg 204
+
+
+echo "=== set topic1 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startsend 200
+
+echo "=== set topic2 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/startsend 200
+
+echo "=== set topic3 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/startsend 200
+
+echo "=== set topic4 start sending ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/startsend 200
+
+echo "=== set topic1 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/startreceive 200
+
+echo "=== set topic2 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/startreceive 200
+
+echo "=== set topic3 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/startreceive 200
+
+echo "=== set topic4 start receiving ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/startreceive 200
+
+
+# Send and receive on all topics
+
+echo "=== send a msg on topic1 ==="
+echo "TEST101" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic1/msg 200 $payload
+
+echo "=== send two msg on topic2 ==="
+echo "TEST201" > $payload
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic2/msg 200 $payload
+echo "TEST202" > $payload
+do_curl POST /topics/topic2/msg 200 $payload
+
+echo "=== send three msg on topic3 ==="
+echo "{\"a\":\"msg301\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic3/msg 200 $payload
+echo "{\"a\":\"msg302\"}" > $payload
+do_curl POST /topics/topic3/msg 200 $payload
+echo "{\"a\":\"msg303\"}" > $payload
+do_curl POST /topics/topic3/msg 200 $payload
+
+
+echo "=== send four msg on topic4 ==="
+echo "{\"a\":\"msg401\"}" > $payload
+REQ_CONTENT="application/json"
+RESP_CONTENT="text/plain"
+RESULT="*"
+do_curl POST /topics/topic4/msg 200 $payload
+echo "{\"a\":\"msg402\"}" > $payload
+do_curl POST /topics/topic4/msg 200 $payload
+echo "{\"a\":\"msg403\"}" > $payload
+do_curl POST /topics/topic4/msg 200 $payload
+echo "{\"a\":\"msg404\"}" > $payload
+do_curl POST /topics/topic4/msg 200 $payload
+
+echo "sleep 10 to allow kafka to process msg"
+sleep 10
+
+echo "=== get global counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="11"
+do_curl GET /counters/sent 200
+
+echo "=== get global counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="11"
+do_curl GET /counters/received 200
+
+
+echo "=== get topic1 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic1/counters/sent 200
+
+echo "=== get topic1 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic1/counters/received 200
+
+
+echo "=== get topic2 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic2/counters/sent 200
+
+echo "=== get topic2 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="2"
+do_curl GET /topics/topic2/counters/received 200
+
+
+echo "=== get topic3 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="3"
+do_curl GET /topics/topic3/counters/sent 200
+
+echo "=== get topic3 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="3"
+do_curl GET /topics/topic3/counters/received 200
+
+
+echo "=== get topic4 counter sent ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="4"
+do_curl GET /topics/topic4/counters/sent 200
+
+echo "=== get topic4 counter received ==="
+REQ_CONTENT=""
+RESP_CONTENT="text/plain"
+RESULT="4"
+do_curl GET /topics/topic4/counters/received 200
+
+
+echo "=== get a msg on topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST101"
+do_curl GET /topics/topic1/msg 200
+
+
+echo "=== attempt to receive a msg from topic1 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic1/msg 204
+
+echo "=== get a two msg on topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="text/plain"
+RESULT="TEST201"
+do_curl GET /topics/topic2/msg 200
+RESULT="TEST202"
+do_curl GET /topics/topic2/msg 200
+
+
+echo "=== attempt to receive a msg from topic2 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic2/msg 204
+
+echo "=== get three msg on topic3 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="application/json"
+RESULT="json:{\"a\":\"msg301\"}"
+do_curl GET /topics/topic3/msg 200
+RESULT="json:{\"a\":\"msg302\"}"
+do_curl GET /topics/topic3/msg 200
+RESULT="json:{\"a\":\"msg303\"}"
+do_curl GET /topics/topic3/msg 200
+
+echo "=== attempt to receive a msg from topic3 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic3/msg 204
+
+echo "=== get four msg on topic4 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT="application/json"
+RESULT="json:{\"a\":\"msg401\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg402\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg403\"}"
+do_curl GET /topics/topic4/msg 200
+RESULT="json:{\"a\":\"msg404\"}"
+do_curl GET /topics/topic4/msg 200
+
+echo "=== attempt to receive a msg from topic4 ==="
+REQ_CONTENT="text/plain"
+RESP_CONTENT=""
+RESULT="*"
+do_curl GET /topics/topic4/msg 204
+
+echo "********************"
+echo "*** All tests ok ***"
+echo "********************"
+
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+echo "This script requires running kafka instance in a docker private network"
+
+# Script to build and start the container
+if [ $# -ne 2 ]; then
+ echo "usage: ./build-and-start.sh <docker-network> <kafka-bootstrapserver-host>:<kafka-bootstrapserver-port>"
+ echo "example: ./build-and-start.sh nonrtric-docker-net message-router-kafka:9092"
+ exit 1
+fi
+IMAGE="kafka-procon:latest"
+#Build the image
+docker build -t $IMAGE .
+
+if [ $? -ne 0 ]; then
+ echo "Build failed, exiting..."
+ exit 1
+fi
+
+echo "Starting kafka-procon"
+#Run the container in interactive mode on port 8090.
+docker run --rm -it -p "8090:8090" --network $1 -e KAFKA_BOOTSTRAP_SERVER=$2 --name kafka-procon $IMAGE
+
--- /dev/null
+module kafkaprocon
+
+go 1.17
+
+require (
+ github.com/confluentinc/confluent-kafka-go v1.7.0 // indirect
+ github.com/enriquebris/goconcurrentqueue v0.6.0 // indirect
+ github.com/gorilla/mux v1.8.0 // indirect
+)
--- /dev/null
+github.com/confluentinc/confluent-kafka-go v1.7.0 h1:tXh3LWb2Ne0WiU3ng4h5qiGA9XV61rz46w60O+cq8bM=
+github.com/confluentinc/confluent-kafka-go v1.7.0/go.mod h1:u2zNLny2xq+5rWeTQjFHbDzzNuba4P1vo31r9r4uAdg=
+github.com/enriquebris/goconcurrentqueue v0.6.0 h1:DJ97cgoPVoqlC4tTGBokn/omaB3o16yIs5QdAm6YEjc=
+github.com/enriquebris/goconcurrentqueue v0.6.0/go.mod h1:wGJhQNFI4wLNHleZLo5ehk1puj8M6OIl0tOjs3kwJus=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
--- /dev/null
+// Writing a basic HTTP server is easy using the
+// `net/http` package.
+package main
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "os"
+ "strconv"
+ "sync/atomic"
+ "time"
+
+ "github.com/confluentinc/confluent-kafka-go/kafka"
+ "github.com/enriquebris/goconcurrentqueue"
+ "github.com/gorilla/mux"
+)
+
+// Note: consumer 'group' and consumer 'user' both uses hardcoded values specific to this interface
+// globalCounters var holding the "global counters"
+// received number of received messages from all topics (int)
+// sent number of sent messages to all topics (int)
+// topics var holding all topic related info
+// <topic-name> name of a topic (present after topic is created)
+// content-type data type of the topic (string)
+// counters
+// received number of received messages from the topic (int)
+// sent number of sent messages to the topic (int)
+// messages
+// send messages waiting to be sent (set when sending is started) (fifo)
+// received received messages waiting to be fetched (set when reception is started) (fifo)
+
+type counter struct {
+ c uint64
+}
+
+func (c *counter) step() {
+ atomic.AddUint64(&c.c, 1)
+}
+
+func (c counter) get() uint64 {
+ return atomic.LoadUint64(&c.c)
+}
+
+type counters struct {
+ received counter
+ sent counter
+}
+
+func newCounters() counters {
+ return counters{
+ received: counter{},
+ sent: counter{},
+ }
+}
+
+type messages struct {
+ send *goconcurrentqueue.FIFO
+ received *goconcurrentqueue.FIFO
+}
+
+func (m *messages) startSend() bool {
+ if m.send == nil {
+ m.send = goconcurrentqueue.NewFIFO()
+ return true
+ }
+ return false
+}
+
+func (m *messages) stopSend() {
+ m.send = nil
+}
+
+func (m *messages) addToSend(msg string) error {
+ if m.send == nil {
+ return fmt.Errorf("sending not started")
+ }
+ m.send.Lock()
+ defer m.send.Unlock()
+ return m.send.Enqueue(msg)
+}
+
+func (m *messages) getToSend() (interface{}, error) {
+ if m.send == nil {
+ return "", fmt.Errorf("sending not started")
+ }
+ m.send.Lock()
+ defer m.send.Unlock()
+ return m.send.Dequeue()
+}
+
+func (m *messages) startReceive() bool {
+ if m.received == nil {
+ m.received = goconcurrentqueue.NewFIFO()
+ return true
+ }
+ return false
+}
+
+// stopReceive removes the receive queue, which signals the consumer
+// goroutine (see startToReceive) to terminate.
+// Bug fix: this previously cleared m.send instead of m.received, so
+// receiving could never be stopped or reset.
+func (m *messages) stopReceive() {
+ m.received = nil
+}
+
+type topic struct {
+ contentType string
+ counters counters
+ messages messages
+}
+
+func newTopic(ct string) *topic {
+ return &topic{
+ contentType: ct,
+ counters: counters{},
+ messages: messages{},
+ }
+}
+
+var globalCounters counters
+var topics map[string]*topic = make(map[string]*topic)
+
+var bootstrapserver = ""
+
+// initApp reads the mandatory KAFKA_BOOTSTRAP_SERVER env var into the
+// package-level bootstrapserver variable and terminates the process if
+// the variable is unset.
+func initApp() {
+ bootstrapserver = os.Getenv("KAFKA_BOOTSTRAP_SERVER")
+ if bootstrapserver == "" {
+ fmt.Println("Fatal error: env var KAFKA_BOOTSTRAP_SERVER not set")
+ fmt.Println("Exiting... ")
+ os.Exit(1)
+ }
+ fmt.Println("Using KAFKA_BOOTSTRAP_SERVER=" + bootstrapserver)
+}
+
+//Helper function to get a created topic, if it exists.
+//When the topic is unknown a 404 response is written and false is returned;
+//otherwise the topic, its id and true are returned.
+func getTopicFromRequest(w http.ResponseWriter, req *http.Request) (*topic, string, bool) {
+ topicId := mux.Vars(req)["topic"]
+ t, ok := topics[topicId]
+ if !ok {
+ w.WriteHeader(http.StatusNotFound)
+ fmt.Fprintf(w, "Topic %v does not exist", topicId)
+ return nil, "", false
+ }
+ return t, topicId, true
+}
+
+// Alive check
+// GET on /
+// Always responds 200 with body "OK".
+func healthCheck(w http.ResponseWriter, req *http.Request) {
+ fmt.Fprint(w, "OK")
+}
+
+// Deep reset of this interface stub - no removal of msgs or topics in kafka
+// POST on /reset
+// Stops all producer/consumer goroutines, waits for them to terminate,
+// then clears all counters and topic bookkeeping.
+func allreset(w http.ResponseWriter, req *http.Request) {
+ for _, t := range topics {
+ t.messages.stopSend()
+ t.messages.stopReceive()
+ }
+ //Allow producers/consumers to shut down
+ time.Sleep(5 * time.Second)
+ globalCounters = newCounters()
+ topics = make(map[string]*topic)
+ fmt.Fprint(w, "OK")
+}
+
+// Get topics, return json array of strings of topics created by this interface stub
+// Returns json array of strings, array is empty if no topics exist
+// GET on /topics
+
+func getTopics(w http.ResponseWriter, req *http.Request) {
+ topicKeys := make([]string, 0, len(topics))
+ fmt.Printf("len topics: %v\n", len(topics))
+ for k := range topics {
+ topicKeys = append(topicKeys, k)
+ }
+ var j, err = json.Marshal(topicKeys)
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ fmt.Fprintf(w, "Cannot convert list of topics to json, error details: %v", err)
+ return
+ } else {
+ w.Header().Set("Content-Type", "application/json")
+ w.WriteHeader(http.StatusOK)
+ w.Write(j)
+ }
+}
+
+// writeOkRepsonse writes msg as a text/plain response with the given status.
+// Bug fix: the Content-Type header must be set before WriteHeader - headers
+// set after WriteHeader are silently ignored by net/http (the order was
+// reversed here). fmt.Fprint is used instead of Fprintf so a '%' in msg is
+// not misinterpreted as a format verb.
+func writeOkRepsonse(w http.ResponseWriter, httpStatus int, msg string) {
+ w.Header().Set("Content-Type", "text/plain")
+ w.WriteHeader(httpStatus)
+ fmt.Fprint(w, msg)
+}
+
+// Get a counter value
+// GET /topics/counters/{counter}
+func getCounter(w http.ResponseWriter, req *http.Request) {
+ ctr := mux.Vars(req)["counter"]
+ var ctrvalue = -1
+ if ctr == "received" {
+ ctrvalue = int(globalCounters.received.get())
+ } else if ctr == "sent" {
+ ctrvalue = int(globalCounters.sent.get())
+ }
+
+ if ctrvalue == -1 {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Counter %v does not exist", ctr)
+ return
+ }
+ writeOkRepsonse(w, http.StatusOK, strconv.Itoa(ctrvalue))
+ return
+
+}
+
+// Create a topic
+// PUT on /topics/<topic>?type=<type>   type shall be 'application/json' or 'text/plain'
+// Responds 201 when created, 200 when the topic already exists with the same
+// type, 400 on a missing/conflicting type and 500 on kafka errors.
+func createTopic(w http.ResponseWriter, req *http.Request) {
+ topicId := mux.Vars(req)["topic"]
+ topicType := mux.Vars(req)["type"]
+
+ fmt.Printf("Creating topic: %v, content type: %v\n", topicId, topicType)
+
+ if len(topicType) == 0 {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Type not specified")
+ return
+ }
+
+ tid, exist := topics[topicId]
+ if exist {
+ if tid.contentType != topicType {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Topic type exist but type is different, queue content type: %v, requested content type: %v", tid.contentType, topicType)
+ return
+ }
+ writeOkRepsonse(w, http.StatusOK, "Topic exist")
+ return
+ }
+
+ t := newTopic(topicType)
+
+ a, err := kafka.NewAdminClient(&kafka.ConfigMap{"bootstrap.servers": bootstrapserver})
+ if err != nil {
+ // Bug fix: Close() was previously deferred BEFORE this error check,
+ // which panics on a nil client when NewAdminClient fails.
+ w.WriteHeader(http.StatusInternalServerError)
+ fmt.Fprintf(w, "Cannot create client to bootstrap server: "+bootstrapserver+", error details: %v", err)
+ return
+ }
+ defer a.Close()
+
+ ctx, cancel := context.WithCancel(context.Background())
+ defer cancel()
+
+ // Max time to wait for the admin operation to complete in kafka
+ maxDur := 10 * time.Second
+
+ _, err = a.CreateTopics(
+ ctx,
+ []kafka.TopicSpecification{{
+ Topic: topicId,
+ NumPartitions: 1,
+ ReplicationFactor: 1}},
+ kafka.SetAdminOperationTimeout(maxDur))
+
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ fmt.Fprintf(w, "Failed to create topic: %v, error details: %v", topicId, err)
+ return
+ }
+ topics[topicId] = t
+ w.WriteHeader(http.StatusCreated)
+ fmt.Fprintf(w, "Topic created")
+}
+
+// Get topic type
+// GET on /topics/<topic>
+// Responds 200 with the topic's content type, or 404 if the topic does not exist.
+func getTopic(w http.ResponseWriter, req *http.Request) {
+ t, _, exist := getTopicFromRequest(w, req)
+ if !exist {
+ return
+ }
+ w.WriteHeader(http.StatusOK)
+ // Bug fix: use Fprint, not Fprintf, so a '%' in the stored content type
+ // cannot be misinterpreted as a format verb.
+ fmt.Fprint(w, t.contentType)
+}
+
+// Get a topics counter value
+// GET /topics/{topic}/counters/{counter}
+func getTopicCounter(w http.ResponseWriter, req *http.Request) {
+ t, _, exist := getTopicFromRequest(w, req)
+ if !exist {
+ return
+ }
+ ctr := mux.Vars(req)["counter"]
+
+ var ctrvalue = -1
+ if ctr == "received" {
+ ctrvalue = int(t.counters.received.get())
+ } else if ctr == "sent" {
+ ctrvalue = int(t.counters.sent.get())
+ }
+
+ if ctrvalue == -1 {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Counter %v does not exist", ctr)
+ return
+ }
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, strconv.Itoa(ctrvalue))
+ return
+}
+
+// Start sending messages on a topic
+// POST on /topics/<topic>/startsend
+// Creates the send queue and spawns a producer goroutine that forwards
+// queued messages to kafka. The goroutine terminates when the send queue
+// is removed (stopSend) or when the producer cannot be created.
+func startToSend(w http.ResponseWriter, req *http.Request) {
+ t, topicId, exist := getTopicFromRequest(w, req)
+ if !exist {
+ return
+ }
+
+ // startSend returns false when sending is already active - treated as OK
+ if !t.messages.startSend() {
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, "Already started sending")
+ return
+ }
+ go func() {
+ p, err := kafka.NewProducer(&kafka.ConfigMap{"bootstrap.servers": bootstrapserver})
+ if err != nil {
+ fmt.Printf("Cannot create producer for topic: %v, error details: %v\n", topicId, err)
+ return
+ }
+ defer func() { p.Close() }()
+ for {
+ // A nil queue means stopSend was called - terminate the goroutine
+ q := t.messages.send
+ if q == nil {
+ return
+ }
+ // Peek at the head of the queue; the item is only removed after the
+ // produce attempt so nothing is lost between Get and Produce
+ m, err := q.Get(0)
+ if err == nil {
+ err = p.Produce(&kafka.Message{
+ TopicPartition: kafka.TopicPartition{Topic: &topicId, Partition: kafka.PartitionAny},
+ Value: []byte(fmt.Sprintf("%v", m)),
+ }, nil)
+ if err == nil {
+ q.Remove(0)
+ t.counters.sent.step()
+ globalCounters.sent.step()
+ msg := fmt.Sprintf("%v", m)
+ // Avoid flooding the log with very large payloads
+ if len(msg) < 500 {
+ fmt.Printf("Message sent on topic: %v, len: %v, msg: %v", topicId, len(msg), msg)
+ } else {
+ fmt.Printf("Message sent on topic: %v, len: %v, is larger than 500...not printed", topicId, len(msg))
+ }
+ } else {
+ // Send failures are logged and the message discarded
+ fmt.Printf("Failed to send message on topic: %v. Discarded. Error details: %v", topicId, err)
+ q.Remove(0)
+ }
+ } else {
+ // Queue empty - back off briefly before polling again
+ time.Sleep(10 * time.Millisecond)
+ }
+ }
+ }()
+
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, "Sending started")
+}
+
+// Start receiving messages from a topic
+// POST on /topics/<topic>/startreceive
+// Creates the receive queue and spawns a consumer goroutine that moves
+// messages from kafka into the queue. The goroutine terminates when the
+// receive queue is removed (stopReceive) or the consumer cannot be created.
+func startToReceive(w http.ResponseWriter, req *http.Request) {
+ t, topicId, exist := getTopicFromRequest(w, req)
+ if !exist {
+ return
+ }
+
+ if !t.messages.startReceive() {
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, "Already started receiving")
+ return
+ }
+
+ go func() {
+
+ defer func() { t.messages.stopReceive() }()
+
+ // Hardcoded consumer group specific to this interface stub
+ groupId := "kafkaprocon"
+
+ c, err := kafka.NewConsumer(&kafka.ConfigMap{
+ "bootstrap.servers": bootstrapserver,
+ "group.id": groupId,
+ "auto.offset.reset": "earliest",
+ "enable.auto.commit": true,
+ "auto.commit.interval.ms": 5000,
+ })
+ if err != nil {
+ fmt.Printf("Cannot create consumer for topic: %v, error details: %v\n", topicId, err)
+ return
+ }
+ c.Commit()
+ defer func() { c.Close() }()
+ for {
+ que := t.messages.received
+ if que == nil {
+ fmt.Printf("Cannot start receiving on topic: %v, queue does not exist\n", topicId)
+ return
+ }
+ fmt.Printf("Start subscribing on topic: %v\n", topicId)
+ err = c.SubscribeTopics([]string{topicId}, nil)
+ if err != nil {
+ fmt.Printf("Cannot start subscribing on topic: %v, error details: %v\n", topicId, err)
+ return
+ }
+ maxDur := 1 * time.Second
+ for {
+ // Bug fix: re-read the queue each iteration so stopReceive actually
+ // terminates the consumer; previously the loop never checked.
+ q := t.messages.received
+ if q == nil {
+ fmt.Printf("Receiving stopped on topic: %v\n", topicId)
+ return
+ }
+ msg, err := c.ReadMessage(maxDur)
+ if err == nil {
+ if len(msg.Value) < 500 {
+ fmt.Printf("Message received on topic: %v, partion: %v, len: %v, msg: %v", topicId, msg.TopicPartition, len(msg.Value), string(msg.Value))
+ } else {
+ fmt.Printf("Message received on topic: %v, partion: %v, len: %v is larger than 500...not printed", topicId, msg.TopicPartition, len(msg.Value))
+ }
+ err = q.Enqueue(string(msg.Value))
+ if err != nil {
+ // Bug fix: must not write to the ResponseWriter here - the HTTP
+ // handler returned long ago. Log the failure and stop instead.
+ fmt.Printf("Received message on topic: %v cannot be put in queue, error details: %v\n", topicId, err)
+ return
+ }
+ t.counters.received.step()
+ globalCounters.received.step()
+ } else {
+ fmt.Printf("Nothing to consume on topic: %v, reason: %v\n", topicId, err)
+ }
+ }
+ }
+ }()
+
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, "Receiving started")
+}
+
+// Post a message to a topic
+// POST /topics/<topic>/msg   content type must match the type the topic was created with
+// The message is queued and sent asynchronously by the producer goroutine
+// started via /startsend. Responds 200 when queued, 400 on content-type
+// mismatch or when sending is not started, 500 when queueing fails.
+func sendToTopic(w http.ResponseWriter, req *http.Request) {
+ t, topicId, exist := getTopicFromRequest(w, req)
+ if !exist {
+ return
+ }
+ q := t.messages.send
+ if q == nil {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Sending not initiated on topic: %v", topicId)
+ return
+ }
+ ct := req.Header.Get("Content-Type")
+ if ct != t.contentType {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Message to send content type: %v on topic: %v does not match queue content type: %v", ct, topicId, t.contentType)
+ return
+ }
+
+ if ct == "application/json" {
+ if err := req.ParseForm(); err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Json payload cannot be decoded on topic: %v, error details %v", topicId, err)
+ return
+ }
+ b, err := ioutil.ReadAll(req.Body)
+ if err == nil {
+ // Avoid flooding the log with very large payloads
+ if len(b) < 500 {
+ fmt.Printf("Json payload to send on topic: %v, msg: %v", topicId, string(b))
+ } else {
+ fmt.Printf("Json payload to send on topic: %v larger than 500 bytes, not printed...", topicId)
+ }
+ }
+ err = q.Enqueue(string(b))
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ fmt.Fprintf(w, "Json message to send cannot be put in queue")
+ return
+ }
+ } else if ct == "text/plain" {
+ if err := req.ParseForm(); err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Text payload to send on topic: %v cannot be decoded, error details %v\n", topicId, err)
+ return
+ }
+ b, err := ioutil.ReadAll(req.Body)
+ if err == nil {
+ // Avoid flooding the log with very large payloads
+ if len(b) < 500 {
+ fmt.Printf("Text payload to send on topic: %v, msg: %v", topicId, string(b))
+ } else {
+ fmt.Printf("Text payload to send on topic: %v larger than 500 bytes, not printed...", topicId)
+ }
+ }
+ err = q.Enqueue(string(b))
+ if err != nil {
+ w.WriteHeader(http.StatusInternalServerError)
+ fmt.Fprintf(w, "Text message to send cannot be put in queue")
+ return
+ }
+ } else {
+ w.WriteHeader(http.StatusBadRequest)
+ fmt.Fprintf(w, "Message to send, unknown content type %v", ct)
+ return
+ }
+
+ // Bug fix: headers must be set before WriteHeader - the previous order
+ // (WriteHeader first) silently dropped the Content-Type header.
+ w.Header().Set("Content-Type", "text/plain")
+ w.WriteHeader(http.StatusOK)
+ fmt.Fprintf(w, "Message to send put in queue")
+}
+
+// Get zero or one message from a topic.
+// GET /topics/{topic}/msg - pops one message from the topic's receive
+// queue; 204 when the queue is empty, 400 when receiving was never started.
+func receiveFromTopic(w http.ResponseWriter, req *http.Request) {
+	topic, topicId, exist := getTopicFromRequest(w, req)
+	if !exist {
+		return
+	}
+	queue := topic.messages.received
+	if queue == nil {
+		w.WriteHeader(http.StatusBadRequest)
+		fmt.Fprintf(w, "Receiving not initiated on topic %v", topicId)
+		return
+	}
+	msg, err := queue.Dequeue()
+	if err != nil {
+		// Empty queue - nothing to deliver right now.
+		w.WriteHeader(http.StatusNoContent)
+		return
+	}
+	w.Header().Set("Content-Type", topic.contentType)
+	w.WriteHeader(http.StatusOK)
+	fmt.Fprintf(w, "%v", msg)
+}
+
+// Remove the send queue to stop sending.
+// POST /topics/{topic}/stopsend - always answers 204 once the topic exists.
+func stopToSend(w http.ResponseWriter, req *http.Request) {
+	fmt.Printf("Stop sending\n")
+	topic, _, exist := getTopicFromRequest(w, req)
+	if !exist {
+		return
+	}
+	topic.messages.stopSend()
+	w.WriteHeader(http.StatusNoContent)
+}
+
+// Remove the receive queue to stop receiving.
+// POST /topics/{topic}/stopreceive - always answers 204 once the topic exists.
+func stopToReceive(w http.ResponseWriter, req *http.Request) {
+	fmt.Printf("Stop receiving\n")
+	topic, _, exist := getTopicFromRequest(w, req)
+	if !exist {
+		return
+	}
+	topic.messages.stopReceive()
+	w.WriteHeader(http.StatusNoContent)
+}
+
+// HelloServer greets the caller, echoing back the request path (minus the
+// leading slash).
+func HelloServer(w http.ResponseWriter, r *http.Request) {
+	name := r.URL.Path[1:]
+	fmt.Fprintf(w, "Hello, %s!", name)
+}
+
+// main wires up the REST API of the kafka producer/consumer test stub and
+// starts the HTTP server on port 8090.
+func main() {
+
+	initApp()
+
+	r := mux.NewRouter()
+
+	r.HandleFunc("/", healthCheck).Methods("GET")
+	r.HandleFunc("/reset", allreset).Methods("POST")
+	r.HandleFunc("/counters/{counter}", getCounter).Methods("GET")
+	r.HandleFunc("/topics", getTopics).Methods("GET")
+	r.HandleFunc("/topics/{topic}/counters/{counter}", getTopicCounter).Methods("GET")
+	r.HandleFunc("/topics/{topic}", createTopic).Methods("PUT").Queries("type", "{type}")
+	r.HandleFunc("/topics/{topic}", getTopic).Methods("GET")
+	r.HandleFunc("/topics/{topic}/startsend", startToSend).Methods("POST")
+	r.HandleFunc("/topics/{topic}/startreceive", startToReceive).Methods("POST")
+	r.HandleFunc("/topics/{topic}/stopsend", stopToSend).Methods("POST")
+	r.HandleFunc("/topics/{topic}/stopreceive", stopToReceive).Methods("POST")
+	r.HandleFunc("/topics/{topic}/msg", sendToTopic).Methods("POST")
+	r.HandleFunc("/topics/{topic}/msg", receiveFromTopic).Methods("GET")
+
+	port := "8090"
+	srv := &http.Server{
+		Handler:      r,
+		Addr:         ":" + port,
+		WriteTimeout: 15 * time.Second,
+		ReadTimeout:  15 * time.Second,
+	}
+	fmt.Println("Running on port: " + port)
+	// Fix: do not pass a non-constant error string as a Printf format
+	// string - it would be mangled if the error text contains '%'.
+	fmt.Println(srv.ListenAndServe().Error())
+}
--- /dev/null
+#!/bin/bash
+
+# ============LICENSE_START===============================================
+# Copyright (C) 2021 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+echo "This script requires golang to be installed and a running kafka instance on (or available to) localhost"
+
+# Script to build and start the app locally
+if [ $# -ne 1 ]; then
+    echo "usage: ./start-local.sh <kafka-bootstrap-server-port>"
+    echo "example: ./start-local.sh 30098"
+    exit 1
+fi
+
+# Point the app at the local kafka broker given by the port argument.
+export KAFKA_BOOTSTRAP_SERVER=localhost:$1
+
+echo "Starting kafka-procon on local machine"
+go run main.go
RUN chmod +x start.sh
+# Create a dedicated non-root user/group with a fixed uid/gid (999) so the
+# runtime identity is stable and verifiable by the orchestrator.
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+## add permissions for the appuser user on the app dir and the nginx
+## runtime directories so nginx can run unprivileged
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+ chown -R appuser:appuser /var/log/nginx && \
+ chown -R appuser:appuser /var/lib/nginx && \
+ chown -R appuser:appuser /etc/nginx/conf.d
+# nginx needs a pre-created, writable pid file when not running as root.
+RUN touch /var/run/nginx.pid && \
+ chown -R appuser:appuser /var/run/nginx.pid
+
+# Drop root for all subsequent instructions and at runtime.
+USER appuser
+
CMD [ "./start.sh" ]
\ No newline at end of file
-user www-data;
+# user www-data;
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
RUN apt-get update
RUN apt-get install -y nginx=1.14.*
+# Create a dedicated non-root user/group with a fixed uid/gid (999) so the
+# runtime identity is stable and verifiable by the orchestrator.
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+
+## add permissions for the appuser user on the app dir and the nginx
+## runtime directories so nginx can run unprivileged
+RUN chown -R appuser:appuser /usr/src/app/ && chmod -R 755 /usr/src/app/ && \
+ chown -R appuser:appuser /var/log/nginx && \
+ chown -R appuser:appuser /var/lib/nginx && \
+ chown -R appuser:appuser /etc/nginx/conf.d
+# nginx needs a pre-created, writable pid file when not running as root.
+RUN touch /var/run/nginx.pid && \
+ chown -R appuser:appuser /var/run/nginx.pid
+
+# Drop root for all subsequent instructions and at runtime.
+USER appuser
+
CMD [ "./start.sh" ]
-user www-data;
+# user www-data;
worker_processes auto;
pid /run/nginx.pid;
include /etc/nginx/modules-enabled/*.conf;
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
-
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
consul-server:
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
portalapi.username = Default
portalapi.password = password
-# URL for enrichment coordinator service
-enrichmentcontroller.url.prefix = https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}/ei-producer/v1
+# URL for information coordinator service
+informationcontroller.url.prefix = https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}/ei-producer/v1
# Mimic slow endpoints by defining sleep period, in milliseconds
mock.config.delay = 0
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
control-panel:
image: ${CONTROL_PANEL_IMAGE}
set $upstream ${NGW_DOMAIN_NAME};
proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
}
- location ${CONTROL_PANEL_PATH_ECS_PREFIX} {
+ location ${CONTROL_PANEL_PATH_ICS_PREFIX} {
set $upstream ${NGW_DOMAIN_NAME};
proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
}
- location ${CONTROL_PANEL_PATH_ECS_PREFIX2} {
+ location ${CONTROL_PANEL_PATH_ICS_PREFIX2} {
set $upstream ${NGW_DOMAIN_NAME};
proxy_pass http://$upstream:${NRT_GATEWAY_EXTERNAL_PORT};
}
--- /dev/null
+COMPOSE_PROJECT_NAME=callback-receiver
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
apiVersion: apps/v1
-kind: Deployment
+kind: StatefulSet
metadata:
name: $CR_APP_NAME
namespace: $KUBE_SIM_NAMESPACE
run: $CR_APP_NAME
autotest: CR
spec:
- replicas: 1
+ replicas: $CR_APP_COUNT
+ serviceName: $CR_APP_NAME
selector:
matchLabels:
run: $CR_APP_NAME
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
- callback-receiver:
+ cr:
+ scale: $CR_APP_COUNT
networks:
- default
- container_name: ${CR_APP_NAME}
image: ${CR_IMAGE}
ports:
- - ${CR_EXTERNAL_PORT}:${CR_INTERNAL_PORT}
- - ${CR_EXTERNAL_SECURE_PORT}:${CR_INTERNAL_SECURE_PORT}
+ - ${CR_INTERNAL_PORT}/tcp
+ - ${CR_INTERNAL_SECURE_PORT}/tcp
labels:
- "nrttest_app=CR"
- "nrttest_dp=${CR_DISPLAY_NAME}"
run: $CR_APP_NAME
autotest: CR
spec:
- type: ClusterIP
ports:
- - port: $CR_EXTERNAL_PORT
- targetPort: $CR_INTERNAL_PORT
- protocol: TCP
+ - port: 80
name: http
- - port: $CR_EXTERNAL_SECURE_PORT
- targetPort: $CR_INTERNAL_SECURE_PORT
- protocol: TCP
- name: https
+ clusterIP: None
selector:
- run: $CR_APP_NAME
\ No newline at end of file
+ run: $CR_APP_NAME
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
# The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
http.proxy-host: $DMAAP_ADP_HTTP_PROXY_CONFIG_HOST_NAME
http.proxy-port: $DMAAP_ADP_HTTP_PROXY_CONFIG_PORT
- ecs-base-url: $ECS_SERVICE_PATH
+ ics-base-url: $ICS_SERVICE_PATH
# Location of the component configuration file. The file will only be used if the Consul database is not used;
# configuration from the Consul will override the file.
configuration-filepath: /opt/app/dmaap-adaptor-service/data/application_configuration.json
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
dmaap-adapter-service:
image: ${DMAAP_ADP_IMAGE}
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
- name: INFO_PRODUCER_PORT
value: "$DMAAP_MED_CONF_SELF_PORT"
- name: INFO_COORD_ADDR
- value: "$ECS_SERVICE_PATH"
+ value: "$ICS_SERVICE_PATH"
- name: DMAAP_MR_ADDR
value: "$MR_SERVICE_PATH"
- name: LOG_LEVEL
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
dmaap-mediator-service:
image: ${DMAAP_MED_IMAGE}
environment:
- INFO_PRODUCER_HOST=${DMAAP_MED_CONF_SELF_HOST}
- INFO_PRODUCER_PORT=${DMAAP_MED_CONF_SELF_PORT}
- - INFO_COORD_ADDR=${ECS_SERVICE_PATH}
+ - INFO_COORD_ADDR=${ICS_SERVICE_PATH}
- DMAAP_MR_ADDR=${MR_SERVICE_PATH}
- LOG_LEVEL=Debug
volumes:
--- /dev/null
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
apiVersion: apps/v1
kind: Deployment
metadata:
- name: $MR_DMAAP_APP_NAME
+ name: $MR_ZOOKEEPER_APP_NAME
namespace: $KUBE_ONAP_NAMESPACE
labels:
- run: $MR_DMAAP_APP_NAME
+ run: $MR_ZOOKEEPER_APP_NAME
autotest: DMAAPMR
spec:
- replicas: 1
selector:
matchLabels:
- run: $MR_DMAAP_APP_NAME
+ run: $MR_ZOOKEEPER_APP_NAME
template:
metadata:
labels:
- run: $MR_DMAAP_APP_NAME
+ run: $MR_ZOOKEEPER_APP_NAME
autotest: DMAAPMR
spec:
containers:
- - name: $MR_DMAAP_APP_NAME
- image: $ONAP_DMAAPMR_IMAGE
+ - name: $MR_ZOOKEEPER_APP_NAME
+ image: $ONAP_ZOOKEEPER_IMAGE
imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
ports:
- name: http
- containerPort: $MR_INTERNAL_PORT
- - name: https
- containerPort: $MR_INTERNAL_SECURE_PORT
+ containerPort: $MR_ZOOKEEPER_PORT
env:
- - name: enableCadi
- value: 'false'
+ - name: ZOOKEEPER_REPLICAS
+ value: '1'
+ - name: ZOOKEEPER_TICK_TIME
+ value: '2000'
+ - name: ZOOKEEPER_SYNC_LIMIT
+ value: '5'
+ - name: ZOOKEEPER_INIT_LIMIT
+ value: '10'
+ - name: ZOOKEEPER_MAX_CLIENT_CNXNS
+ value: '200'
+ - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
+ value: '3'
+ - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
+ value: '24'
+ - name: ZOOKEEPER_CLIENT_PORT
+ value: '$MR_ZOOKEEPER_PORT'
+ - name: KAFKA_OPTS
+ value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
+ - name: ZOOKEEPER_SERVER_ID
+ value: '1'
volumeMounts:
- - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
- subPath: MsgRtrApi.properties
- name: dmaapmr-msg-rtr-api
- - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
- subPath: logback.xml
- name: dmaapmr-log-back
- - mountPath: /appl/dmaapMR1/etc/cadi.properties
- subPath: cadi.properties
- name: dmaapmr-cadi
+ - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
+ subPath: zk_server_jaas.conf
+ name: dmaapmr-zk-server-jaas
volumes:
- configMap:
defaultMode: 420
- name: dmaapmr-msgrtrapi.properties
- name: dmaapmr-msg-rtr-api
- - configMap:
- defaultMode: 420
- name: dmaapmr-logback.xml
- name: dmaapmr-log-back
- - configMap:
- defaultMode: 420
- name: dmaapmr-cadi.properties
- name: dmaapmr-cadi
+ name: dmaapmr-zk-server-jaas.conf
+ name: dmaapmr-zk-server-jaas
---
apiVersion: apps/v1
kind: Deployment
ports:
- name: http
containerPort: $MR_KAFKA_PORT
+ - name: http-external
+ containerPort: $MR_KAFKA_KUBE_NODE_PORT
env:
- name: enableCadi
value: 'false'
- name: KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS
value: '40000'
- name: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP
- value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT'
+ value: 'INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
- name: KAFKA_ADVERTISED_LISTENERS
- value: 'INTERNAL_PLAINTEXT://$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT'
+ value: 'INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_KUBE_NODE_PORT'
- name: KAFKA_LISTENERS
- value: 'INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT'
- # - name: KAFKA_LISTENERS
- # value: 'EXTERNAL_PLAINTEXT://0.0.0.0:9091,INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT'
+ value: 'INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_KUBE_NODE_PORT'
- name: KAFKA_INTER_BROKER_LISTENER_NAME
value: INTERNAL_PLAINTEXT
- name: KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE
- name: KAFKA_OPTS
value: '-Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf'
- name: KAFKA_ZOOKEEPER_SET_ACL
- value: 'false'
+ value: 'true'
- name: KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR
value: '1'
- name: KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS
apiVersion: apps/v1
kind: Deployment
metadata:
- name: $MR_ZOOKEEPER_APP_NAME
+ name: $MR_DMAAP_APP_NAME
namespace: $KUBE_ONAP_NAMESPACE
labels:
- run: $MR_ZOOKEEPER_APP_NAME
+ run: $MR_DMAAP_APP_NAME
autotest: DMAAPMR
spec:
replicas: 1
selector:
matchLabels:
- run: $MR_ZOOKEEPER_APP_NAME
+ run: $MR_DMAAP_APP_NAME
template:
metadata:
labels:
- run: $MR_ZOOKEEPER_APP_NAME
+ run: $MR_DMAAP_APP_NAME
autotest: DMAAPMR
spec:
containers:
- - name: $MR_ZOOKEEPER_APP_NAME
- image: $ONAP_ZOOKEEPER_IMAGE
+ - name: $MR_DMAAP_APP_NAME
+ image: $ONAP_DMAAPMR_IMAGE
imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
ports:
- name: http
- containerPort: $MR_ZOOKEEPER_PORT
+ containerPort: $MR_INTERNAL_PORT
+ - name: https
+ containerPort: $MR_INTERNAL_SECURE_PORT
env:
- - name: ZOOKEEPER_REPLICAS
- value: '1'
- - name: ZOOKEEPER_TICK_TIME
- value: '2000'
- - name: ZOOKEEPER_SYNC_LIMIT
- value: '5'
- - name: ZOOKEEPER_INIT_LIMIT
- value: '10'
- - name: ZOOKEEPER_MAX_CLIENT_CNXNS
- value: '200'
- - name: ZOOKEEPER_AUTOPURGE_SNAP_RETAIN_COUNT
- value: '3'
- - name: ZOOKEEPER_AUTOPURGE_PURGE_INTERVAL
- value: '24'
- - name: ZOOKEEPER_CLIENT_PORT
- value: '$MR_ZOOKEEPER_PORT'
- - name: KAFKA_OPTS
- value: '-Djava.security.auth.login.config=/etc/zookeeper/secrets/jaas/zk_server_jaas.conf -Dzookeeper.kerberos.removeHostFromPrincipal=true -Dzookeeper.kerberos.removeRealmFromPrincipal=true -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dzookeeper.requireClientAuthScheme=sasl'
- - name: ZOOKEEPER_SERVER_ID
- value: '1'
- - name: ZOOKEEPER_SASL_ENABLED
+ - name: enableCadi
value: 'false'
volumeMounts:
- - mountPath: /etc/zookeeper/secrets/jaas/zk_server_jaas.conf
- subPath: zk_server_jaas.conf
- name: dmaapmr-zk-server-jaas
+ - mountPath: /appl/dmaapMR1/bundleconfig/etc/appprops/MsgRtrApi.properties
+ subPath: MsgRtrApi.properties
+ name: dmaapmr-msg-rtr-api
+ - mountPath: /appl/dmaapMR1/bundleconfig/etc/logback.xml
+ subPath: logback.xml
+ name: dmaapmr-log-back
+ - mountPath: /appl/dmaapMR1/etc/cadi.properties
+ subPath: cadi.properties
+ name: dmaapmr-cadi
volumes:
- configMap:
defaultMode: 420
- name: dmaapmr-zk-server-jaas.conf
- name: dmaapmr-zk-server-jaas
\ No newline at end of file
+ name: dmaapmr-msgrtrapi.properties
+ name: dmaapmr-msg-rtr-api
+ - configMap:
+ defaultMode: 420
+ name: dmaapmr-logback.xml
+ name: dmaapmr-log-back
+ - configMap:
+ defaultMode: 420
+ name: dmaapmr-cadi.properties
+ name: dmaapmr-cadi
+
--- /dev/null
+Client {
+ org.apache.zookeeper.server.auth.DigestLoginModule required
+ username="kafka"
+ password="kafka_secret";
+ };
+
# LICENSE_START=======================================================
# org.onap.dmaap
# ================================================================================
-# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
+# Copyright © 2020 Nordix Foundation. All rights reserved.
+# Copyright © 2017 AT&T Intellectual Property. All rights reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
##
## Both Cambria and Kafka make use of Zookeeper.
##
-config.zk.servers=zookeeper:2181
+#config.zk.servers=172.18.1.1
+#config.zk.servers={{.Values.zookeeper.name}}:{{.Values.zookeeper.port}}
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
+
+#config.zk.root=/fe3c/cambria/config
+
###############################################################################
##
## if you want to change request.required.acks it can take this one value
#kafka.metadata.broker.list=localhost:9092,localhost:9093
#kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=message-router-kafka:9092
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
consumer.timeout.ms=100
cambria.secureConfig.key=b/7ouTn9FfEw2PQwL0ov/Q==
cambria.secureConfig.iv=wR9xP5k5vbz/xD0LmtqQLw==
authentication.adminSecret=fe3cCompound
+#cambria.secureConfig.key[pc569h]=YT3XPyxEmKCTLI2NK+Sjbw==
+#cambria.secureConfig.iv[pc569h]=rMm2jhR3yVnU+u2V9Ugu3Q==
###############################################################################
## This server can report its metrics periodically on a topic.
##
#metrics.send.cambria.enabled=true
-#metrics.send.cambria.topic=cambria.apinode.metrics
-#msgrtr.apinode.metrics.dmaap
+#metrics.send.cambria.topic=cambria.apinode.metrics #msgrtr.apinode.metrics.dmaap
#metrics.send.cambria.sendEverySeconds=60
cambria.consumer.cache.zkBasePath=/fe3c/cambria/consumerCache
consumer.timeout=17
default.partitions=3
-default.replicas=1
+default.replicas=3
##############################################################################
#100mb
maxcontentlength=10000
kafka.max.poll.interval.ms=300000
kafka.heartbeat.interval.ms=60000
kafka.session.timeout.ms=240000
-kafka.max.poll.records=1000
\ No newline at end of file
+kafka.max.poll.records=1000
+
-#Removed to be disable aaf in test env
-#aaf_locate_url=https://aaf-onap-test.osaaf.org:8095\
+aaf_locate_url=https://aaf-locate.{{ include "common.namespace" . }}:8095
aaf_url=https://AAF_LOCATE_URL/onap.org.osaaf.aaf.service:2.1
aaf_env=DEV
aaf_lur=org.onap.aaf.cadi.aaf.v2_0.AAFLurPerm
-#Removed to be disable aaf in test env
-# cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
-# cadi_truststore_password=8FyfX+ar;0$uZQ0h9*oXchNX
+cadi_truststore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.trust.jks
+cadi_truststore_password=enc:mN6GiIzFQxKGDzAXDOs7b4j8DdIX02QrZ9QOWNRpxV3rD6whPCfizSMZkJwxi_FJ
cadi_keyfile=/appl/dmaapMR1/etc/org.onap.dmaap.mr.keyfile
cadi_alias=dmaapmr@mr.dmaap.onap.org
cadi_keystore=/appl/dmaapMR1/etc/org.onap.dmaap.mr.p12
-cadi_keystore_password=GDQttV7)BlOvWMf6F7tz&cjy
+cadi_keystore_password=enc:_JJT2gAEkRzXla5xfDIHal8pIoIB5iIos3USvZQT6sL-l14LpI5fRFR_QIGUCh5W
cadi_x509_issuers=CN=intermediateCA_1, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_7, OU=OSAAF, O=ONAP, C=US:CN=intermediateCA_9, OU=OSAAF, O=ONAP, C=US
cadi_loglevel=INFO
cadi_protocols=TLSv1.1,TLSv1.2
cadi_latitude=37.78187
-cadi_longitude=-122.26147
\ No newline at end of file
+cadi_longitude=-122.26147
+
<!--
============LICENSE_START=======================================================
- Copyright © 2019 AT&T Intellectual Property. All rights reserved.
+ Copyright © 2020 Nordix Foundation. All rights reserved.
+ Copyright © 2019 AT&T Intellectual Property. All rights reserved.
================================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
</root>
</configuration>
+
Server {
org.apache.zookeeper.server.auth.DigestLoginModule required
- user_kafka="kafka_secret";
-};
\ No newline at end of file
+ user_kafka=kafka_secret;
+};
+
##
## Both Cambria and Kafka make use of Zookeeper.
##
-config.zk.servers=$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
+config.zk.servers=$MR_ZOOKEEPER_SERVICE_PATH
+#$MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
###############################################################################
##
## if you want to change request.required.acks it can take this one value
#kafka.metadata.broker.list=localhost:9092,localhost:9093
#kafka.metadata.broker.list={{.Values.kafka.name}}:{{.Values.kafka.port}}
-kafka.metadata.broker.list=$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
+kafka.metadata.broker.list=$MR_KAFKA_SERVICE_PATH
+#$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
##kafka.request.required.acks=-1
#kafka.client.zookeeper=${config.zk.servers}
consumer.timeout.ms=100
kafka.rebalance.backoff.ms=10000
kafka.rebalance.max.retries=6
-
###############################################################################
##
## Secured Config
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
-
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
zookeeper:
image: $ONAP_ZOOKEEPER_IMAGE
container_name: $MR_KAFKA_APP_NAME
ports:
- "$MR_KAFKA_PORT:$MR_KAFKA_PORT"
+ - "$MR_KAFKA_DOCKER_LOCALHOST_PORT:$MR_KAFKA_DOCKER_LOCALHOST_PORT"
environment:
enableCadi: 'false'
KAFKA_ZOOKEEPER_CONNECT: $MR_ZOOKEEPER_APP_NAME:$MR_ZOOKEEPER_PORT
KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 40000
KAFKA_ZOOKEEPER_SESSION_TIMEOUT_MS: 40000
- KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT
- KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://$MR_KAFKA_APP_NAME:$MR_KAFKA_PORT
- KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL_PLAINTEXT:PLAINTEXT,EXTERNAL_PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+ KAFKA_ADVERTISED_LISTENERS: INTERNAL_PLAINTEXT://$MR_KAFKA_SERVICE_PATH,PLAINTEXT_HOST://localhost:$MR_KAFKA_DOCKER_LOCALHOST_PORT
+ KAFKA_LISTENERS: INTERNAL_PLAINTEXT://0.0.0.0:$MR_KAFKA_PORT,PLAINTEXT_HOST://0.0.0.0:$MR_KAFKA_DOCKER_LOCALHOST_PORT
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL_PLAINTEXT
KAFKA_CONFLUENT_SUPPORT_METRICS_ENABLE: 'false'
KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/jaas/zk_client_jaas.conf
+++ /dev/null
-Client {
- org.apache.zookeeper.server.auth.DigestLoginModule required
- username="kafka"
- password="kafka_secret";
- };
\ No newline at end of file
apiVersion: v1
kind: Service
metadata:
- name: $MR_DMAAP_APP_NAME
+ name: $MR_ZOOKEEPER_APP_NAME
namespace: $KUBE_ONAP_NAMESPACE
labels:
- run: $MR_DMAAP_APP_NAME
+ run: $MR_ZOOKEEPER_APP_NAME
autotest: DMAAPMR
spec:
type: ClusterIP
ports:
- - port: $MR_EXTERNAL_PORT
- targetPort: $MR_INTERNAL_PORT
+ - port: $MR_ZOOKEEPER_PORT
+ targetPort: $MR_ZOOKEEPER_PORT
protocol: TCP
name: http
- - port: $MR_EXTERNAL_SECURE_PORT
- targetPort: $MR_INTERNAL_SECURE_PORT
- protocol: TCP
- name: https
selector:
- run: $MR_DMAAP_APP_NAME
+ run: $MR_ZOOKEEPER_APP_NAME
---
apiVersion: v1
kind: Service
run: $MR_KAFKA_APP_NAME
autotest: DMAAPMR
spec:
- type: ClusterIP
+ type: NodePort
ports:
- port: $MR_KAFKA_PORT
targetPort: $MR_KAFKA_PORT
protocol: TCP
name: http
+ - port: $MR_KAFKA_KUBE_NODE_PORT
+ targetPort: $MR_KAFKA_KUBE_NODE_PORT
+ protocol: TCP
+ name: http-external
+ nodePort: $MR_KAFKA_KUBE_NODE_PORT
selector:
run: $MR_KAFKA_APP_NAME
---
apiVersion: v1
kind: Service
metadata:
- name: $MR_ZOOKEEPER_APP_NAME
+ name: $MR_DMAAP_APP_NAME
namespace: $KUBE_ONAP_NAMESPACE
labels:
- run: $MR_ZOOKEEPER_APP_NAME
+ run: $MR_DMAAP_APP_NAME
autotest: DMAAPMR
spec:
type: ClusterIP
ports:
- - port: $MR_ZOOKEEPER_PORT
- targetPort: $MR_ZOOKEEPER_PORT
+ - port: $MR_EXTERNAL_PORT
+ targetPort: $MR_INTERNAL_PORT
protocol: TCP
name: http
+ - port: $MR_EXTERNAL_SECURE_PORT
+ targetPort: $MR_INTERNAL_SECURE_PORT
+ protocol: TCP
+ name: https
selector:
- run: $MR_ZOOKEEPER_APP_NAME
+ run: $MR_DMAAP_APP_NAME
+
+
+++ /dev/null
-.tmp.json
-.dockererr
\ No newline at end of file
+++ /dev/null
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: $ECS_APP_NAME
- namespace: $KUBE_NONRTRIC_NAMESPACE
- labels:
- run: $ECS_APP_NAME
- autotest: ECS
-spec:
- replicas: 1
- selector:
- matchLabels:
- run: $ECS_APP_NAME
- template:
- metadata:
- labels:
- run: $ECS_APP_NAME
- autotest: ECS
- spec:
- containers:
- - name: $ECS_APP_NAME
- image: $ECS_IMAGE
- imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
- ports:
- - name: http
- containerPort: $ECS_INTERNAL_PORT
- - name: https
- containerPort: $ECS_INTERNAL_SECURE_PORT
- volumeMounts:
- - mountPath: $ECS_CONFIG_MOUNT_PATH
- name: ecs-conf-name
- volumeMounts:
- - mountPath: $ECS_CONTAINER_MNT_DIR
- name: ecs-data-name
- volumes:
- - configMap:
- defaultMode: 420
- name: $ECS_CONFIG_CONFIGMAP_NAME
- name: ecs-conf-name
- - persistentVolumeClaim:
- claimName: $ECS_DATA_PVC_NAME
- name: ecs-data-name
-# Selector will be set when pod is started first time
- nodeSelector:
-
+++ /dev/null
-apiVersion: v1
-kind: Service
-metadata:
- name: $ECS_APP_NAME
- namespace: $KUBE_NONRTRIC_NAMESPACE
- labels:
- run: $ECS_APP_NAME
- autotest: ECS
-spec:
- type: ClusterIP
- ports:
- - port: $ECS_EXTERNAL_PORT
- targetPort: $ECS_INTERNAL_PORT
- protocol: TCP
- name: http
- - port: $ECS_EXTERNAL_SECURE_PORT
- targetPort: $ECS_INTERNAL_SECURE_PORT
- protocol: TCP
- name: https
- selector:
- run: $ECS_APP_NAME
\ No newline at end of file
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
httpproxy:
networks:
--- /dev/null
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
--- /dev/null
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+ name: $ICS_APP_NAME
+ namespace: $KUBE_NONRTRIC_NAMESPACE
+ labels:
+ run: $ICS_APP_NAME
+ autotest: ICS
+spec:
+ replicas: 1
+ selector:
+ matchLabels:
+ run: $ICS_APP_NAME
+ template:
+ metadata:
+ labels:
+ run: $ICS_APP_NAME
+ autotest: ICS
+ spec:
+ containers:
+ - name: $ICS_APP_NAME
+ image: $ICS_IMAGE
+ imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
+ ports:
+ - name: http
+ containerPort: $ICS_INTERNAL_PORT
+ - name: https
+ containerPort: $ICS_INTERNAL_SECURE_PORT
+ volumeMounts:
+ - mountPath: $ICS_CONFIG_MOUNT_PATH/$ICS_CONFIG_FILE
+ subPath: $ICS_CONFIG_FILE
+ name: ics-conf-name
+ - mountPath: $ICS_CONTAINER_MNT_DIR
+ name: ics-data-name
+ volumes:
+ - configMap:
+ defaultMode: 420
+ name: $ICS_CONFIG_CONFIGMAP_NAME
+ name: ics-conf-name
+ - persistentVolumeClaim:
+ claimName: $ICS_DATA_PVC_NAME
+ name: ics-data-name
+# Selector will be set when the pod is started for the first time
+ nodeSelector:
+
################################################################################
-# Copyright (c) 2020 Nordix Foundation. #
+# Copyright (c) 2021 Nordix Foundation. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
################################################################################
-
spring:
profiles:
active: prod
allow-bean-definition-overriding: true
aop:
auto: false
+springdoc:
+ show-actuator: true
management:
endpoints:
web:
exposure:
+ # Enabling of springboot actuator features. See springboot documentation.
include: "loggers,logfile,health,info,metrics,threaddump,heapdump"
logging:
+ # Configuration of logging
level:
ROOT: ERROR
org.springframework: ERROR
org.springframework.data: ERROR
org.springframework.web.reactive.function.client.ExchangeFunctions: ERROR
- org.oransc.enrichment: INFO
+ org.oransc.ics: INFO
file:
- name: /var/log/enrichment-coordinator-service/application.log
+ name: $ICS_LOGPATH
server:
- port : 8434
- http-port: 8083
+  # Configuration of the HTTP/REST server. The parameters are defined and handled by the springboot framework.
+ # See springboot documentation.
+ port : $ICS_INTERNAL_SECURE_PORT
+ http-port: $ICS_INTERNAL_PORT
ssl:
key-store-type: JKS
key-store-password: policy_agent
- key-store: /opt/app/enrichment-coordinator-service/etc/cert/keystore.jks
+ key-store: /opt/app/information-coordinator-service/etc/cert/keystore.jks
key-password: policy_agent
key-alias: policy_agent
app:
- filepath: /opt/app/enrichment-coordinator-service/data/application_configuration.json
webclient:
+ # Configuration of the trust store used for the HTTP client (outgoing requests)
+ # The file location and the password for the truststore is only relevant if trust-store-used == true
+ # Note that the same keystore as for the server is used.
trust-store-used: false
trust-store-password: policy_agent
- trust-store: /opt/app/enrichment-coordinator-service/etc/cert/truststore.jks
- http.proxy-host: $ECS_HTTP_PROXY_CONFIG_HOST_NAME
- http.proxy-port: $ECS_HTTP_PROXY_CONFIG_PORT
- vardata-directory: /var/enrichment-coordinator-service
\ No newline at end of file
+ trust-store: /opt/app/information-coordinator-service/etc/cert/truststore.jks
+ # Configuration of usage of HTTP Proxy for the southbound accesses.
+ # The HTTP proxy (if configured) will only be used for accessing NearRT RIC:s
+ http.proxy-host: $ICS_HTTP_PROXY_CONFIG_HOST_NAME
+ http.proxy-port: $ICS_HTTP_PROXY_CONFIG_PORT
+ vardata-directory: $ICS_CONTAINER_MNT_DIR
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
- ecs:
- image: ${ECS_IMAGE}
- container_name: ${ECS_APP_NAME}
+ ics:
+ image: ${ICS_IMAGE}
+ container_name: ${ICS_APP_NAME}
networks:
default:
aliases:
- - ${ECS_APP_NAME_ALIAS}
+ - ${ICS_APP_NAME_ALIAS}
volumes:
- - ${ECS_HOST_MNT_DIR}/db:${ECS_CONTAINER_MNT_DIR}
- - ${ECS_HOST_MNT_DIR}/$ECS_CONFIG_FILE:${ECS_CONFIG_MOUNT_PATH}/$ECS_CONFIG_FILE
+ - ${ICS_HOST_MNT_DIR}/db:${ICS_CONTAINER_MNT_DIR}
+ - ${ICS_HOST_MNT_DIR}/$ICS_CONFIG_FILE:${ICS_CONFIG_MOUNT_PATH}/$ICS_CONFIG_FILE
ports:
- - ${ECS_EXTERNAL_PORT}:${ECS_INTERNAL_PORT}
- - ${ECS_EXTERNAL_SECURE_PORT}:${ECS_INTERNAL_SECURE_PORT}
+ - ${ICS_EXTERNAL_PORT}:${ICS_INTERNAL_PORT}
+ - ${ICS_EXTERNAL_SECURE_PORT}:${ICS_INTERNAL_SECURE_PORT}
labels:
- - "nrttest_app=ECS"
- - "nrttest_dp=${ECS_DISPLAY_NAME}"
+ - "nrttest_app=ICS"
+ - "nrttest_dp=${ICS_DISPLAY_NAME}"
apiVersion: v1
kind: PersistentVolume
metadata:
- name: $ECS_DATA_PV_NAME
+ name: $ICS_DATA_PV_NAME
labels:
- run: $ECS_APP_NAME
- autotest: ECS
+ run: $ICS_APP_NAME
+ autotest: ICS
spec:
- storageClassName: ecs-standard
+ storageClassName: ics-standard
capacity:
storage: 1Mi
accessModes:
- ReadWriteOnce
persistentVolumeReclaimPolicy: Delete
hostPath:
- path: "/tmp/$ECS_PV_PATH"
+ path: "/tmp/$ICS_PV_PATH"
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
- name: $ECS_DATA_PVC_NAME
+ name: $ICS_DATA_PVC_NAME
namespace: $KUBE_NONRTRIC_NAMESPACE
labels:
- run: $ECS_APP_NAME
- autotest: ECS
+ run: $ICS_APP_NAME
+ autotest: ICS
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Mi
- storageClassName: ecs-standard
+ storageClassName: ics-standard
volumeMode: Filesystem
--- /dev/null
+apiVersion: v1
+kind: Service
+metadata:
+ name: $ICS_APP_NAME
+ namespace: $KUBE_NONRTRIC_NAMESPACE
+ labels:
+ run: $ICS_APP_NAME
+ autotest: ICS
+spec:
+ type: ClusterIP
+ ports:
+ - port: $ICS_EXTERNAL_PORT
+ targetPort: $ICS_INTERNAL_PORT
+ protocol: TCP
+ name: http
+ - port: $ICS_EXTERNAL_SECURE_PORT
+ targetPort: $ICS_INTERNAL_SECURE_PORT
+ protocol: TCP
+ name: https
+ selector:
+ run: $ICS_APP_NAME
\ No newline at end of file
--- /dev/null
+.tmp.json
+.dockererr
+gen_docker-compose*
\ No newline at end of file
--- /dev/null
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+ name: $KAFKAPC_APP_NAME
+ namespace: $KUBE_SIM_NAMESPACE
+ labels:
+ run: $KAFKAPC_APP_NAME
+    autotest: KAFKAPC
+spec:
+ replicas: 1
+ serviceName: $KAFKAPC_APP_NAME
+ selector:
+ matchLabels:
+ run: $KAFKAPC_APP_NAME
+ template:
+ metadata:
+ labels:
+        run: $KAFKAPC_APP_NAME
+        autotest: KAFKAPC
+ spec:
+ containers:
+ - name: $KAFKAPC_APP_NAME
+ image: $KAFKAPC_IMAGE
+ imagePullPolicy: $KUBE_IMAGE_PULL_POLICY
+ ports:
+ - name: http
+ containerPort: $KAFKAPC_INTERNAL_PORT
+ - name: https
+ containerPort: $KAFKAPC_INTERNAL_SECURE_PORT
+ env:
+ - name: KAFKA_BOOTSTRAP_SERVER
+ value: $MR_KAFKA_SERVICE_PATH
+
--- /dev/null
+# ============LICENSE_START===============================================
+# Copyright (C) 2020 Nordix Foundation. All rights reserved.
+# ========================================================================
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============LICENSE_END=================================================
+#
+
+version: '3.0'
+networks:
+ default:
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
+services:
+ kafka-procon:
+ networks:
+ - default
+ image: ${KAFKAPC_IMAGE}
+ container_name: ${KAFKAPC_APP_NAME}
+ ports:
+ - ${KAFKAPC_EXTERNAL_PORT}:${KAFKAPC_INTERNAL_PORT}
+ - ${KAFKAPC_EXTERNAL_SECURE_PORT}:${KAFKAPC_INTERNAL_SECURE_PORT}
+ environment:
+ KAFKA_BOOTSTRAP_SERVER: $MR_KAFKA_SERVICE_PATH
+    labels:
+      - "nrttest_app=KAFKAPC"
+      - "nrttest_dp=${KAFKAPC_DISPLAY_NAME}"
--- /dev/null
+apiVersion: v1
+kind: Service
+metadata:
+ name: $KAFKAPC_APP_NAME
+ namespace: $KUBE_SIM_NAMESPACE
+ labels:
+ run: $KAFKAPC_APP_NAME
+ autotest: KAFKAPC
+spec:
+ type: ClusterIP
+ ports:
+ - port: $KAFKAPC_EXTERNAL_PORT
+ targetPort: $KAFKAPC_INTERNAL_PORT
+ protocol: TCP
+ name: http
+ - port: $KAFKAPC_EXTERNAL_SECURE_PORT
+ targetPort: $KAFKAPC_INTERNAL_SECURE_PORT
+ protocol: TCP
+ name: https
+ selector:
+ run: $KAFKAPC_APP_NAME
\ No newline at end of file
.dockererr
+gen_docker-compose*
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
kubeproxy:
image: ${KUBE_PROXY_IMAGE}
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
mr-stub:
networks:
.tmp.json
.dockererr
+gen_docker-compose*
predicates:
- Path=/a1-policy/**
- id: A1-EI
- uri: https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}
+ uri: https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}
predicates:
- Path=/ei-producer/**
- id: A1-EI2
- uri: https://${ECS_DOMAIN_NAME}:${ECS_EXTERNAL_SECURE_PORT}
+ uri: https://${ICS_DOMAIN_NAME}:${ICS_EXTERNAL_SECURE_PORT}
predicates:
- Path=/data-producer/**,/data-consumer/**
management:
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
nonrtric-gateway:
image: ${NRT_GATEWAY_IMAGE}
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
volumeMounts:
- mountPath: $POLICY_AGENT_CONFIG_MOUNT_PATH
name: pa-conf-name
-# volumeMounts:
- mountPath: $POLICY_AGENT_CONTAINER_MNT_DIR
name: pa-pv-data-name
-# volumeMounts:
- mountPath: $POLICY_AGENT_DATA_MOUNT_PATH
name: pa-data-name
volumes:
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
policy-agent:
image: ${POLICY_AGENT_IMAGE}
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
producer-stub:
networks:
- default:
- aliases:
- - ${PROD_STUB_APP_NAME_ALIAS}
+ - default
container_name: ${PROD_STUB_APP_NAME}
image: ${PROD_STUB_IMAGE}
ports:
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
version: '3.0'
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
rapp-catalogue:
image: ${RAPP_CAT_IMAGE}
.tmp.json
.dockererr
.env
-fakedir
\ No newline at end of file
+fakedir
+gen_docker-compose*
\ No newline at end of file
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
- g1:
+ ${RICSIM_COMPOSE_SERVICE_NAME}:
image: ${RIC_SIM_IMAGE}
networks:
- default
- ${RIC_SIM_INTERNAL_PORT}/tcp
- ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
environment:
- - A1_VERSION=${G1_A1_VERSION}
+ - A1_VERSION=${RICSIM_COMPOSE_A1_VERSION}
- REMOTE_HOSTS_LOGGING=1
- ALLOW_HTTP=true
- DUPLICATE_CHECK=1
labels:
- "nrttest_app=RICSIM"
- "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
- g2:
- image: ${RIC_SIM_IMAGE}
- networks:
- - default
- ports:
- - ${RIC_SIM_INTERNAL_PORT}/tcp
- - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
- environment:
- - A1_VERSION=${G2_A1_VERSION}
- - REMOTE_HOSTS_LOGGING=1
- - ALLOW_HTTP=true
- - DUPLICATE_CHECK=1
- volumes:
- - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
- labels:
- - "nrttest_app=RICSIM"
- - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
- g3:
- image: ${RIC_SIM_IMAGE}
- networks:
- - default
- ports:
- - ${RIC_SIM_INTERNAL_PORT}/tcp
- - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
- environment:
- - A1_VERSION=${G3_A1_VERSION}
- - REMOTE_HOSTS_LOGGING=1
- - ALLOW_HTTP=true
- - DUPLICATE_CHECK=1
- volumes:
- - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
- labels:
- - "nrttest_app=RICSIM"
- - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
- g4:
- image: ${RIC_SIM_IMAGE}
- networks:
- - default
- ports:
- - ${RIC_SIM_INTERNAL_PORT}/tcp
- - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
- environment:
- - A1_VERSION=${G4_A1_VERSION}
- - REMOTE_HOSTS_LOGGING=1
- - ALLOW_HTTP=true
- - DUPLICATE_CHECK=1
- volumes:
- - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
- labels:
- - "nrttest_app=RICSIM"
- - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
-
- g5:
- image: ${RIC_SIM_IMAGE}
- networks:
- - default
- ports:
- - ${RIC_SIM_INTERNAL_PORT}/tcp
- - ${RIC_SIM_INTERNAL_SECURE_PORT}/tcp
- environment:
- - A1_VERSION=${G5_A1_VERSION}
- - REMOTE_HOSTS_LOGGING=1
- - ALLOW_HTTP=true
- - DUPLICATE_CHECK=1
- volumes:
- - ${RIC_SIM_CERT_MOUNT_DIR}:/usr/src/app/cert:ro
- labels:
- - "nrttest_app=RICSIM"
- - "nrttest_dp=${RIC_SIM_DISPLAY_NAME}"
\ No newline at end of file
.tmp.json
-.dockererr
\ No newline at end of file
+.dockererr
+gen_docker-compose*
\ No newline at end of file
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
-
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
db:
image: ${SDNC_DB_IMAGE}
networks:
default:
- external:
- name: ${DOCKER_SIM_NWNAME}
-
+ external: true
+ name: ${DOCKER_SIM_NWNAME}
services:
db:
image: ${SDNC_DB_IMAGE}
var MRSTUB_PORT="3905"
var AGENT_PORT="8081"
var CR_PORT="8090"
-var ECS_PORT="8083"
+var ICS_PORT="8083"
var PRODSTUB_PORT="8092"
var RC_PORT="8680"
var simvar5=[]
var simvar6=[]
-//Status variables, for parameters values fetched from ecs
-var ecs1="", ecs2="", ecs3="", ecs4="", ecs_types="-", ecs_producers="-";
-var ecs_producer_arr=new Array(0)
-var ecs_producer_type_arr=new Array(0)
-var ecs_producer_jobs_arr=new Array(0)
-var ecs_producer_status_arr=new Array(0)
-var ecs_jobs=new Array(0)
-var ecs_job_status=new Array(0)
+//Status variables, for parameters values fetched from ics
+var ics1="", ics2="", ics3="", ics4="", ics_types="-", ics_producers="-";
+var ics_producer_arr=new Array(0)
+var ics_producer_type_arr=new Array(0)
+var ics_producer_jobs_arr=new Array(0)
+var ics_producer_status_arr=new Array(0)
+var ics_jobs=new Array(0)
+var ics_job_status=new Array(0)
//Status variables, for parameters values fetched from prodstub
var ps2="", ps3="", ps4="", ps_types="-", ps_producers="-";
var refreshCount_pol=-1
-var refreshCount_ecs=-1
+var refreshCount_ics=-1
var refreshCount_cr=-1
}, 500)
}
-function fetchAllMetrics_ecs() {
+function fetchAllMetrics_ics() {
- console.log("Fetching enrichment metrics - timer:" + refreshCount_ecs)
+ console.log("Fetching information metrics - timer:" + refreshCount_ics)
- if (refreshCount_ecs < 0) {
- refreshCount_ecs = -1
+ if (refreshCount_ics < 0) {
+ refreshCount_ics = -1
return
} else {
- refreshCount_ecs = refreshCount_ecs - 1
+ refreshCount_ics = refreshCount_ics - 1
}
setTimeout(() => {
- if (checkFunctionFlag("ecs_stat")) {
- getSimCtr(LOCALHOST+ECS_PORT+"/status", 0, function(data, index) {
+ if (checkFunctionFlag("ics_stat")) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/status", 0, function(data, index) {
try {
var jd=JSON.parse(data);
- ecs1=jd["status"]
- ecs2=""+jd["no_of_producers"]
- ecs3=""+jd["no_of_types"]
- ecs4=""+jd["no_of_jobs"]
+ ics1=jd["status"]
+ ics2=""+jd["no_of_producers"]
+ ics3=""+jd["no_of_types"]
+ ics4=""+jd["no_of_jobs"]
}
catch (err) {
- ecs1="error response"
- ecs2="error response"
- ecs3="error response"
- ecs4="error response"
+ ics1="error response"
+ ics2="error response"
+ ics3="error response"
+ ics4="error response"
}
});
- clearFlag("ecs_stat")
+ clearFlag("ics_stat")
}
- if (checkFunctionFlag("ecs_types")) {
- getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eitypes", 0, function(data, index) {
- var tmp_ecs_types="-"
+ if (checkFunctionFlag("ics_types")) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eitypes", 0, function(data, index) {
+ var tmp_ics_types="-"
try {
var jd=JSON.parse(data);
for(var i=0;i<jd.length;i++) {
- if (tmp_ecs_types.length == 1) {
- tmp_ecs_types=""
+ if (tmp_ics_types.length == 1) {
+ tmp_ics_types=""
}
- tmp_ecs_types=""+tmp_ecs_types+jd[i]+" "
+ tmp_ics_types=""+tmp_ics_types+jd[i]+" "
}
}
catch (err) {
- tmp_ecs_types="error response"
+ tmp_ics_types="error response"
}
- ecs_types = tmp_ecs_types
+ ics_types = tmp_ics_types
});
- clearFlag("ecs_types")
+ clearFlag("ics_types")
}
- if (checkFunctionFlag("ecs_producers")) {
- getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers", 0, function(data, index) {
- var tmp_ecs_producers="-"
+ if (checkFunctionFlag("ics_producers")) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers", 0, function(data, index) {
+ var tmp_ics_producers="-"
try {
var jd=JSON.parse(data);
- var tmp_ecs_producer_arr=new Array(jd.length)
+ var tmp_ics_producer_arr=new Array(jd.length)
for(var i=0;i<jd.length;i++) {
- if (tmp_ecs_producers.length == 1) {
- tmp_ecs_producers=""
+ if (tmp_ics_producers.length == 1) {
+ tmp_ics_producers=""
}
- tmp_ecs_producers=""+tmp_ecs_producers+jd[i]+" "
- tmp_ecs_producer_arr[i]=jd[i]
+ tmp_ics_producers=""+tmp_ics_producers+jd[i]+" "
+ tmp_ics_producer_arr[i]=jd[i]
}
- ecs_producer_arr = tmp_ecs_producer_arr
- ecs_producers = tmp_ecs_producers
+ ics_producer_arr = tmp_ics_producer_arr
+ ics_producers = tmp_ics_producers
}
catch (err) {
- ecs_producers="error response"
- ecs_producer_arr=new Array(0)
+ ics_producers="error response"
+ ics_producer_arr=new Array(0)
}
});
- clearFlag("ecs_producers")
+ clearFlag("ics_producers")
}
- if (checkFunctionFlag("ecs_data")) {
+ if (checkFunctionFlag("ics_data")) {
try {
- var tmp_ecs_producer_type_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
- for(var x=0;x<tmp_ecs_producer_type_arr.length;x++) {
- getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_type_arr[x], x, function(data, idx) {
- var row=""+tmp_ecs_producer_type_arr[idx]+" : "
+ var tmp_ics_producer_type_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+ for(var x=0;x<tmp_ics_producer_type_arr.length;x++) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_type_arr[x], x, function(data, idx) {
+ var row=""+tmp_ics_producer_type_arr[idx]+" : "
try {
var jd=JSON.parse(data);
var jda=jd["supported_ei_types"]
row=""+row+jda[j]+" "
}
- tmp_ecs_producer_type_arr[idx]=row
+ tmp_ics_producer_type_arr[idx]=row
}
catch (err) {
- tmp_ecs_producer_type_arr=new Array(0)
+ tmp_ics_producer_type_arr=new Array(0)
}
});
}
- ecs_producer_type_arr = tmp_ecs_producer_type_arr
+ ics_producer_type_arr = tmp_ics_producer_type_arr
} catch (err) {
- ecs_producer_type_arr=new Array(0)
+ ics_producer_type_arr=new Array(0)
}
try {
- var tmp_ecs_producer_jobs_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
- for(x=0;x<tmp_ecs_producer_jobs_arr.length;x++) {
- getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_jobs_arr[x]+"/eijobs", x, function(data, idx) {
- var row=""+tmp_ecs_producer_jobs_arr[idx]+" : "
+ var tmp_ics_producer_jobs_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+ for(x=0;x<tmp_ics_producer_jobs_arr.length;x++) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_jobs_arr[x]+"/eijobs", x, function(data, idx) {
+ var row=""+tmp_ics_producer_jobs_arr[idx]+" : "
try {
var jd=JSON.parse(data);
for(var j=0;j<jd.length;j++) {
var jda=jd[j]
row=""+row+jda["ei_job_identity"]+"("+jda["ei_type_identity"]+") "
}
- tmp_ecs_producer_jobs_arr[idx]=row
+ tmp_ics_producer_jobs_arr[idx]=row
}
catch (err) {
- tmp_ecs_producer_jobs_arr=new Array(0)
+ tmp_ics_producer_jobs_arr=new Array(0)
}
});
}
- ecs_producer_jobs_arr = tmp_ecs_producer_jobs_arr
+ ics_producer_jobs_arr = tmp_ics_producer_jobs_arr
} catch (err) {
- ecs_producer_jobs_arr=new Array(0)
+ ics_producer_jobs_arr=new Array(0)
}
try {
- var tmp_ecs_producer_status_arr = JSON.parse(JSON.stringify(ecs_producer_arr))
- for(x=0;x<tmp_ecs_producer_status_arr.length;x++) {
- getSimCtr(LOCALHOST+ECS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ecs_producer_status_arr[x]+"/status", x, function(data, idx) {
- var row=""+tmp_ecs_producer_status_arr[idx]+" : "
+ var tmp_ics_producer_status_arr = JSON.parse(JSON.stringify(ics_producer_arr))
+ for(x=0;x<tmp_ics_producer_status_arr.length;x++) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/ei-producer/v1/eiproducers/"+tmp_ics_producer_status_arr[x]+"/status", x, function(data, idx) {
+ var row=""+tmp_ics_producer_status_arr[idx]+" : "
try {
var jd=JSON.parse(data);
row=""+row+jd["operational_state"]
- tmp_ecs_producer_status_arr[idx]=row
+ tmp_ics_producer_status_arr[idx]=row
}
catch (err) {
- tmp_ecs_producer_status_arr=new Array(0)
+ tmp_ics_producer_status_arr=new Array(0)
}
});
}
- ecs_producer_status_arr = tmp_ecs_producer_status_arr
+ ics_producer_status_arr = tmp_ics_producer_status_arr
} catch (err) {
- ecs_producer_status_arr=new Array(0)
+ ics_producer_status_arr=new Array(0)
}
- clearFlag("ecs_data")
+ clearFlag("ics_data")
}
- if (checkFunctionFlag("ecs_jobs")) {
- getSimCtr(LOCALHOST+ECS_PORT+"/A1-EI/v1/eijobs", 0, function(data, index) {
+ if (checkFunctionFlag("ics_jobs")) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/A1-EI/v1/eijobs", 0, function(data, index) {
try {
var jd=JSON.parse(data);
var tmpArr=new Array(jd.length)
for(var i=0;i<jd.length;i++) {
tmpArr[i]=jd[i]
}
- ecs_jobs=tmpArr
+ ics_jobs=tmpArr
}
catch (err) {
- ecs_jobs=new Array(0)
+ ics_jobs=new Array(0)
}
});
- clearFlag("ecs_jobs")
+ clearFlag("ics_jobs")
}
- if (checkFunctionFlag("ecs_job_status")) {
+ if (checkFunctionFlag("ics_job_status")) {
try {
- var tmp_ecs_job_status= JSON.parse(JSON.stringify(ecs_jobs))
- for(x=0;x<tmp_ecs_job_status.length;x++) {
- getSimCtr(LOCALHOST+ECS_PORT+"/A1-EI/v1/eijobs/"+tmp_ecs_job_status[x]+"/status", x, function(data, idx) {
+ var tmp_ics_job_status= JSON.parse(JSON.stringify(ics_jobs))
+ for(x=0;x<tmp_ics_job_status.length;x++) {
+ getSimCtr(LOCALHOST+ICS_PORT+"/A1-EI/v1/eijobs/"+tmp_ics_job_status[x]+"/status", x, function(data, idx) {
try {
var jd=JSON.parse(data);
- tmp_ecs_job_status[idx]=""+tmp_ecs_job_status[idx]+":"+jd["eiJobStatus"]
+ tmp_ics_job_status[idx]=""+tmp_ics_job_status[idx]+":"+jd["eiJobStatus"]
}
catch (err) {
- tmp_ecs_job_status="-"
+ tmp_ics_job_status="-"
}
});
}
- ecs_job_status = tmp_ecs_job_status
+ ics_job_status = tmp_ics_job_status
} catch (err) {
- ecs_job_status="-"
+ ics_job_status="-"
}
- clearFlag("ecs_job_status")
+ clearFlag("ics_job_status")
}
if (checkFunctionFlag("prodstub_stat")) {
getSimCtr(LOCALHOST+PRODSTUB_PORT+"/status", x, function(data, idx) {
clearFlag("prodstub_stat")
}
- fetchAllMetrics_ecs();
+ fetchAllMetrics_ics();
}, 500)
}
function fetchAllMetrics_cr() {
-    console.log("Fetching CR DB - timer:" + refreshCount_ecs)
+    console.log("Fetching CR DB - timer:" + refreshCount_cr)
if (refreshCount_cr < 0) {
refreshCount_cr = -1
 function fetchAllMetrics_rc() {
-    console.log("Fetching RC services - timer:" + refreshCount_ecs)
+    console.log("Fetching RC services - timer:" + refreshCount_rc)
if (refreshCount_rc < 0) {
refreshCount_rc = -1
// Monitor for CR db
app.get("/mon3",function(req, res){
- console.log("Creating CR DB page - timer: " + refreshCount_ecs)
+ console.log("Creating CR DB page - timer: " + refreshCount_ics)
if (refreshCount_cr < 0) {
refreshCount_cr=5
res.send(htmlStr);
})
-// Monitor for ECS
+// Monitor for ICS
app.get("/mon2",function(req, res){
- console.log("Creating enrichment metrics - timer: " + refreshCount_ecs)
+ console.log("Creating information metrics - timer: " + refreshCount_ics)
- if (refreshCount_ecs < 0) {
- refreshCount_ecs=5
- fetchAllMetrics_ecs()
+ if (refreshCount_ics < 0) {
+ refreshCount_ics=5
+ fetchAllMetrics_ics()
}
- refreshCount_ecs=5
+ refreshCount_ics=5
var summary=req.query.summary
"<html>" +
"<head>" +
"<meta http-equiv=\"refresh\" content=\"2\">"+ //2 sec auto refresh
-    "<title>Enrichment coordinator service and producer stub</title>"+
+    "<title>Information Coordinator Service and producer stub</title>"+
"</head>" +
"<body>" +
"<font size=\"-3\" face=\"summary\">"
} else {
htmlStr=htmlStr+"<p>Set query param '?summary' to false to only show full statistics</p>"
}
- if (ecs_job_status.length > 10) {
+ if (ics_job_status.length > 10) {
htmlStr=htmlStr+"<div style=\"color:red\"> Avoid running the server for large number of producers and/or jobs</div>"
}
htmlStr=htmlStr+"</font>" +
- "<h3>Enrichment Coordinator Service</h3>" +
+ "<h3>Information Coordinator Service</h3>" +
"<font face=\"monospace\">" +
- "Status:..........." + formatDataRow(ecs1) + "<br>" +
- "Producers:........" + formatDataRow(ecs2) + "<br>" +
- "Types:............" + formatDataRow(ecs3) + "<br>" +
- "Jobs:............." + formatDataRow(ecs4) + "<br>" +
+ "Status:..........." + formatDataRow(ics1) + "<br>" +
+ "Producers:........" + formatDataRow(ics2) + "<br>" +
+ "Types:............" + formatDataRow(ics3) + "<br>" +
+ "Jobs:............." + formatDataRow(ics4) + "<br>" +
"</font>"
if (summary == "false") {
htmlStr=htmlStr+
"<h4>Details</h4>" +
"<font face=\"monospace\">" +
- "Producer ids:....." + formatDataRow(ecs_producers) + "<br>" +
- "Type ids:........." + formatDataRow(ecs_types) + "<br>" +
+ "Producer ids:....." + formatDataRow(ics_producers) + "<br>" +
+ "Type ids:........." + formatDataRow(ics_types) + "<br>" +
"<br>";
- for(var i=0;i<ecs_producer_type_arr.length;i++) {
- var tmp=ecs_producer_type_arr[i]
+ for(var i=0;i<ics_producer_type_arr.length;i++) {
+ var tmp=ics_producer_type_arr[i]
if (tmp != undefined) {
- var s = "Producer types...." + formatDataRow(ecs_producer_type_arr[i]) + "<br>"
+ var s = "Producer types...." + formatDataRow(ics_producer_type_arr[i]) + "<br>"
htmlStr=htmlStr+s
}
}
htmlStr=htmlStr+"<br>";
- for(i=0;i<ecs_producer_jobs_arr.length;i++) {
- tmp=ecs_producer_jobs_arr[i]
+ for(i=0;i<ics_producer_jobs_arr.length;i++) {
+ tmp=ics_producer_jobs_arr[i]
if (tmp != undefined) {
- s = "Producer jobs....." + formatDataRow(ecs_producer_jobs_arr[i]) + "<br>"
+ s = "Producer jobs....." + formatDataRow(ics_producer_jobs_arr[i]) + "<br>"
htmlStr=htmlStr+s
}
}
htmlStr=htmlStr+"<br>";
- for(i=0;i<ecs_producer_status_arr.length;i++) {
- tmp=ecs_producer_status_arr[i]
+ for(i=0;i<ics_producer_status_arr.length;i++) {
+ tmp=ics_producer_status_arr[i]
if (tmp != undefined) {
s = "Producer status..." + formatDataRow(tmp) + "<br>"
htmlStr=htmlStr+s
}
}
htmlStr=htmlStr+"<br>";
- for(i=0;i<ecs_job_status.length;i++) {
- tmp=ecs_job_status[i]
+ for(i=0;i<ics_job_status.length;i++) {
+ tmp=ics_job_status[i]
if (tmp != undefined) {
s = padding("Job", 18, ".") + formatDataRow(tmp) + "<br>"
htmlStr=htmlStr+s
httpServer.listen(httpPort);
console.log("Simulator monitor listening (http) at "+httpPort);
console.log("Open the web page on localhost:9999/mon to view the policy statistics page.")
-console.log("Open the web page on localhost:9999/mon2 to view the enrichment statistics page.")
+console.log("Open the web page on localhost:9999/mon2 to view the information statistics page.")
console.log("Open the web page on localhost:9999/mon3 to view CR DB in json.")
\ No newline at end of file
--- /dev/null
+{
+ "type": "record",
+ "name": "Std_Defined_Output",
+ "fields": [
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_ratio",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "RRM_Policy_Ratio",
+ "type": "record",
+ "fields": [
+ {
+ "name": "id",
+ "type": "string"
+ },
+ {
+ "name": "administrative_DasH_state",
+ "type": "string"
+ },
+ {
+ "name": "user_DasH_label",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_max_DasH_ratio",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_min_DasH_ratio",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_dedicated_DasH_ratio",
+ "type": "string"
+ },
+ {
+ "name": "resource_DasH_type",
+ "type": "string"
+ },
+ {
+ "name": "radio_DasH_resource_DasH_management_DasH_policy_DasH_members",
+ "type": {
+ "type": "array",
+ "items": {
+ "name": "RRM_Policy_Members",
+ "type": "record",
+ "fields": [
+ {
+ "name": "mobile_DasH_country_DasH_code",
+ "type": "string"
+ },
+ {
+ "name": "mobile_DasH_network_DasH_code",
+ "type": "string"
+ },
+ {
+ "name": "slice_DasH_differentiator",
+ "type": "int"
+ },
+ {
+ "name": "slice_DasH_service_DasH_type",
+ "type": "int"
+ }
+ ]
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ ]
+}
r.HandleFunc("/rests/data/network-topology:network-topology/topology=topology-netconf/node={O-DU-ID}/yang-ext:mount/o-ran-sc-du-hello-world:network-function/du-to-ru-connection={O-RU-ID}", handleData)
fmt.Println("Starting SDNR on port: ", *port)
- http.ListenAndServe(fmt.Sprintf(":%v", *port), r)
+ fmt.Println(http.ListenAndServe(fmt.Sprintf(":%v", *port), r))
}
RUN pip install -r requirements.txt
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+USER appuser
+
CMD [ "python3", "-u", "main.py" ]
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
+ env:
+ - name: TOPIC_READ
+ value: http://dmaap-mr:3904/events/unauthenticated.SEC_FAULT_OUTPUT
+ - name: TOPIC_WRITE
+ value: http://dmaap-mr:3904/events/unauthenticated.SEC_FAULT_OUTPUT
+ - name: GENERIC_TOPICS_UPLOAD_BASEURL
+ value: http://dmaap-mr:3904
ports:
- name: http
containerPort: 3904
RUN pip install -r requirements.txt
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+
+USER appuser
+
CMD [ "python3", "-u", "message_generator.py" ]
RUN pip install -r requirements.txt
+RUN groupadd -g 999 appuser && \
+ useradd -r -u 999 -g appuser appuser
+
+USER appuser
+
CMD [ "python3", "-u", "sdnr_simulator.py" ]