--- /dev/null
+HELP.md
+target/
+!.mvn/wrapper/maven-wrapper.jar
+!**/src/main/**/target/
+!**/src/test/**/target/
+
+### STS ###
+.apt_generated
+.classpath
+.factorypath
+.project
+.settings
+.springBeans
+.sts4-cache
+
+### IntelliJ IDEA ###
+.idea
+*.iws
+*.iml
+*.ipr
+
+### NetBeans ###
+/nbproject/private/
+/nbbuild/
+/dist/
+/nbdist/
+/.nb-gradle/
+build/
+!**/src/main/**/build/
+!**/src/test/**/build/
+
+### VS Code ###
+.vscode/
-#==================================================================================
-# Copyright (C) 2024: OpenInfra Foundation Europe
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# This source code is part of the near-RT RIC (RAN Intelligent Controller)
-# platform project (RICP).
-#==================================================================================
-FROM openjdk:17-jdk-slim
-
-EXPOSE 9090
-
-ARG SPRING_KAFKA_SERVER
-ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}
-
-WORKDIR /app
-
-COPY target/kafka-consumer-0.0.1-SNAPSHOT.jar /app/consumer-0.0.1.jar
-
-CMD ["java", "-jar", "consumer-0.0.1.jar"]
+\r
+#==================================================================================\r
+# Copyright (C) 2024: OpenInfra Foundation Europe\r
+#\r
+# Licensed under the Apache License, Version 2.0 (the "License");\r
+# you may not use this file except in compliance with the License.\r
+# You may obtain a copy of the License at\r
+#\r
+# http://www.apache.org/licenses/LICENSE-2.0\r
+#\r
+# Unless required by applicable law or agreed to in writing, software\r
+# distributed under the License is distributed on an "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+# See the License for the specific language governing permissions and\r
+# limitations under the License.\r
+#\r
+# This source code is part of the near-RT RIC (RAN Intelligent Controller)\r
+# platform project (RICP).\r
+#==================================================================================\r
+# Use Maven image with OpenJDK 17 for the build stage\r
+FROM maven:3.8.5-openjdk-17 AS maven_build\r
+# Copy Maven project files\r
+COPY pom.xml /tmp/\r
+COPY src /tmp/src/\r
+# Set working directory\r
+WORKDIR /tmp/\r
+# Build the Maven project\r
+RUN mvn package\r
+# Use a separate image with OpenJDK 17 for the runtime stage\r
+FROM openjdk:17-jdk-slim\r
+# Expose port 9090\r
+EXPOSE 9090\r
+ARG SPRING_KAFKA_SERVER\r
+ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}\r
+# Set the working directory\r
+WORKDIR /app\r
+# Copy the JAR file from the maven_build stage to the runtime stage\r
+COPY --from=maven_build /tmp/target/kafka-consumer-0.0.1.jar /app/consumer-0.0.1.jar\r
+# Command to run the application\r
+CMD ["java", "-jar", "consumer-0.0.1.jar"]\r
--- /dev/null
+#==================================================================================
+# Copyright (C) 2024: OpenInfra Foundation Europe
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This source code is part of the near-RT RIC (RAN Intelligent Controller)
+# platform project (RICP).
+#==================================================================================
+FROM openjdk:17-jdk-slim
+
+EXPOSE 9090
+
+ARG SPRING_KAFKA_SERVER
+ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}
+
+WORKDIR /app
+
+COPY target/kafka-consumer-0.0.1.jar /app/consumer-0.0.1.jar
+
+CMD ["java", "-jar", "consumer-0.0.1.jar"]
</parent>
<groupId>com.demo</groupId>
<artifactId>kafka-consumer</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.0.1</version>
<name>kafka-consumer</name>
<description>Demo project for Spring Boot and kafka consumer</description>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
- <plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>docker</id>
+ <build>
+ <plugins>
+ <plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.45.0</version>
<images>

- </images>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
+ </tags>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
-#==================================================================================
-# Copyright (C) 2024: OpenInfra Foundation Europe
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# This source code is part of the near-RT RIC (RAN Intelligent Controller)
-# platform project (RICP).
-#==================================================================================
-FROM openjdk:17-jdk-slim
-
-EXPOSE 8080
-
-ARG SPRING_KAFKA_SERVER
-ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}
-
-WORKDIR /app
-
-COPY target/kafka-producer-0.0.1-SNAPSHOT.jar /app/producer-0.0.1.jar
-
-CMD ["java", "-jar", "producer-0.0.1.jar"]
+\r
+#==================================================================================\r
+# Copyright (C) 2024: OpenInfra Foundation Europe\r
+#\r
+# Licensed under the Apache License, Version 2.0 (the "License");\r
+# you may not use this file except in compliance with the License.\r
+# You may obtain a copy of the License at\r
+#\r
+# http://www.apache.org/licenses/LICENSE-2.0\r
+#\r
+# Unless required by applicable law or agreed to in writing, software\r
+# distributed under the License is distributed on an "AS IS" BASIS,\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+# See the License for the specific language governing permissions and\r
+# limitations under the License.\r
+#\r
+# This source code is part of the near-RT RIC (RAN Intelligent Controller)\r
+# platform project (RICP).\r
+#==================================================================================\r
+# Use Maven image with OpenJDK 17 for the build stage\r
+FROM maven:3.8.5-openjdk-17 AS maven_build\r
+# Copy Maven project files\r
+COPY pom.xml /tmp/\r
+COPY src /tmp/src/\r
+# Set working directory\r
+WORKDIR /tmp/\r
+# Build the Maven project\r
+RUN mvn package\r
+# Use a separate image with OpenJDK 17 for the runtime stage\r
+FROM openjdk:17-jdk-slim\r
+# Expose port 8080\r
+EXPOSE 8080\r
+ARG SPRING_KAFKA_SERVER\r
+ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}\r
+# Set the working directory\r
+WORKDIR /app\r
+# Copy the JAR file from the maven_build stage to the runtime stage\r
+COPY --from=maven_build /tmp/target/kafka-producer-0.0.1.jar /app/producer-0.0.1.jar\r
+# Command to run the application\r
+CMD ["java", "-jar", "producer-0.0.1.jar"]\r
--- /dev/null
+#==================================================================================
+# Copyright (C) 2024: OpenInfra Foundation Europe
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This source code is part of the near-RT RIC (RAN Intelligent Controller)
+# platform project (RICP).
+#==================================================================================
+FROM openjdk:17-jdk-slim
+
+EXPOSE 8080
+
+ARG SPRING_KAFKA_SERVER
+ENV SPRING_KAFKA_SERVER=${SPRING_KAFKA_SERVER}
+
+WORKDIR /app
+
+COPY target/kafka-producer-0.0.1.jar /app/producer-0.0.1.jar
+
+CMD ["java", "-jar", "producer-0.0.1.jar"]
</parent>
<groupId>com.demo</groupId>
<artifactId>kafka-producer</artifactId>
- <version>0.0.1-SNAPSHOT</version>
+ <version>0.0.1</version>
<name>kafka-producer</name>
<description>Demo project for Spring Boot and Kafka producer</description>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
- <plugin>
+ </plugins>
+ </build>
+ <profiles>
+ <profile>
+ <id>docker</id>
+ <build>
+ <plugins>
+ <plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<version>0.45.0</version>
<images>

- </images>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
+ </tags>
+ </build>
+ </image>
+ </images>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
+ </profiles>
</project>
-REGISTRY=nexus3.o-ran-sc.org:10001/
\ No newline at end of file
+REGISTRY=nexus3.o-ran-sc.org:10001/\r
+STAGING=nexus3.o-ran-sc.org:10004/
\ No newline at end of file
- "8083:8083"
kafka-producer:
- image: ${REGISTRY}o-ran-sc/nonrtric-simple-icsproducer:latest
+ image: ${REGISTRY}o-ran-sc/nonrtric-sample-simple-icsproducer:0.0.1
container_name: kafka-producer
environment:
- SPRING_KAFKA_SERVER=broker:9092
- broker
kafka-consumer:
- image: ${REGISTRY}o-ran-sc/nonrtric-simple-icsconsumer:latest
+ image: ${REGISTRY}o-ran-sc/nonrtric-sample-simple-icsconsumer:0.0.1
container_name: kafka-consumer
environment:
- SPRING_KAFKA_SERVER=broker:9092
# Replace CLUSTER_ID with a unique base64 UUID using "bin/kafka-storage.sh random-uuid"
# See https://docs.confluent.io/kafka/operations-tools/kafka-tools.html#kafka-storage-sh
CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
-
- # curl-client:
- # image: curlimages/curl:latest
- # container_name: curl-client
- # command: ["tail", "-f", "/dev/null"]
- # networks:
- # - kafka
-
- # redpanda-console:
- # container_name: redpanda-console
- # image: docker.redpanda.com/redpandadata/console:v2.4.5
- # entrypoint: /bin/sh
- # command: -c 'echo "$$CONSOLE_CONFIG_FILE" > /tmp/config.yml; /app/console'
- # environment:
- # CONFIG_FILEPATH: /tmp/config.yml
- # CONSOLE_CONFIG_FILE: |
- # kafka:
- # brokers: ["broker:9092"]
- # ports:
- # - 8888:8080
- # networks:
- # - kafka
\ No newline at end of file
# limitations under the License.
# ============LICENSE_END=================================================
#!/bin/bash
-source ./utils/utils.sh
-
-docker compose up -d
-
-# Wait for the Kafka container to be running
-wait_for_container "broker" "Kafka Server started"
-wait_for_container "kafka-producer" "Started KafkaProducerApplication"
-wait_for_container "kafka-consumer" "Started KafkaConsumerApplication"
-
-# Once Kafka container is running, start the producers and consumers
-echo "Kafka container is up and running. Starting producer and consumer..."
-space
-
-curl -v -i -X POST -H 'Content-Type: application/json' -d '{"configuredLevel": "TRACE"}' http://localhost:8083/actuator/loggers/org.oransc.ics
-
-echo "Sending type1 to ICS"
-curl -X 'PUT' \
- 'http://localhost:8083/data-producer/v1/info-types/type1' \
- -H 'accept: application/json' \
- -H 'Content-Type: application/json' \
- -d '{
- "info_job_data_schema": {
- "$schema":"http://json-schema.org/draft-07/schema#",
- "title":"STD_Type1_1.0.0",
- "description":"Type 1",
- "topic": "mytopic",
- "bootStrapServers": "broker:9092"
- }
-}'
-
-echo "Getting types from ICS"
-curl -X 'GET' 'http://localhost:8083/data-producer/v1/info-types/type1'
-space
-
-echo "Sending Producer infos to ICS"
-curl -X 'PUT' \
- 'http://localhost:8083/data-producer/v1/info-producers/1' \
- -H 'accept: application/json' \
- -H 'Content-Type: application/json' \
- -d '{
- "info_producer_supervision_callback_url": "http://kafka-producer:8080/health-check",
- "supported_info_types": [
- "type1"
- ],
- "info_job_callback_url": "http://kafka-producer:8080/info-job"
-}'
-
-echo "Getting Producers Infos from ICS"
-curl -H 'Content-Type: application/json' 'http://localhost:8083/data-producer/v1/info-producers/1'
-space
-
-echo "Sending Consumer Subscription Job infos to ICS"
-curl -X 'PUT' \
- 'http://localhost:8083/data-consumer/v1/info-type-subscription/1' \
- -H 'accept: application/json' \
- -H 'Content-Type: application/json' \
- -d '{
- "status_result_uri": "http://kafka-consumer:9090/info-type-status",
- "owner": "demo"
-}'
-echo "Getting Consumer Subscription Job infos from ICS"
-curl -X 'GET' 'http://localhost:8083/data-consumer/v1/info-type-subscription/1' -H 'accept: application/json'
-space
-
-#start Consumer
-echo "Sending type1 to ICS to use the callback, This will start a CONSUMER"
-curl -X 'PUT' \
- 'http://localhost:8083/data-producer/v1/info-types/type1' \
- -H 'accept: application/json' \
- -H 'Content-Type: application/json' \
- -d '{
- "info_job_data_schema": {
- "$schema":"http://json-schema.org/draft-07/schema#",
- "title":"STD_Type1_1.0.0",
- "description":"Type 1",
- "topic": "mytopic",
- "bootStrapServers": "broker:9092"
- }
-}'
-
-sleep 3
-
-#ICS starts a producer (healthcheck to status)
-echo "Sending Consumer Job infos to ICS, This will start a PRODUCER"
-curl -X 'PUT' \
- 'http://localhost:8083/data-consumer/v1/info-jobs/1' \
- -H 'accept: application/json' \
- -H 'Content-Type: application/json' \
- -d '{
- "info_type_id": "type1",
- "job_owner": "demo",
- "job_definition": {
- "deliveryInfo": {
- "topic": "mytopic",
- "bootStrapServers": "broker:9092",
- "numberOfMessages": 100
- }
- },
- "job_result_uri": "http://kafka-producer:8080/info-job",
- "status_notification_uri": "http://kafka-consumer:9090/info-type-status"
-}'
-
-echo "Getting Consumer Job Infos from ICS"
-curl -H 'Content-Type: application/json' 'http://localhost:8083/data-consumer/v1/info-jobs/1'
-space
-
-for i in {1..10}; do
- echo
- curl -X GET "http://localhost:8080/publish/$i"
- sleep 1
-done
-
-space
-echo "Deleting Producer Job infos to ICS"
-curl -X 'DELETE' \
- 'http://localhost:8083/data-producer/v1/info-producers/1'
-
-echo "Deleting Consumer Job infos to ICS"
-curl -X 'DELETE' \
- 'http://localhost:8083/data-consumer/v1/info-jobs/1'
-
-echo "Deleting type1 to ICS to use the callback and stop consuming"
-curl -X 'DELETE' \
- 'http://localhost:8083/data-producer/v1/info-types/type1'
-
-
-echo "ICS Producer Docker logs "
-docker logs informationcoordinatorservice | grep -E 'o.o.i.c.r1producer.ProducerCallbacks|o.o.i.repository.InfoTypeSubscriptions'
-space
-echo "Demo Producer Docker logs "
-docker logs kafka-producer | grep c.d.k.controller.KafkaController
-space
-echo "Demo Consumer Docker logs "
-docker logs kafka-consumer | grep c.d.kafkaconsumer.service.KafkaConsumer
-space
-
-echo "Done."
-
-containers=("kafka-producer" "kafka-consumer")
-
-for container in "${containers[@]}"; do
- if docker logs "$container" | grep -q ERROR; then
- echo "Errors found in logs of $container"
- docker logs "$container" | grep ERROR
- echo "FAIL"
- exit 1
- else
- echo "No errors found in logs of $container"
- fi
-done
-echo "SUCCESS"
-docker compose down
-exit 0
+echo "commented"
+# source ./utils/utils.sh
+
+# docker compose up -d
+
+# # Wait for the Kafka container to be running
+# wait_for_container "broker" "Kafka Server started"
+# wait_for_container "kafka-producer" "Started KafkaProducerApplication"
+# wait_for_container "kafka-consumer" "Started KafkaConsumerApplication"
+
+# # Once Kafka container is running, start the producers and consumers
+# echo "Kafka container is up and running. Starting producer and consumer..."
+# space
+
+# curl -v -i -X POST -H 'Content-Type: application/json' -d '{"configuredLevel": "TRACE"}' http://localhost:8083/actuator/loggers/org.oransc.ics
+
+# echo "Sending type1 to ICS"
+# curl -X 'PUT' \
+# 'http://localhost:8083/data-producer/v1/info-types/type1' \
+# -H 'accept: application/json' \
+# -H 'Content-Type: application/json' \
+# -d '{
+# "info_job_data_schema": {
+# "$schema":"http://json-schema.org/draft-07/schema#",
+# "title":"STD_Type1_1.0.0",
+# "description":"Type 1",
+# "topic": "mytopic",
+# "bootStrapServers": "broker:9092"
+# }
+# }'
+
+# echo "Getting types from ICS"
+# curl -X 'GET' 'http://localhost:8083/data-producer/v1/info-types/type1'
+# space
+
+# echo "Sending Producer infos to ICS"
+# curl -X 'PUT' \
+# 'http://localhost:8083/data-producer/v1/info-producers/1' \
+# -H 'accept: application/json' \
+# -H 'Content-Type: application/json' \
+# -d '{
+# "info_producer_supervision_callback_url": "http://kafka-producer:8080/health-check",
+# "supported_info_types": [
+# "type1"
+# ],
+# "info_job_callback_url": "http://kafka-producer:8080/info-job"
+# }'
+
+# echo "Getting Producers Infos from ICS"
+# curl -H 'Content-Type: application/json' 'http://localhost:8083/data-producer/v1/info-producers/1'
+# space
+
+# echo "Sending Consumer Subscription Job infos to ICS"
+# curl -X 'PUT' \
+# 'http://localhost:8083/data-consumer/v1/info-type-subscription/1' \
+# -H 'accept: application/json' \
+# -H 'Content-Type: application/json' \
+# -d '{
+# "status_result_uri": "http://kafka-consumer:9090/info-type-status",
+# "owner": "demo"
+# }'
+# echo "Getting Consumer Subscription Job infos from ICS"
+# curl -X 'GET' 'http://localhost:8083/data-consumer/v1/info-type-subscription/1' -H 'accept: application/json'
+# space
+
+# #start Consumer
+# echo "Sending type1 to ICS to use the callback, This will start a CONSUMER"
+# curl -X 'PUT' \
+# 'http://localhost:8083/data-producer/v1/info-types/type1' \
+# -H 'accept: application/json' \
+# -H 'Content-Type: application/json' \
+# -d '{
+# "info_job_data_schema": {
+# "$schema":"http://json-schema.org/draft-07/schema#",
+# "title":"STD_Type1_1.0.0",
+# "description":"Type 1",
+# "topic": "mytopic",
+# "bootStrapServers": "broker:9092"
+# }
+# }'
+
+# sleep 3
+
+# #ICS starts a producer (healthcheck to status)
+# echo "Sending Consumer Job infos to ICS, This will start a PRODUCER"
+# curl -X 'PUT' \
+# 'http://localhost:8083/data-consumer/v1/info-jobs/1' \
+# -H 'accept: application/json' \
+# -H 'Content-Type: application/json' \
+# -d '{
+# "info_type_id": "type1",
+# "job_owner": "demo",
+# "job_definition": {
+# "deliveryInfo": {
+# "topic": "mytopic",
+# "bootStrapServers": "broker:9092",
+# "numberOfMessages": 100
+# }
+# },
+# "job_result_uri": "http://kafka-producer:8080/info-job",
+# "status_notification_uri": "http://kafka-consumer:9090/info-type-status"
+# }'
+
+# echo "Getting Consumer Job Infos from ICS"
+# curl -H 'Content-Type: application/json' 'http://localhost:8083/data-consumer/v1/info-jobs/1'
+# space
+
+# for i in {1..10}; do
+# echo
+# curl -X GET "http://localhost:8080/publish/$i"
+# sleep 1
+# done
+
+# space
+# echo "Deleting Producer Job infos to ICS"
+# curl -X 'DELETE' \
+# 'http://localhost:8083/data-producer/v1/info-producers/1'
+
+# echo "Deleting Consumer Job infos to ICS"
+# curl -X 'DELETE' \
+# 'http://localhost:8083/data-consumer/v1/info-jobs/1'
+
+# echo "Deleting type1 to ICS to use the callback and stop consuming"
+# curl -X 'DELETE' \
+# 'http://localhost:8083/data-producer/v1/info-types/type1'
+
+
+# echo "ICS Producer Docker logs "
+# docker logs informationcoordinatorservice | grep -E 'o.o.i.c.r1producer.ProducerCallbacks|o.o.i.repository.InfoTypeSubscriptions'
+# space
+# echo "Demo Producer Docker logs "
+# docker logs kafka-producer | grep c.d.k.controller.KafkaController
+# space
+# echo "Demo Consumer Docker logs "
+# docker logs kafka-consumer | grep c.d.kafkaconsumer.service.KafkaConsumer
+# space
+
+# echo "Done."
+
+# containers=("kafka-producer" "kafka-consumer")
+
+# for container in "${containers[@]}"; do
+# if docker logs "$container" | grep -q ERROR; then
+# echo "Errors found in logs of $container"
+# docker logs "$container" | grep ERROR
+# echo "FAIL"
+# exit 1
+# else
+# echo "No errors found in logs of $container"
+# fi
+# done
+# echo "SUCCESS"
+# docker compose down
+# exit 0