From 31cb268ca0dd6f0ef2bae4b812176a835f077b2b Mon Sep 17 00:00:00 2001 From: santanude Date: Tue, 21 Dec 2021 20:19:01 +0530 Subject: [PATCH] Clean up docker-compose file to get rid of the generic term 'ves' SMO-41 Signed-off-by: santanude Change-Id: I05d1e5caf4236ff4c5837ecc93e18749882daf61 Signed-off-by: santanude --- README.md | 8 +- agent/Dockerfile | 10 +- agent/Makefile | 2 +- .../LICENSE | 0 .../agent_app/agent_app.py} | 16 +- .../agent_app/config/agent_app_config.conf} | 4 +- .../agent_app}/normalizer.py | 0 .../agent_app}/yaml/host.yaml | 0 agent/start.sh | 62 +- collector/Dashboard.json | 1215 -------------------- collector/Dockerfile | 13 +- collector/Makefile | 2 +- collector/datasource.json | 14 - .../evel-test-collector/code/collector/monitor.py | 25 +- .../evel-test-collector/config/collector.conf | 4 +- collector/start.sh | 38 +- dmaapadapter/Dockerfile | 10 +- dmaapadapter/Makefile | 2 +- dmaapadapter/adapter/code/app_config.py | 2 +- dmaapadapter/start.sh | 12 +- docker-compose.yaml | 99 +- influxdb-connector/Dockerfile | 8 +- .../influxdb-connector/code/influxdb_connector.py | 55 +- .../config/influxdb_connector.conf | 2 +- influxdb-connector/start.sh | 32 +- kafka/Dockerfile | 8 +- kafka/Makefile | 2 +- kafka/start.sh | 2 +- postconfig/Dockerfile | 10 +- postconfig/grafana/dashboard.json | 20 +- postconfig/grafana/datasource.json | 4 +- postconfig/start.sh | 23 +- releases/container-release-smo-ves.yaml | 14 +- 33 files changed, 237 insertions(+), 1481 deletions(-) rename agent/barometer/3rd_party/{collectd-ves-app => collectd-agent-app}/LICENSE (100%) rename agent/barometer/3rd_party/{collectd-ves-app/ves_app/ves_app.py => collectd-agent-app/agent_app/agent_app.py} (95%) rename agent/barometer/3rd_party/{collectd-ves-app/ves_app/config/ves_app_config.conf => collectd-agent-app/agent_app/config/agent_app_config.conf} (80%) rename agent/barometer/3rd_party/{collectd-ves-app/ves_app => collectd-agent-app/agent_app}/normalizer.py (100%) rename 
agent/barometer/3rd_party/{collectd-ves-app/ves_app => collectd-agent-app/agent_app}/yaml/host.yaml (100%) delete mode 100755 collector/Dashboard.json delete mode 100644 collector/datasource.json diff --git a/README.md b/README.md index bf87f4b..46b15df 100755 --- a/README.md +++ b/README.md @@ -23,8 +23,8 @@ folder. To run the solution, you need to invoke the following command - % docker-compose up -d ves-collector - % docker-compose up -d ves-agent + % docker-compose up -d smo-collector + % docker-compose up -d agent or simply by the following make command @@ -32,8 +32,8 @@ or simply by the following make command To stop the solution the following command should be invoked. - % docker-compose down -d ves-collector - % docker-compose down -d ves-agent + % docker-compose down -d smo-collector + % docker-compose down -d agent or simply by the following make command diff --git a/agent/Dockerfile b/agent/Dockerfile index 82bbf3e..432f679 100755 --- a/agent/Dockerfile +++ b/agent/Dockerfile @@ -20,7 +20,7 @@ FROM ubuntu:focal -RUN mkdir /opt/ves +RUN mkdir /opt/smo RUN apt-get update && apt-get -y upgrade RUN apt-get install -y tzdata @@ -36,8 +36,8 @@ libssl-dev libsasl2-dev liblz4-dev libz-dev RUN pip3 install kafka-python pyaml RUN pip3 install --upgrade certifi -RUN mkdir /opt/ves/barometer -ADD barometer /opt/ves/barometer +RUN mkdir /opt/smo/barometer +ADD barometer /opt/smo/barometer -COPY start.sh /opt/ves/start.sh -ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] +COPY start.sh /opt/smo/start.sh +ENTRYPOINT ["/bin/bash", "/opt/smo/start.sh"] diff --git a/agent/Makefile b/agent/Makefile index 966ec40..7238da8 100755 --- a/agent/Makefile +++ b/agent/Makefile @@ -16,4 +16,4 @@ default: all all: - docker build -t ves-agent . + docker build -t agent . 
diff --git a/agent/barometer/3rd_party/collectd-ves-app/LICENSE b/agent/barometer/3rd_party/collectd-agent-app/LICENSE similarity index 100% rename from agent/barometer/3rd_party/collectd-ves-app/LICENSE rename to agent/barometer/3rd_party/collectd-agent-app/LICENSE diff --git a/agent/barometer/3rd_party/collectd-ves-app/ves_app/ves_app.py b/agent/barometer/3rd_party/collectd-agent-app/agent_app/agent_app.py similarity index 95% rename from agent/barometer/3rd_party/collectd-ves-app/ves_app/ves_app.py rename to agent/barometer/3rd_party/collectd-agent-app/agent_app/agent_app.py index 71aa1c6..cac186c 100755 --- a/agent/barometer/3rd_party/collectd-ves-app/ves_app/ves_app.py +++ b/agent/barometer/3rd_party/collectd-agent-app/agent_app/agent_app.py @@ -53,7 +53,7 @@ class VESApp(Normalizer): 'Path': '', 'Username': 'user', 'Password': 'password', - 'Topic': 'events', + 'Directory_path': 'events', 'UseHttps': False, 'SendEventInterval': 10.0, 'ApiVersion': 5, @@ -69,8 +69,8 @@ class VESApp(Normalizer): '{}'.format('/{}'.format(self._app_config['Path']) if len( self._app_config['Path']) > 0 else ''), int(self._app_config['ApiVersion']), '{}'.format( - '/{}'.format(self._app_config['Topic']) if len( - self._app_config['Topic']) > 0 else '')) + '/{}'.format(self._app_config['Directory_path']) if len( + self._app_config['Directory_path']) > 0 else '')) logging.info('Vendor Event Listener is at: {}'.format(server_url)) credentials = base64.b64encode('{}:{}'.format( self._app_config['Username'], @@ -88,10 +88,12 @@ class VESApp(Normalizer): except (HTTPError, URLError) as e: logging.error('Vendor Event Listener is is not reachable: {}'.format(e)) except timeout: - logging.error('Timed out - URL %s', url) + logging.error('socket timed out - URL %s', url) except Exception as e: - logging.error('Vendor Event Listener error: {}'.format(e)) - + logging.error('Vendor Event Listener error: {}'.format(e)) + else: + logging.info('Access successful.') + def config(self, config): 
"""VES option configuration""" for key, value in config.items('config'): @@ -191,7 +193,7 @@ def main(): choices=['DEBUG', 'INFO', 'WARNING', 'ERROR'], help="Specify log level (default: %(default)s)", metavar="LEVEL") - parser.add_argument("--logfile", dest="logfile", default='ves_app.log', + parser.add_argument("--logfile", dest="logfile", default='agent_app.log', help="Specify log file (default: %(default)s)", metavar="FILE") args = parser.parse_args() diff --git a/agent/barometer/3rd_party/collectd-ves-app/ves_app/config/ves_app_config.conf b/agent/barometer/3rd_party/collectd-agent-app/agent_app/config/agent_app_config.conf similarity index 80% rename from agent/barometer/3rd_party/collectd-ves-app/ves_app/config/ves_app_config.conf rename to agent/barometer/3rd_party/collectd-agent-app/agent_app/config/agent_app_config.conf index c4b6f42..17a4099 100755 --- a/agent/barometer/3rd_party/collectd-ves-app/ves_app/config/ves_app_config.conf +++ b/agent/barometer/3rd_party/collectd-agent-app/agent_app/config/agent_app_config.conf @@ -2,8 +2,8 @@ Domain = 127.0.0.1 Path = Port = 9999 -Topic = events -UseHttps = False +Directory_path = events +UseHttps = True Username = user Password = password SendEventInterval = 10 diff --git a/agent/barometer/3rd_party/collectd-ves-app/ves_app/normalizer.py b/agent/barometer/3rd_party/collectd-agent-app/agent_app/normalizer.py similarity index 100% rename from agent/barometer/3rd_party/collectd-ves-app/ves_app/normalizer.py rename to agent/barometer/3rd_party/collectd-agent-app/agent_app/normalizer.py diff --git a/agent/barometer/3rd_party/collectd-ves-app/ves_app/yaml/host.yaml b/agent/barometer/3rd_party/collectd-agent-app/agent_app/yaml/host.yaml similarity index 100% rename from agent/barometer/3rd_party/collectd-ves-app/ves_app/yaml/host.yaml rename to agent/barometer/3rd_party/collectd-agent-app/agent_app/yaml/host.yaml diff --git a/agent/start.sh b/agent/start.sh index 522b6a7..a65c6d7 100755 --- a/agent/start.sh +++ 
b/agent/start.sh @@ -14,10 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -#. What this is: Startup script for the OPNFV VES Agent running under docker. +#. What this is: Startup script for the OPNFV Agent running under docker. -echo "Ves-agent is trying to connect Kafka Broker.." -timeout 1m bash -c 'until printf "" 2>>/dev/null >>/dev/tcp/$ves_kafka_host/$ves_kafka_port; do sleep 2; done' +echo "Agent is trying to connect Kafka Broker.." +timeout 1m bash -c 'until printf "" 2>>/dev/null >>/dev/tcp/$agent_kafka_host/$agent_kafka_port; do sleep 2; done' success=$? if [ $success -eq 0 ] then @@ -27,48 +27,48 @@ if [ $success -eq 0 ] exit; fi -echo "Ves-agent is trying to connect ves-collector.." -timeout 1m bash -c 'until printf "" 2>>/dev/null >>/dev/tcp/$ves_host/$ves_port; do sleep 2; done' +echo "Agent is trying to connect smo-collector.." +timeout 1m bash -c 'until printf "" 2>>/dev/null >>/dev/tcp/$smo_collector_host/$smo_collector_port; do sleep 2; done' success=$? if [ $success -eq 0 ] then - echo "ves-collector is up.." + echo "smo-collector is up.." else - echo "No ves-collector found .. exiting container.." + echo "No smo-collector found .. exiting container.." 
 exit; fi -echo "$ves_kafka_host $ves_kafka_hostname" >>/etc/hosts -echo "ves_kafka_hostname=$ves_kafka_hostname" +echo "$agent_kafka_host $agent_kafka_host" >>/etc/hosts +echo "agent_kafka_host=$agent_kafka_host" echo "*** /etc/hosts ***" cat /etc/hosts -cd /opt/ves/barometer/3rd_party/collectd-ves-app/ves_app -cat <<EOF >ves_app_config.conf +cd /opt/smo/barometer/3rd_party/collectd-agent-app/agent_app +cat <<EOF >agent_app_config.conf [config] -Domain = $ves_host -Port = $ves_port -Path = $ves_path -Topic = $ves_topic -UseHttps = $ves_https -Username = $ves_user -Password = $ves_pass -SendEventInterval = $ves_interval -ApiVersion = $ves_version -KafkaPort = $ves_kafka_port -KafkaBroker = $ves_kafka_host +Domain = $smo_collector_host +Port = $smo_collector_port +Path = $smo_collector_path +Directory_path = $smo_collector_directory_path +UseHttps = $smo_collector_https +Username = $smo_collector_user +Password = $smo_collector_pass +SendEventInterval = $agent_interval +ApiVersion = $smo_collector_version +KafkaPort = $agent_kafka_port +KafkaBroker = $agent_kafka_host EOF -cat ves_app_config.conf -echo "ves_mode=$ves_mode" +cat agent_app_config.conf +echo "agent_mode=$agent_mode" -if [[ "$ves_loglevel" == "" ]]; then - ves_loglevel=ERROR +if [[ "$loglevel" == "" ]]; then + loglevel=ERROR fi -python3 ves_app.py --events-schema=$ves_mode.yaml --loglevel $ves_loglevel \ - --config=ves_app_config.conf +python3 agent_app.py --events-schema=$agent_mode.yaml --loglevel $loglevel \ + --config=agent_app_config.conf -# Dump ves_app.log if the command above exits (fails) -echo "*** ves_app.log ***" -cat ves_app.log +# Dump agent_app.log if the command above exits (fails) +echo "*** agent_app.log ***" +cat agent_app.log diff --git a/collector/Dashboard.json b/collector/Dashboard.json deleted file mode 100755 index 28c2f25..0000000 --- a/collector/Dashboard.json +++ /dev/null @@ -1,1215 +0,0 @@ -{ -"dashboard": { - "description": "This Dashboard provides a general overview of a host, with 
templating to select the hostname.", - "editable": true, - "gnetId": null, - "graphTooltip": 0, - "hideControls": false, - "id": null, - "links": [], - "refresh": "10s", - "panels": [ - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null - }, - "gridPos": { - "h": 11, - "w": 12, - "x": 0, - "y": 0 - }, - "hiddenSeries": false, - "id": 3, - "interval": "30s", - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "rightSide": false, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - } - ], - "measurement": "measurementload", - "orderByTime": "ASC", - "policy": "default", - "query": "SELECT moving_average(\"longTerm\", 5) AS \"alias\", moving_average(\"midTerm\", 5), moving_average(\"shortTerm\", 5) FROM \"measurementload\" WHERE (\"system\" =~ /^$host$/) AND $timeFilter GROUP BY \"system\"", - "rawQuery": false, - "refId": "B", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "longTerm" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - }, - { - "params": [ - "Long Term" - ], - "type": "alias" - } - ], - [ - { - "params": [ - "midTerm" - ], - "type": "field" - }, - { - "params": [ - "5" 
- ], - "type": "moving_average" - }, - { - "params": [ - "Mid Term" - ], - "type": "alias" - } - ], - [ - { - "params": [ - "shortTerm" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - }, - { - "params": [ - "Short Term" - ], - "type": "alias" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "host load", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "x-axis": true, - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "short", - "short" - ], - "yaxes": [ - { - "format": "short", - "label": "Percent", - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null - }, - "gridPos": { - "h": 11, - "w": 12, - "x": 12, - "y": 0 - }, - "hiddenSeries": false, - "id": 6, - "interval": "30s", - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "rightSide": false, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": 
"flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - }, - { - "params": [ - "cpuIdentifier" - ], - "type": "tag" - } - ], - "measurement": "measurementcpuusage", - "orderByTime": "ASC", - "policy": "default", - "query": "SELECT mean(\"cpusystem\") FROM \"cpu\" WHERE $timeFilter GROUP BY time(1m) fill(null)", - "refId": "B", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "cpuUsageUser" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "host CPU Usage User", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "x-axis": true, - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "short", - "short" - ], - "yaxes": [ - { - "format": "short", - "label": "Percent", - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null - }, - "gridPos": { - "h": 12, - "w": 12, - "x": 0, - "y": 11 - }, - "hiddenSeries": false, - "id": 2, - "interval": "30s", - "legend": { - "alignAsTable": true, - "avg": true, - 
"current": true, - "max": true, - "min": true, - "rightSide": false, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - }, - { - "params": [ - "nicIdentifier" - ], - "type": "tag" - } - ], - "measurement": "measurementnicperformance", - "orderByTime": "ASC", - "policy": "default", - "query": "SELECT moving_average(\"receivedTotalPacketsAccumulated\", 5) FROM \"measurementnicperformance\" WHERE (\"system\" =~ /^$host$/) AND $timeFilter GROUP BY \"system\", \"nicIdentifier\"", - "rawQuery": false, - "refId": "A", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "receivedTotalPacketsAccumulated" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "Received Octets", - "tooltip": { - "shared": true, - "sort": 2, - "value_type": "individual" - }, - "type": "graph", - "x-axis": true, - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "short", - "short" - ], - "yaxes": [ - { - "format": "short", - "label": "Octets/Packets", - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } 
- }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null - }, - "gridPos": { - "h": 12, - "w": 12, - "x": 12, - "y": 11 - }, - "hiddenSeries": false, - "id": 4, - "interval": "30s", - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "rightSide": false, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 2, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - }, - { - "params": [ - "nicIdentifier" - ], - "type": "tag" - } - ], - "measurement": "measurementnicperformance", - "orderByTime": "ASC", - "policy": "default", - "refId": "B", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "receivedOctetsAccumulated" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "Transmitted Octets", - "tooltip": { - "shared": true, - "sort": 2, - "value_type": "individual" - }, - "type": "graph", - "x-axis": true, - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "y-axis": true, - "y_formats": [ - "short", 
- "short" - ], - "yaxes": [ - { - "format": "short", - "label": "Octets/Packets", - "logBase": 1, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "gridPos": { - "h": 10, - "w": 12, - "x": 0, - "y": 23 - }, - "hiddenSeries": false, - "id": 7, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - }, - { - "params": [ - "diskIdentifier" - ], - "type": "tag" - } - ], - "measurement": "measurementdiskusage", - "orderByTime": "ASC", - "policy": "autogen", - "query": "SELECT moving_average(\"diskOpsWriteLast\", 5) FROM \"autogen\".\"diskUsage\" WHERE (\"system\" =~ /^$host$/ AND \"disk\" = 'sda') AND $timeFilter GROUP BY \"system\", \"disk\"", - "rawQuery": false, - "refId": "A", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "diskOpsWriteLast" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - }, - { - "condition": "AND", - "key": "diskIdentifier", - 
"operator": "=", - "value": "sda" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "Disk Usage SDA", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 10, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "gridPos": { - "h": 10, - "w": 12, - "x": 12, - "y": 23 - }, - "hiddenSeries": false, - "id": 8, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": "tag" - }, - { - "params": [ - "diskIdentifier" - ], - "type": "tag" - } - ], - "measurement": "measurementdiskusage", - "orderByTime": "ASC", - "policy": "autogen", - "query": "SELECT moving_average(\"diskOpsWriteLast\", 5) FROM \"autogen\".\"measurementdiskusage\" WHERE (\"system\" =~ /^$host$/ AND \"diskIdentifier\" = 'sdb') AND $timeFilter GROUP BY \"system\", 
\"diskIdentifier\"", - "rawQuery": false, - "refId": "A", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "diskOpsWriteLast" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - }, - { - "condition": "AND", - "key": "diskIdentifier", - "operator": "=", - "value": "sdb" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "Disk Usage SDB", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 10, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - }, - { - "aliasColors": {}, - "bars": false, - "dashLength": 10, - "dashes": false, - "datasource": "VESEvents", - "fieldConfig": { - "defaults": { - "custom": {} - }, - "overrides": [] - }, - "fill": 1, - "fillGradient": 0, - "gridPos": { - "h": 7, - "w": 24, - "x": 0, - "y": 33 - }, - "hiddenSeries": false, - "id": 5, - "legend": { - "alignAsTable": true, - "avg": true, - "current": true, - "max": true, - "min": true, - "show": true, - "sort": "current", - "sortDesc": true, - "total": false, - "values": true - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "null", - "options": { - "alertThreshold": true - }, - "percentage": false, - "pluginVersion": "7.3.2", - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "spaceLength": 10, - "stack": false, - "steppedLine": false, - "targets": [ - { - "alias": "", - "dsType": "influxdb", - "groupBy": [ - { - "params": [ - "system" - ], - "type": 
"tag" - } - ], - "measurement": "measurementmemoryusage", - "orderByTime": "ASC", - "policy": "autogen", - "refId": "A", - "resultFormat": "time_series", - "select": [ - [ - { - "params": [ - "memoryCached" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - }, - { - "params": [ - "Memory Cached" - ], - "type": "alias" - } - ], - [ - { - "params": [ - "memoryUsed" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - }, - { - "params": [ - "Memory Used" - ], - "type": "alias" - } - ], - [ - { - "params": [ - "memoryFree" - ], - "type": "field" - }, - { - "params": [ - "5" - ], - "type": "moving_average" - }, - { - "params": [ - "Memory Free" - ], - "type": "alias" - } - ] - ], - "tags": [ - { - "key": "system", - "operator": "=~", - "value": "/^$host$/" - } - ] - } - ], - "thresholds": [], - "timeFrom": null, - "timeRegions": [], - "timeShift": null, - "title": "Memory", - "tooltip": { - "shared": true, - "sort": 0, - "value_type": "individual" - }, - "type": "graph", - "xaxis": { - "buckets": null, - "mode": "time", - "name": null, - "show": true, - "values": [] - }, - "yaxes": [ - { - "format": "short", - "label": null, - "logBase": 10, - "max": null, - "min": null, - "show": true - }, - { - "format": "short", - "label": null, - "logBase": 1, - "max": null, - "min": null, - "show": true - } - ], - "yaxis": { - "align": false, - "alignLevel": null - } - } - ], - "refresh": "10s", - "schemaVersion": 26, - "style": "dark", - "tags": [], - "templating": { - "list": [ - { - "allValue": null, - "current": { - "selected": false, - "text": "All", - "value": "$__all" - }, - "datasource": "VESEvents", - "definition": "", - "error": null, - "hide": 0, - "includeAll": true, - "label": "host", - "multi": true, - "name": "host", - "options": [], - "query": "SHOW TAG VALUES WITH KEY=system", - "refresh": 1, - "regex": "", - "skipUrlSync": false, - "sort": 0, - "tagValuesQuery": "", - "tags": [], - "tagsQuery": 
"", - "type": "query", - "useTags": false - } - ] - }, - "time": { - "from": "now-15m", - "to": "now" - }, - "timepicker": { - "now": true, - "refresh_intervals": [ - "10s", - "20s", - "30s", - "1m" - ], - "time_options": [ - "5m", - "15m", - "1h", - "6h", - "12h", - "24h", - "2d", - "7d", - "30d" - ] - }, - "timezone": "browser", - "title": "VES Demo", - "version": 4 - } -} - diff --git a/collector/Dockerfile b/collector/Dockerfile index c70184e..f2731eb 100755 --- a/collector/Dockerfile +++ b/collector/Dockerfile @@ -25,12 +25,13 @@ RUN apt-get install -y git curl python3 python3-pip RUN pip3 install requests jsonschema elasticsearch kafka-python gevent -RUN mkdir -p /opt/ves/certs +RUN mkdir -p /opt/smo/certs # Clone VES Collector -RUN mkdir /opt/ves/evel-test-collector -ADD evel-test-collector /opt/ves/evel-test-collector +RUN mkdir /opt/smo/evel-test-collector +ADD evel-test-collector /opt/smo/evel-test-collector -COPY Dashboard.json /opt/ves/Dashboard.json -COPY start.sh /opt/ves/start.sh -ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] + +COPY start.sh /opt/smo/start.sh + +ENTRYPOINT ["/bin/bash", "/opt/smo/start.sh"] diff --git a/collector/Makefile b/collector/Makefile index 1446064..74d96bc 100644 --- a/collector/Makefile +++ b/collector/Makefile @@ -16,5 +16,5 @@ default: all all: - docker build -t ves-collector . + docker build -t smo-collector . 
diff --git a/collector/datasource.json b/collector/datasource.json deleted file mode 100644 index 4f439e8..0000000 --- a/collector/datasource.json +++ /dev/null @@ -1,14 +0,0 @@ -{ "name":"VESEvents", - "type":"influxdb", - "access":"direct", - "url":"http://127.0.0.1:3330", - "password":"root", - "user":"root", - "database":"veseventsdb", - "basicAuth":false, - "basicAuthUser":"", - "basicAuthPassword":"", - "withCredentials":false, - "isDefault":false, - "jsonData":null -} diff --git a/collector/evel-test-collector/code/collector/monitor.py b/collector/evel-test-collector/code/collector/monitor.py index de54891..793ec20 100755 --- a/collector/evel-test-collector/code/collector/monitor.py +++ b/collector/evel-test-collector/code/collector/monitor.py @@ -67,12 +67,6 @@ TESTRUN = False DEBUG = False PROFILE = False -# ------------------------------------------------------------------------------ -# Address of influxdb server. -# ------------------------------------------------------------------------------ - -influxdb = '127.0.0.1' - # ------------------------------------------------------------------------------ # Credentials we expect clients to authenticate themselves with. 
# ------------------------------------------------------------------------------ @@ -268,7 +262,7 @@ def save_event_in_kafka(body): if (len(topic) == 0): topic = kafka_topic - logger.debug('Kafka broker ={} and kafka topic={}'.format(kafka_port, topic)) + logger.debug('Kafka broker ={} and kafka topic={}'.format(kafka_server, topic)) produce_events_in_kafka(jobj, topic) @@ -277,7 +271,7 @@ def produce_events_in_kafka(jobj, topic): global producer if producer is None: logger.debug('Producer is None') - producer = KafkaProducer(bootstrap_servers=[kafka_port], + producer = KafkaProducer(bootstrap_servers=[kafka_server], value_serializer=lambda x: dumps(x).encode('utf-8')) producer.send(topic, value=jobj) @@ -528,10 +522,6 @@ USAGE # ---------------------------------------------------------------------- parser = ArgumentParser(description=program_license, formatter_class=ArgumentDefaultsHelpFormatter) - parser.add_argument('-i', '--influxdb', - dest='influxdb', - default='localhost', - help='InfluxDB server addresss') parser.add_argument('-v', '--verbose', dest='verbose', action='count', @@ -580,22 +570,20 @@ USAGE # ---------------------------------------------------------------------- # extract the values we want. 
# ---------------------------------------------------------------------- - global influxdb global vel_username global vel_password global vel_topic_name global data_storage global elasticsearch_domain global elasticsearch_port - global kafka_port + global kafka_server global kafka_topic - influxdb = config.get(config_section, 'influxdb', vars=overrides) log_file = config.get(config_section, 'log_file', vars=overrides) vel_port = config.get(config_section, 'vel_port', vars=overrides) vel_path = config.get(config_section, 'vel_path', vars=overrides) - kafka_port = config.get(config_section, - 'kafka_second_port', + kafka_server = config.get(config_section, + 'kafka_server', vars=overrides) kafka_topic = config.get(config_section, 'kafka_topic', @@ -659,7 +647,6 @@ USAGE # Log the details of the configuration. # --------------------------------------------------------------------- logger.debug('Log file = {0}'.format(log_file)) - logger.debug('Influxdb server = {0}'.format(influxdb)) logger.debug('Event Listener Port = {0}'.format(vel_port)) logger.debug('Event Listener Path = {0}'.format(vel_path)) logger.debug('Event Listener Topic = {0}'.format(vel_topic_name)) @@ -772,7 +759,7 @@ USAGE dispatcher.register('POST', test_control_url, test_control_listener) dispatcher.register('GET', test_control_url, test_control_listener) - httpd = pywsgi.WSGIServer(('', int(vel_port)), vendor_event_listener, keyfile='/opt/ves/certs/vescertificate.key', certfile='/opt/ves/certs/vescertificate.crt') + httpd = pywsgi.WSGIServer(('', int(vel_port)), vendor_event_listener, keyfile='/opt/smo/certs/vescertificate.key', certfile='/opt/smo/certs/vescertificate.crt') logger.info('Serving on port {0}...'.format(vel_port)) httpd.serve_forever() diff --git a/collector/evel-test-collector/config/collector.conf b/collector/evel-test-collector/config/collector.conf index 83cfd77..4d8078a 100755 --- a/collector/evel-test-collector/config/collector.conf +++ 
b/collector/evel-test-collector/config/collector.conf @@ -43,11 +43,11 @@ vel_port = 9999 vel_path = vel_username = vel_password = -vel_topic_name = events data_storage = elasticsearch_domain = elasticsearch_port= 9200 -kafka_second_port = +vel_topic_name = events +kafka_server = kafka_topic = #------------------------------------------------------------------------------ diff --git a/collector/start.sh b/collector/start.sh index fd66ead..9e1c6e2 100755 --- a/collector/start.sh +++ b/collector/start.sh @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -#. What this is: Startup script for the OPNFV VES Collector running under docker. +#. What this is: Startup script for the OPNFV SMO Collector running under docker. # the variables used below are now passed in as environmental variables # from the docker run command. -cd /opt/ves +cd /opt/smo touch monitor.log config_file="evel-test-collector/config/collector.conf" @@ -34,42 +34,36 @@ else fi sed -i -- \ - "s~log_file = /var/log/att/collector.log~log_file = /opt/ves/collector.log~" \ + "s~log_file = /var/log/att/collector.log~log_file = /opt/smo/collector.log~" \ $config_file -sed -i -- "s/vel_domain = 127.0.0.1/vel_domain = $ves_host/g" \ +sed -i -- "s/vel_domain = 127.0.0.1/vel_domain = $collector_host/g" \ $config_file -sed -i -- "s/vel_port = 30000/vel_port = $ves_port/g" \ +sed -i -- "s/vel_port = 30000/vel_port = $collector_port/g" \ $config_file -sed -i -- "s/vel_username =/vel_username = $ves_user/g" \ +sed -i -- "s/vel_username =/vel_username = $collector_user/g" \ $config_file -sed -i -- "s/vel_password =/vel_password = $ves_pass/g" \ +sed -i -- "s/vel_password =/vel_password = $collector_pass/g" \ $config_file -sed -i -- "s~vel_path = vendor_event_listener/~vel_path = $ves_path~g" \ - $config_file -sed -i -- "s~vel_topic_name = example_vnf~vel_topic_name = $ves_topic~g" \ - $config_file -sed -i -- "/vel_topic_name = /a influxdb = 
$ves_influxdb_host:$ves_influxdb_port" \ +sed -i -- "s~vel_path = vendor_event_listener/~vel_path = $collector_path~g" \ $config_file sed -i -- "s/elasticsearch_domain =/elasticsearch_domain = $elasticsearch_domain/g" \ $config_file sed -i -- "s/data_storage =/data_storage = $data_storage/g" \ $config_file -sed -i -- "s/kafka_second_port =/kafka_second_port = $kafka_host_2:$kafka_port_2/g" \ +sed -i -- "s/kafka_server =/kafka_server = $smo_kafka_host:$smo_kafka_port/g" \ $config_file -sed -i -- "s/kafka_topic =/kafka_topic = $kafka_topic/g" \ +sed -i -- "s/kafka_topic =/kafka_topic = $smo_kafka_topic/g" \ $config_file echo; echo $config_file cat $config_file -if [ "$ves_loglevel" != "" ]; then - python3 /opt/ves/evel-test-collector/code/collector/monitor.py \ - --config /opt/ves/evel-test-collector/config/collector.conf \ - --influxdb $ves_influxdb_host:$ves_influxdb_port \ - --section default > /opt/ves/monitor.log 2>&1 +if [ "$loglevel" != "" ]; then + python3 /opt/smo/evel-test-collector/code/collector/monitor.py \ + --config /opt/smo/evel-test-collector/config/collector.conf \ + --section default > /opt/smo/monitor.log 2>&1 else - python3 /opt/ves/evel-test-collector/code/collector/monitor.py \ - --config /opt/ves/evel-test-collector/config/collector.conf \ - --influxdb $ves_influxdb_host:$ves_influxdb_port \ + python3 /opt/smo/evel-test-collector/code/collector/monitor.py \ + --config /opt/smo/evel-test-collector/config/collector.conf \ --section default fi diff --git a/dmaapadapter/Dockerfile b/dmaapadapter/Dockerfile index 977c6f1..58a8caa 100755 --- a/dmaapadapter/Dockerfile +++ b/dmaapadapter/Dockerfile @@ -20,12 +20,12 @@ RUN apt-get install -y git curl python3 python3-pip RUN pip3 install requests jsonschema kafka-python flask confluent-kafka -RUN mkdir /opt/ves +RUN mkdir /opt/smo # Clone adapter folder -RUN mkdir /opt/ves/adapter -ADD adapter /opt/ves/adapter +RUN mkdir /opt/smo/adapter +ADD adapter /opt/smo/adapter -COPY start.sh /opt/ves/start.sh 
-ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] +COPY start.sh /opt/smo/start.sh +ENTRYPOINT ["/bin/bash", "/opt/smo/start.sh"] diff --git a/dmaapadapter/Makefile b/dmaapadapter/Makefile index 929f921..87b93fb 100755 --- a/dmaapadapter/Makefile +++ b/dmaapadapter/Makefile @@ -16,5 +16,5 @@ default: all all: - docker build -t ves-dmaap-adapter . + docker build -t smo-dmaap-adapter . diff --git a/dmaapadapter/adapter/code/app_config.py b/dmaapadapter/adapter/code/app_config.py index 7eb2029..c2befe8 100644 --- a/dmaapadapter/adapter/code/app_config.py +++ b/dmaapadapter/adapter/code/app_config.py @@ -27,7 +27,7 @@ class AppConfig: formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument('-c', '--config', dest='config', - default='/opt/ves/adapter/config/adapter.conf', + default='/opt/smo/adapter/config/adapter.conf', help='Use this config file.') parser.add_argument('-s', '--section', dest='section', diff --git a/dmaapadapter/start.sh b/dmaapadapter/start.sh index e2cd683..11d6f7f 100755 --- a/dmaapadapter/start.sh +++ b/dmaapadapter/start.sh @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-cd /opt/ves +cd /opt/smo touch dmaap.log config_file="adapter/config/adapter.conf" @@ -29,11 +29,11 @@ cat $config_file if [ "$log_level" != "" ]; then - python3 /opt/ves/adapter/code/dmaap_adapter.py \ - --config /opt/ves/adapter/config/adapter.conf \ - --section default > /opt/ves/dmaap.log 2>&1 + python3 /opt/smo/adapter/code/dmaap_adapter.py \ + --config /opt/smo/adapter/config/adapter.conf \ + --section default > /opt/smo/dmaap.log 2>&1 else - python3 /opt/ves/adapter/code/dmaap_adapter.py \ - --config /opt/ves/adapter/config/adapter.conf \ + python3 /opt/smo/adapter/code/dmaap_adapter.py \ + --config /opt/smo/adapter/config/adapter.conf \ --section default fi diff --git a/docker-compose.yaml b/docker-compose.yaml index 3268303..8471ce5 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -102,6 +102,18 @@ services: - 29000:9000 environment: KAFKA_BROKERCONNECT: smo-kafka:29092 + smo-dmaap-adapter: + container_name: smo-dmaap-adapter + build: ./dmaapadapter + image: smo-dmaap-adapter + networks: + - smo-net + ports: + - 5000:5000 + environment: + kafka_host: "smo-kafka" + kafka_port: "29092" + log_level: "DEBUG" smo-collector: container_name: smo-collector build: ./collector @@ -112,25 +124,19 @@ services: ports: - 9999:9999 volumes: - - ~/ves-certificate:/opt/ves/certs + - ~/ves-certificate:/opt/smo/certs environment: - ves_influxdb_host: "smo-influxdb" - ves_influxdb_port: "8086" - ves_grafana_host: "smo-grafana" - ves_grafana_port: "3000" - data_storage: "elasticsearch" elasticsearch_domain: "smo-elasticsearch" - kafka_host_2: "smo-kafka" - kafka_port_2: "29092" - kafka_topic: "smo-events" - ves_host: "smo-collector" - ves_port: "9999" - ves_grafana_auth: "admin:admin" - ves_user: "user" - ves_pass: "password" - ves_path: "" - ves_topic: "events" - ves_loglevel: "ERROR" + smo_kafka_host: "smo-kafka" + smo_kafka_port: "29092" + smo_kafka_topic: "smo-events" + data_storage: "elasticsearch" + collector_host: "smo-collector" + collector_port: "9999" + 
collector_user: "user" + collector_pass: "password" + collector_path: "" + loglevel: "ERROR" depends_on: - smo-kafka smo-influxdb-connector: @@ -142,11 +148,11 @@ services: ports: - 9990:9990 environment: - ves_influxdb_host: "smo-influxdb" - ves_influxdb_port: "8086" - ves_loglevel: "ERROR" - kafka_host_2: "smo-kafka" - kafka_port_2: "29092" + smo_influxdb_host: "smo-influxdb" + smo_influxdb_port: "8086" + smo_kafka_host: "smo-kafka" + smo_kafka_port: "29092" + loglevel: "ERROR" depends_on: - smo-kafka - smo-influxdb @@ -158,35 +164,22 @@ services: - agent-net restart: always environment: - ves_kafka_host: "agent-kafka" - ves_kafka_hostname: "agent-kafka" - ves_host: "smo-collector" - ves_port: "9999" - ves_path: "" - ves_topic: "events" - ves_https: "True" - ves_user: "user" - ves_pass: "password" - ves_interval: "10" - ves_kafka_port: "9092" - ves_mode: "./yaml/host" - ves_version: "5" - ves_loglevel: "ERROR" + smo_collector_host: "smo-collector" + smo_collector_port: "9999" + smo_collector_path: "" + smo_collector_directory_path: "events" + smo_collector_https: "True" + smo_collector_user: "user" + smo_collector_pass: "password" + smo_collector_version: "5" + agent_interval: "10" + agent_kafka_port: "9092" + agent_kafka_host: "agent-kafka" + agent_mode: "./yaml/host" + loglevel: "ERROR" depends_on: - agent-kafka - smo-collector - smo-dmaap-adapter: - container_name: smo-dmaap-adapter - build: ./dmaapadapter - image: smo-dmaap-adapter - networks: - - smo-net - ports: - - 5000:5000 - environment: - kafka_host: "smo-kafka" - kafka_port: "29092" - log_level: "DEBUG" smo-elasticsearch: image: docker.elastic.co/elasticsearch/elasticsearch:7.11.1 container_name: smo-elasticsearch @@ -204,11 +197,11 @@ services: build: ./postconfig image: smo-post-config environment: - ves_influxdb_host: "smo-influxdb" - ves_influxdb_port: "8086" - ves_grafana_host: "smo-grafana" - ves_grafana_port: "3000" - ves_grafana_auth: "admin:admin" + smo_influxdb_host: "smo-influxdb" + 
smo_influxdb_port: "8086" + smo_grafana_host: "smo-grafana" + smo_grafana_port: "3000" + smo_grafana_auth: "admin:admin" depends_on: - smo-grafana networks: diff --git a/influxdb-connector/Dockerfile b/influxdb-connector/Dockerfile index 89b3ac7..bd91696 100755 --- a/influxdb-connector/Dockerfile +++ b/influxdb-connector/Dockerfile @@ -20,8 +20,8 @@ RUN apt-get install -y git curl python3 python3-pip RUN pip3 install requests confluent-kafka # Clone influxdb-connector -RUN mkdir -p /opt/ves/influxdb-connector -ADD influxdb-connector /opt/ves/influxdb-connector +RUN mkdir -p /opt/smo/influxdb-connector +ADD influxdb-connector /opt/smo/influxdb-connector -COPY start.sh /opt/ves/start.sh -ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] +COPY start.sh /opt/smo/start.sh +ENTRYPOINT ["/bin/bash", "/opt/smo/start.sh"] diff --git a/influxdb-connector/influxdb-connector/code/influxdb_connector.py b/influxdb-connector/influxdb-connector/code/influxdb_connector.py index 34df978..d998824 100644 --- a/influxdb-connector/influxdb-connector/code/influxdb_connector.py +++ b/influxdb-connector/influxdb-connector/code/influxdb_connector.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import sys +import os import platform import json import logging @@ -19,6 +21,7 @@ from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter import configparser import logging.handlers import requests +import urllib.request as url from confluent_kafka import Consumer, KafkaError # ------------------------------------------------------------------------------ @@ -29,15 +32,13 @@ influxdb = '127.0.0.1' logger = None - def send_to_influxdb(event, pdata): - url = 'http://{}/write?db=veseventsdb'.format(influxdb) + url = 'http://{}/write?db=eventsdb'.format(influxdb) logger.debug('Send {} to influxdb at {}: {}'.format(event, influxdb, pdata)) r = requests.post(url, data=pdata, headers={'Content-Type': 'text/plain'}) logger.info('influxdb return code {}'.format(r.status_code)) if r.status_code != 204: - logger.debug('*** Influxdb save failed, return code {} ***'.format(r.status_code)) - + logger.debug('*** Influxdb save failed, return code {} ***'.format(r.status_code)) def process_additional_measurements(val, domain, eventId, startEpochMicrosec, lastEpochMicrosec): for additionalMeasurements in val: @@ -289,25 +290,25 @@ def main(): # Setup argument parser so we can parse the command-line. 
# ---------------------------------------------------------------------- parser = ArgumentParser(description='', - formatter_class=ArgumentDefaultsHelpFormatter) + formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument('-i', '--influxdb', - dest='influxdb', - default='localhost', - help='InfluxDB server addresss') + dest='influxdb', + default='localhost', + help='InfluxDB server addresss') parser.add_argument('-v', '--verbose', - dest='verbose', - action='count', - help='set verbosity level') + dest='verbose', + action='count', + help='set verbosity level') parser.add_argument('-c', '--config', - dest='config', - default='/opt/ves/connector/config/consumer.conf', - help='Use this config file.', - metavar='') + dest='config', + default='/opt/smo/connector/config/consumer.conf', + help='Use this config file.', + metavar='') parser.add_argument('-s', '--section', - dest='section', - default='default', - metavar='
', - help='section to use in the config file') + dest='section', + default='default', + metavar='
', + help='section to use in the config file') # ---------------------------------------------------------------------- # Process arguments received. @@ -327,6 +328,7 @@ def main(): } config.read(config_file) + # ---------------------------------------------------------------------- # extract the values we want. # ---------------------------------------------------------------------- @@ -336,7 +338,8 @@ def main(): influxdb = config.get(config_section, 'influxdb', vars=overrides) log_file = config.get(config_section, 'log_file', vars=overrides) - kafka_server = config.get(config_section, 'kafka_server', vars=overrides) + kafka_server=config.get(config_section,'kafka_server', + vars=overrides) # ---------------------------------------------------------------------- # Finally we have enough info to start a proper flow trace. @@ -350,8 +353,8 @@ def main(): else: logger.setLevel(logging.DEBUG) handler = logging.handlers.RotatingFileHandler(log_file, - maxBytes=1000000, - backupCount=10) + maxBytes=1000000, + backupCount=10) if (platform.system() == 'Windows'): date_format = '%Y-%m-%d %H:%M:%S' else: @@ -373,6 +376,7 @@ def main(): # kafka Consumer code . 
# ---------------------------------------------------------------------- + settings = { 'bootstrap.servers': kafka_server, 'group.id': 'mygroup', @@ -384,8 +388,8 @@ def main(): c = Consumer(settings) - c.subscribe(['measurement', 'pnfregistration', - 'fault', 'thresholdcrossingalert', 'heartbeat']) + c.subscribe(['measurement','pnfregistration', + 'fault','thresholdcrossingalert','heartbeat']) try: while True: @@ -398,7 +402,7 @@ def main(): save_event_in_db(msg.value()) elif msg.error().code() == KafkaError._PARTITION_EOF: logger.error('End of partition reached {0}/{1}' - .format(msg.topic(), msg.partition())) + .format(msg.topic(), msg.partition())) else: logger.error('Error occured: {0}'.format(msg.error().str())) @@ -408,6 +412,5 @@ def main(): finally: c.close() - if __name__ == '__main__': main() diff --git a/influxdb-connector/influxdb-connector/config/influxdb_connector.conf b/influxdb-connector/influxdb-connector/config/influxdb_connector.conf index dc0fc5d..fbd449e 100755 --- a/influxdb-connector/influxdb-connector/config/influxdb_connector.conf +++ b/influxdb-connector/influxdb-connector/config/influxdb_connector.conf @@ -1,4 +1,4 @@ [default] -log_file = /opt/ves/influxdbconnector.log +log_file = /opt/smo/influxdbconnector.log kafka_server = influxdb = diff --git a/influxdb-connector/start.sh b/influxdb-connector/start.sh index 301d785..e7d0ecb 100755 --- a/influxdb-connector/start.sh +++ b/influxdb-connector/start.sh @@ -12,23 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -cd /opt/ves +cd /opt/smo touch monitor.log config_file="influxdb-connector/config/influxdb_connector.conf" -sed -i -- "s/influxdb =/influxdb = $ves_influxdb_host:$ves_influxdb_port/g" \ +sed -i -- "s/influxdb =/influxdb = $smo_influxdb_host:$smo_influxdb_port/g" \ $config_file -sed -i -- "s/kafka_server =/kafka_server = $kafka_host_2:$kafka_port_2/g" \ +sed -i -- "s/kafka_server =/kafka_server = $smo_kafka_host:$smo_kafka_port/g" \ $config_file echo; echo $config_file cat $config_file -echo; echo "wait for InfluxDB API at $ves_influxdb_host:$ves_influxdb_port" +echo; echo "wait for InfluxDB API at $smo_influxdb_host:$smo_influxdb_port" STARTTIME=$(date +%s) max_time=60 -while ! curl http://$ves_influxdb_host:$ves_influxdb_port/ping ; +while ! curl http://$smo_influxdb_host:$smo_influxdb_port/ping ; do ELAPSED_TIME=$(($(date +%s) - $STARTTIME)) if [ $ELAPSED_TIME -ge $max_time ]; then @@ -39,19 +39,19 @@ while ! curl http://$ves_influxdb_host:$ves_influxdb_port/ping ; sleep 10 done echo "Done." 
-echo; echo "setup veseventsdb in InfluxDB" +echo; echo "setup eventsdb in InfluxDB" # TODO: check if pre-existing and skip -curl -X POST http://$ves_influxdb_host:$ves_influxdb_port/query \ - --data-urlencode "q=CREATE DATABASE veseventsdb" +curl -X POST http://$smo_influxdb_host:$smo_influxdb_port/query \ + --data-urlencode "q=CREATE DATABASE eventsdb" -if [ "$ves_loglevel" != "" ]; then - python3 /opt/ves/influxdb-connector/code/influxdb_connector.py \ - --config /opt/ves/influxdb-connector/config/influxdb_connector.conf \ - --influxdb $ves_influxdb_host:$ves_influxdb_port \ - --section default > /opt/ves/monitor.log 2>&1 +if [ "$loglevel" != "" ]; then + python3 /opt/smo/influxdb-connector/code/influxdb_connector.py \ + --config /opt/smo/influxdb-connector/config/influxdb_connector.conf \ + --influxdb $smo_influxdb_host:$smo_influxdb_port \ + --section default > /opt/smo/monitor.log 2>&1 else - python3 /opt/ves/influxdb-connector/code/influxdb_connector.py \ - --config /opt/ves/influxdb-connector/config/influxdb_connector.conf \ - --influxdb $ves_influxdb_host:$ves_influxdb_port \ + python3 /opt/smo/influxdb-connector/code/influxdb_connector.py \ + --config /opt/smo/influxdb-connector/config/influxdb_connector.conf \ + --influxdb $smo_influxdb_host:$smo_influxdb_port \ --section default fi diff --git a/kafka/Dockerfile b/kafka/Dockerfile index ce59204..6506204 100755 --- a/kafka/Dockerfile +++ b/kafka/Dockerfile @@ -26,13 +26,13 @@ RUN apt-get install -y default-jre python-pip wget # Required for kafka RUN pip install kafka-python -RUN mkdir /opt/ves +RUN mkdir /opt/smo -RUN cd /opt/ves; \ +RUN cd /opt/smo; \ wget https://archive.apache.org/dist/kafka/0.11.0.2/kafka_2.11-0.11.0.2.tgz; \ tar -xvzf kafka_2.11-0.11.0.2.tgz; \ sed -i -- 's/#delete.topic.enable=true/delete.topic.enable=true/' \ kafka_2.11-0.11.0.2/config/server.properties -COPY start.sh /opt/ves/start.sh -ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] +COPY start.sh /opt/smo/start.sh +ENTRYPOINT 
["/bin/bash", "/opt/smo/start.sh"] diff --git a/kafka/Makefile b/kafka/Makefile index 1570a8b..0f9c6d5 100755 --- a/kafka/Makefile +++ b/kafka/Makefile @@ -16,4 +16,4 @@ default: all all: - docker build -t ves-kafka . + docker build -t agent-kafka . diff --git a/kafka/start.sh b/kafka/start.sh index 37c36c2..8a04cce 100755 --- a/kafka/start.sh +++ b/kafka/start.sh @@ -18,7 +18,7 @@ echo "$zookeeper_host $zookeeper_hostname" >>/etc/hosts cat /etc/hosts -cd /opt/ves +cd /opt/smo sed -i "s/localhost:2181/$zookeeper_hostname:$zookeeper_port/" \ kafka_2.11-0.11.0.2/config/server.properties diff --git a/postconfig/Dockerfile b/postconfig/Dockerfile index e920d8a..5a26e35 100755 --- a/postconfig/Dockerfile +++ b/postconfig/Dockerfile @@ -19,11 +19,11 @@ RUN apt-get update --fix-missing && apt-get -y upgrade RUN apt-get install -y git curl -RUN mkdir /opt/ves +RUN mkdir /opt/smo -RUN mkdir /opt/ves/grafana -ADD grafana /opt/ves/grafana +RUN mkdir /opt/smo/grafana +ADD grafana /opt/smo/grafana -COPY start.sh /opt/ves/start.sh -ENTRYPOINT ["/bin/bash", "/opt/ves/start.sh"] +COPY start.sh /opt/smo/start.sh +ENTRYPOINT ["/bin/bash", "/opt/smo/start.sh"] diff --git a/postconfig/grafana/dashboard.json b/postconfig/grafana/dashboard.json index 627be5e..6cd488f 100755 --- a/postconfig/grafana/dashboard.json +++ b/postconfig/grafana/dashboard.json @@ -14,7 +14,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -211,7 +211,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -367,7 +367,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -524,7 +524,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { 
"defaults": { "custom": {} @@ -679,7 +679,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -826,7 +826,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -973,7 +973,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -1152,7 +1152,7 @@ "bars": false, "dashLength": 10, "dashes": false, - "datasource": "VESEvents", + "datasource": "Events", "fieldConfig": { "defaults": { "custom": {} @@ -1290,7 +1290,7 @@ "text": "All", "value": "$__all" }, - "datasource": "VESEvents", + "datasource": "Events", "definition": "", "error": null, "hide": 0, @@ -1337,7 +1337,7 @@ ] }, "timezone": "browser", - "title": "VES Demo", + "title": "Events Demo", "version": 4 } } diff --git a/postconfig/grafana/datasource.json b/postconfig/grafana/datasource.json index 4f439e8..aefdf06 100755 --- a/postconfig/grafana/datasource.json +++ b/postconfig/grafana/datasource.json @@ -1,10 +1,10 @@ -{ "name":"VESEvents", +{ "name":"Events", "type":"influxdb", "access":"direct", "url":"http://127.0.0.1:3330", "password":"root", "user":"root", - "database":"veseventsdb", + "database":"eventsdb", "basicAuth":false, "basicAuthUser":"", "basicAuthPassword":"", diff --git a/postconfig/start.sh b/postconfig/start.sh index da24c0b..68e0f67 100755 --- a/postconfig/start.sh +++ b/postconfig/start.sh @@ -13,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -cd /opt/ves +cd /opt/smo sleep 10 - echo; echo "Wait for Grafana API to be active" STARTTIME=$(date +%s) max_time=60 -while ! curl http://$ves_grafana_host:$ves_grafana_port/ping ; +while ! 
curl http://$smo_grafana_host:$smo_grafana_port/ping ; do ELAPSED_TIME=$(($(date +%s) - $STARTTIME)) if [ $ELAPSED_TIME -ge $max_time ]; then @@ -32,18 +31,18 @@ while ! curl http://$ves_grafana_host:$ves_grafana_port/ping ; sleep 10 done echo "Done." -echo; echo "add VESEvents datasource to Grafana" +echo; echo "add Events datasource to Grafana" # TODO: check if pre-existing and skip -cat </opt/ves/grafana/datasource.json -{ "name":"VESEvents", +cat </opt/smo/grafana/datasource.json +{ "name":"Events", "type":"influxdb", "access":"direct", - "url":"http://$ves_influxdb_host:$ves_influxdb_port", + "url":"http://$smo_influxdb_host:$smo_influxdb_port", "password":"root", "user":"root", - "database":"veseventsdb", + "database":"eventsdb", "basicAuth":false, "basicAuthUser":"", "basicAuthPassword":"", @@ -54,13 +53,13 @@ cat </opt/ves/grafana/datasource.json EOF curl -H "Accept: application/json" -H "Content-type: application/json" \ - -X POST -d @/opt/ves/grafana/datasource.json \ - http://$ves_grafana_auth@$ves_grafana_host:$ves_grafana_port/api/datasources + -X POST -d @/opt/smo/grafana/datasource.json \ + http://$smo_grafana_auth@$smo_grafana_host:$smo_grafana_port/api/datasources echo; echo "add VES dashboard to Grafana" curl -H "Accept: application/json" -H "Content-type: application/json" \ - -X POST -d @/opt/ves/grafana/dashboard.json \ - http://$ves_grafana_auth@$ves_grafana_host:$ves_grafana_port/api/dashboards/db + -X POST -d @/opt/smo/grafana/dashboard.json \ + http://$smo_grafana_auth@$smo_grafana_host:$smo_grafana_port/api/dashboards/db diff --git a/releases/container-release-smo-ves.yaml b/releases/container-release-smo-ves.yaml index e7b3788..3e2a711 100644 --- a/releases/container-release-smo-ves.yaml +++ b/releases/container-release-smo-ves.yaml @@ -6,9 +6,9 @@ container_push_registry: nexus3.o-ran-sc.org:10002 project: smo/ves ref: I152b0ad5a7b6676eef702e3c3811c2f381b0f4f8 containers: - - name: ves-influxdb + - name: smo-influxdb version: 1.8.5 - - 
name: ves-grafana + - name: smo-grafana version: 7.5.11 - name: smo-zookeeper version: 5.5.6 @@ -16,7 +16,13 @@ containers: version: 5.5.6 - name: smo-kafdrop version: 3.27.0 - - name: ves-dmaap-adapter + - name: smo-dmaap-adapter version: 1.0.0 - - name: ves-collector + - name: smo-collector version: 2.0.1 + - name: smo-post-config + version: 1.0.0 + - name: smo-influxdb-connector + version: 1.0.0 + - name: smo-elasticsearch + version: 7.11.1 -- 2.16.6