From: Bin Yang
Date: Tue, 7 Dec 2021 03:32:07 +0000 (+0800)
Subject: Add nfdeployment handlers
X-Git-Tag: 1.0.0~10
X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=commitdiff_plain;h=d14329a57d8d01d443e4158fa5030a5b5ada4060;p=pti%2Fo2.git

Add nfdeployment handlers

Add install and uninstall support with the helm sdk.

Issue-ID: INF-239
Signed-off-by: Bin Yang
Change-Id: I7e6fbe3aa58114cc0918c0ed3142b315aae4354e
---

diff --git a/Dockerfile b/Dockerfile
index db2258d..d0039fa 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,6 +14,8 @@ RUN git clone --depth 1 --branch master https://opendev.org/starlingx/distcloud-
 RUN pip install -e /distcloud-client/distributedcloud-client
 
 # in case git repo is not accessable
+# RUN git clone --depth 1 --branch master https://github.com/cloudify-incubator/cloudify-helm-plugin.git /helmsdk
+
 COPY requirements.txt /tmp/
 COPY requirements-stx.txt /tmp/
diff --git a/Dockerfile.localtest b/Dockerfile.localtest
index 109ce75..eee30ef 100644
--- a/Dockerfile.localtest
+++ b/Dockerfile.localtest
@@ -29,14 +29,29 @@ COPY o2common/ /src/o2common/
 RUN mkdir -p /src/o2app/
 COPY o2app/ /src/o2app/
+
+RUN mkdir -p /src/helm_sdk/
+COPY helm_sdk/ /src/helm_sdk/
+
 COPY setup.py /src/
 COPY configs/ /etc/o2/
-# RUN pip install -e /src
+# RUN mkdir -p /helmsdk
+# COPY temp/helmsdk /helmsdk/
+# # RUN git clone --depth 1 --branch master https://github.com/cloudify-incubator/cloudify-helm-plugin.git helmsdk
+# COPY /helmsdk/helm_sdk /src/helm_sdk
+# RUN pip install -e /src
 
 COPY tests/ /tests/
 
 RUN apt-get install -y procps vim
+RUN apt-get install -y curl
+RUN curl -O https://get.helm.sh/helm-v3.3.1-linux-amd64.tar.gz;
+RUN tar -zxvf helm-v3.3.1-linux-amd64.tar.gz; cp linux-amd64/helm /usr/local/bin
+
+RUN mkdir -p /etc/kubeconfig/
+COPY temp/kubeconfig/config /etc/kubeconfig/
+
 WORKDIR /src
diff --git a/README.md b/README.md
index 0782690..aae2c5c 100644
--- a/README.md
+++ b/README.md
@@ -57,3 +57,253 @@ pytest tests/unit
 pytest tests/integration
 pytest tests/e2e
 ```
+
+
+Test O2DMS with docker-compose
+==============================
+
+## setup account over INF and get token
+
+```sh
+USER="admin-user"
+NAMESPACE="kube-system"
+
+cat <<EOF > admin-login.yaml
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  name: ${USER}
+  namespace: kube-system
+---
+apiVersion: rbac.authorization.k8s.io/v1
+kind: ClusterRoleBinding
+metadata:
+  name: ${USER}
+roleRef:
+  apiGroup: rbac.authorization.k8s.io
+  kind: ClusterRole
+  name: cluster-admin
+subjects:
+- kind: ServiceAccount
+  name: ${USER}
+  namespace: kube-system
+EOF
+kubectl apply -f admin-login.yaml
+TOKEN_DATA=$(kubectl -n kube-system describe secret $(kubectl -n kube-system get secret | grep ${USER} | awk '{print $1}') | grep "token:" | awk '{print $2}')
+
+```
+
+## setup remote cli to access kubernetes cluster over INF
+
+```sh
+sudo apt-get install -y apt-transport-https
+echo "deb http://mirrors.ustc.edu.cn/kubernetes/apt kubernetes-xenial main" | \
+sudo tee -a /etc/apt/sources.list.d/kubernetes.list
+sudo apt-get update
+sudo apt-get install -y kubectl
+
+source <(kubectl completion bash) # setup autocomplete in bash into the current shell, bash-completion package should be installed first.
+echo "source <(kubectl completion bash)" >> ~/.bashrc # add autocomplete permanently to your bash shell.
+
+curl -O https://get.helm.sh/helm-v3.5.3-linux-amd64.tar.gz
+tar xvf helm-v3.5.3-linux-amd64.tar.gz
+sudo cp linux-amd64/helm /usr/local/bin
+
+source <(helm completion bash)
+echo "source <(helm completion bash)" >> ~/.bashrc
+
+OAM_IP=
+NAMESPACE=default
+TOKEN_DATA=
+
+USER="admin-user"
+
+kubectl config set-cluster inf-cluster --server=https://${OAM_IP}:6443 --insecure-skip-tls-verify
+kubectl config set-credentials ${USER} --token=$TOKEN_DATA
+kubectl config set-context ${USER}@inf-cluster --cluster=inf-cluster --user ${USER} --namespace=${NAMESPACE}
+kubectl config use-context ${USER}@inf-cluster
+
+kubectl get pods -A
+
+```
+
+
+## setup local repo: o2imsrepo
+
+```sh
+helm repo add chartmuseum https://chartmuseum.github.io/charts
+
+export NODE_IP=
+
+cat <<EOF > chartmuseum-override.yaml
+env:
+  open:
+    DISABLE_API: false
+service:
+  type: NodePort
+  nodePort: 30330
+EOF
+helm install chartmuseumrepo chartmuseum/chartmuseum -f chartmuseum-override.yaml
+kubectl get pods
+kubectl get services
+
+helm repo add o2imsrepo http://${NODE_IP}:30330
+helm repo update
+
+helm repo add bitnami https://charts.bitnami.com/bitnami
+helm repo update
+
+helm pull bitnami/mysql
+helm push mysql-8.8.16.tgz o2imsrepo
+helm repo update
+
+helm install my-release o2imsrepo/mysql
+kubectl get pods
+helm del my-release
+
+```
+
+
+## Verify CFW over INF: Test with cnf firewall-host-netdevice
+
+## Setup host netdevice over INF
+
+```sh
+ssh sysadmin@
+sudo ip link add name veth11 type veth peer name veth12
+sudo ip link add name veth21 type veth peer name veth22
+sudo ip link |grep veth
+exit
+```
+
+
+## verify CNF over INF
+```sh
+git clone https://github.com/biny993/firewall-host-netdevice.git
+
+cat <<EOF > cfw-hostdev-override.yaml
+
+image:
+  repository: ubuntu
+  tag: 18.04
+  pullPolicy: IfNotPresent
+
+resources:
+  cpu: 2
+  memory: 2Gi
+  hugepage: 256Mi
+
+#global vars for parent and subcharts.
+
+  unprotectedNetPortVpg: veth11
+  unprotectedNetPortVfw: veth12
+  unprotectedNetCidr: 10.10.1.0/24
+  unprotectedNetGwIp: 10.10.1.1
+
+  protectedNetPortVfw: veth21
+  protectedNetPortVsn: veth22
+  protectedNetCidr: 10.10.2.0/24
+  protectedNetGwIp: 10.10.2.1
+
+  vfwPrivateIp0: 10.10.1.1
+  vfwPrivateIp1: 10.10.2.1
+
+  vpgPrivateIp0: 10.10.1.2
+
+  vsnPrivateIp0: 10.10.2.2
+
+EOF
+
+helm install cfw1 firewall-host-netdevice -f cfw-hostdev-override.yaml
+kubectl get pods
+helm del cfw1
+```
+
+## push repo to o2imsrepo
+
+```sh
+tar -zcvf firewall-host-netdevice-1.0.0.tgz firewall-host-netdevice/
+helm push firewall-host-netdevice-1.0.0.tgz o2imsrepo
+helm repo update
+helm search repo firewall
+
+helm install cfw1 o2imsrepo/firewall-host-netdevice -f cfw-hostdev-override.yaml
+kubectl get pods
+helm del cfw1
+```
+
+## build docker image for o2 services
+```sh
+cd o2
+docker-compose build
+
+```
+
+## bootstrap o2 service with docker-compose
+```sh
+
+mkdir -p temp/kubeconfig/
+cp temp/kubeconfig/
+
+source ./admin_openrc.sh
+export K8S_KUBECONFIG=/etc/kubeconfig/config
+docker-compose up -d
+docker logs -f o2_redis_pubsub_1
+
+```
+
+## simulate SMO to deploy CFW
+
+```sh
+
+curl --location --request GET 'http://localhost:5005/o2ims_infrastructureInventory/v1/deploymentManagers'
+export dmsId=
+curl --location --request POST 'http://localhost:5005/o2dms/${dmsId}/O2dms_DeploymentLifecycle/NfDeploymentDescriptor' \
+--header 'Content-Type: application/json' \
+--data-raw '{
+    "name": "cfwdesc1",
+    "description": "demo nf deployment descriptor",
+    "artifactRepoUrl": "http://128.224.115.15:30330",
+    "artifactName": "firewall-host-netdevice",
+    "inputParams":
+    "{\n  \"image\": {\n    \"repository\": \"ubuntu\",\n    \"tag\": 18.04,\n    \"pullPolicy\": \"IfNotPresent\"\n  },\n  \"resources\": {\n    \"cpu\": 2,\n    \"memory\": \"2Gi\",\n    \"hugepage\": \"256Mi\",\n    \"unprotectedNetPortVpg\": \"veth11\",\n    \"unprotectedNetPortVfw\": \"veth12\",\n    \"unprotectedNetCidr\": \"10.10.1.0/24\",\n    \"unprotectedNetGwIp\": \"10.10.1.1\",\n    \"protectedNetPortVfw\": \"veth21\",\n    \"protectedNetPortVsn\": \"veth22\",\n    \"protectedNetCidr\": \"10.10.2.0/24\",\n    \"protectedNetGwIp\": \"10.10.2.1\",\n    \"vfwPrivateIp0\": \"10.10.1.1\",\n    \"vfwPrivateIp1\": \"10.10.2.1\",\n    \"vpgPrivateIp0\": \"10.10.1.2\",\n    \"vsnPrivateIp0\": \"10.10.2.2\"\n  }\n}",
+    "outputParams": "{\"output1\": 100}"
+}'
+
+curl --location --request GET 'http://localhost:5005/o2dms/${dmsId}/O2dms_DeploymentLifecycle/NfDeploymentDescriptor'
+
+curl --location --request POST 'http://localhost:5005/o2dms/${dmsId}/O2dms_DeploymentLifecycle/NfDeployment' \
+--header 'Content-Type: application/json' \
+--data-raw '{
+    "name": "cfw100",
+    "description": "demo nf deployment",
+    "descriptorId": "",
+    "parentDeploymentId": ""
+}'
+
+curl --location --request GET 'http://localhost:5005/o2dms/${dmsId}/O2dms_DeploymentLifecycle/NfDeployment'
+
+export NfDeploymentId=
+
+```
+
+## check logs
+
+```sh
+docker logs -f o2_redis_pubsub_1
+kubectl get pods
+kubectl logs -f cfw100-sink-host-netdevice-59bf6fbd4b-845p4
+```
+
+## watch traffic stats
+
+open browser with url: http://:30667
+
+
+## bring down CFW
+
+```sh
+curl --location --request DELETE 'http://localhost:5005/o2dms/${dmsId}/O2dms_DeploymentLifecycle/NfDeployment/${NfDeploymentId}'
+```
diff --git a/docker-compose.yml b/docker-compose.yml
index 552154d..477441a 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -18,6 +18,9 @@ services:
       - OS_AUTH_URL=${OS_AUTH_URL}
       - OS_USERNAME=${OS_USERNAME}
      -
OS_PASSWORD=${OS_PASSWORD} + - K8S_KUBECONFIG=${K8S_KUBECONFIG} + - K8S_APISERVER=${K8S_APISERVER} + - K8S_TOKEN=${K8S_TOKEN} - LOGGING_CONFIG_LEVEL=DEBUG volumes: - ./configs:/configs @@ -25,6 +28,7 @@ services: - ./o2dms:/o2dms - ./o2common:/o2common - ./o2app:/o2app + - ./helm_sdk:/helm_sdk - ./tests:/tests entrypoint: - /bin/sh @@ -53,6 +57,7 @@ services: - ./o2dms:/o2dms - ./o2common:/o2common - ./o2app:/o2app + - ./helm_sdk:/helm_sdk - ./tests:/tests entrypoint: - /bin/sh @@ -82,6 +87,7 @@ services: - ./o2dms:/o2dms - ./o2common:/o2common - ./o2app:/o2app + - ./helm_sdk:/helm_sdk - ./tests:/tests entrypoint: - /bin/sh diff --git a/helm_sdk/__init__.py b/helm_sdk/__init__.py new file mode 100644 index 0000000..bdf0cf9 --- /dev/null +++ b/helm_sdk/__init__.py @@ -0,0 +1,212 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. + +import json + +from .exceptions import CloudifyHelmSDKError +from helm_sdk.utils import ( + run_subprocess, + prepare_parameter, + prepare_set_parameters, + validate_no_collisions_between_params_and_flags) + +# Helm cli flags names +HELM_KUBECONFIG_FLAG = 'kubeconfig' +HELM_KUBE_API_SERVER_FLAG = 'kube-apiserver' +HELM_KUBE_TOKEN_FLAG = 'kube-token' +HELM_VALUES_FLAG = 'values' +APPEND_FLAG_STRING = '--{name}={value}' + + +class Helm(object): + + def __init__(self, + logger, + binary_path, + environment_variables + ): + self.binary_path = binary_path + self.logger = logger + if not isinstance(environment_variables, dict): + raise Exception( + "Unexpected type for environment variables (should be a " + "dict): {0}".format(type( + environment_variables))) + + self.env = environment_variables + + def execute(self, command, return_output=False): + return run_subprocess( + command, + self.logger, + cwd=None, + additional_env=self.env, + additional_args=None, + return_output=return_output) + + def _helm_command(self, args): + cmd = [self.binary_path] + cmd.extend(args) + return cmd + + @staticmethod + def handle_auth_params(cmd, + kubeconfig=None, + token=None, + apiserver=None): + """ + Validation of authentication params. + Until helm will support --insecure, kubeconfig must be provided. + :param kubeconfig: Kubeconfig file path + :param: token: bearer token used for authentication. + :param: apiserver: the address and the port for the Kubernetes API + server. + """ + if kubeconfig is None: + raise CloudifyHelmSDKError( + 'Must provide kubeconfig file path.') + else: + cmd.append(APPEND_FLAG_STRING.format(name=HELM_KUBECONFIG_FLAG, + value=kubeconfig)) + + if token: + cmd.append(APPEND_FLAG_STRING.format(name=HELM_KUBE_TOKEN_FLAG, + value=token)) + + if apiserver: + cmd.append( + APPEND_FLAG_STRING.format(name=HELM_KUBE_API_SERVER_FLAG, + value=apiserver)) + + def install(self, + name, + chart, + flags=None, + set_values=None, + values_file=None, + kubeconfig=None, + token=None, + apiserver=None, + **_): + """ + Execute helm install command. + :param name: name for the created release. 
+ :param chart: chart name to install. + :param flags: list of flags to add to the install command. + :param set_values: list of variables and their values for --set. + :param kubeconfig: path to kubeconfig file. + :param values_file: values file path. + :param token: bearer token used for authentication. + :param apiserver: the address and the port for the Kubernetes API + server. + :return output of install command. + """ + cmd = ['install', name, chart, '--wait', '--output=json'] + self.handle_auth_params(cmd, kubeconfig, token, apiserver) + if values_file: + cmd.append(APPEND_FLAG_STRING.format(name=HELM_VALUES_FLAG, + value=values_file)) + flags = flags or [] + validate_no_collisions_between_params_and_flags(flags) + cmd.extend([prepare_parameter(flag) for flag in flags]) + set_arguments = set_values or [] + cmd.extend(prepare_set_parameters(set_arguments)) + output = self.execute(self._helm_command(cmd), True) + return json.loads(output) + + def uninstall(self, + name, + flags=None, + kubeconfig=None, + token=None, + apiserver=None, + **_): + cmd = ['uninstall', name] + self.handle_auth_params(cmd, kubeconfig, token, apiserver) + flags = flags or [] + validate_no_collisions_between_params_and_flags(flags) + cmd.extend([prepare_parameter(flag) for flag in flags]) + self.execute(self._helm_command(cmd)) + + def repo_add(self, + name, + repo_url, + flags=None, + **_): + cmd = ['repo', 'add', name, repo_url] + flags = flags or [] + cmd.extend([prepare_parameter(flag) for flag in flags]) + self.execute(self._helm_command(cmd)) + + def repo_remove(self, + name, + flags=None, + **_): + cmd = ['repo', 'remove', name] + flags = flags or [] + cmd.extend([prepare_parameter(flag) for flag in flags]) + self.execute(self._helm_command(cmd)) + + def repo_list(self): + cmd = ['repo', 'list', '--output=json'] + output = self.execute(self._helm_command(cmd), True) + return json.loads(output) + + def repo_update(self, flags): + cmd = ['repo', 'update'] + flags = flags or [] + cmd.extend([prepare_parameter(flag) for flag in flags]) + self.execute(self._helm_command(cmd)) + + def upgrade(self, + release_name, + chart=None, + flags=None, + set_values=None, + values_file=None, + kubeconfig=None, + token=None, + apiserver=None, + **_): + """ + Execute helm upgrade command. + :param release_name: name of the release to upgrade. + :param chart: The chart to upgrade the release with. + The chart argument can be either: a chart reference('example/mariadb'), + a packaged chart, or a fully qualified URL. + :param flags: list of flags to add to the upgrade command. + :param set_values: list of variables and their values for --set. + :param kubeconfig: path to kubeconfig file. + :param values_file: values file path. + :param token: bearer token used for authentication. + :param apiserver: the address and the port for the Kubernetes API + server. + :return output of helm upgrade command. 
+ """ + if not chart: + raise CloudifyHelmSDKError( + 'Must provide chart for upgrade release.') + cmd = ['upgrade', release_name, chart, '--atomic', '-o=json'] + self.handle_auth_params(cmd, kubeconfig, token, apiserver) + if values_file: + cmd.append(APPEND_FLAG_STRING.format(name=HELM_VALUES_FLAG, + value=values_file)) + flags = flags or [] + validate_no_collisions_between_params_and_flags(flags) + cmd.extend([prepare_parameter(flag) for flag in flags]) + set_arguments = set_values or [] + cmd.extend(prepare_set_parameters(set_arguments)) + output = self.execute(self._helm_command(cmd), True) + return json.loads(output) diff --git a/helm_sdk/_compat.py b/helm_sdk/_compat.py new file mode 100644 index 0000000..a19d7f4 --- /dev/null +++ b/helm_sdk/_compat.py @@ -0,0 +1,49 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. + +"""Python 2 + 3 compatibility utils""" +# flake8: noqa + +import sys + +PY2 = sys.version_info[0] == 2 + +if PY2: + from StringIO import StringIO + + exec(""" +def reraise(exception_type, value, traceback): + raise exception_type, value, traceback +""") + text_type = unicode + exec(""" +def exec_(code, globs): + exec code in globs +""") + +else: + import builtins + from io import StringIO + + def reraise(exception_type, + value, + traceback): + raise value.with_traceback(traceback) + + text_type = str + exec_ = getattr(builtins, 'exec') + +__all__ = [ + 'PY2', 'StringIO', 'reraise', 'text_type', 'exec_'] diff --git a/helm_sdk/exceptions.py b/helm_sdk/exceptions.py new file mode 100644 index 0000000..aebb5bb --- /dev/null +++ b/helm_sdk/exceptions.py @@ -0,0 +1,21 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. + + +class CloudifyHelmSDKError(Exception): + """Generic Error for handling issues preparing + Helm command. + """ + pass diff --git a/helm_sdk/filters.py b/helm_sdk/filters.py new file mode 100644 index 0000000..2c1d69e --- /dev/null +++ b/helm_sdk/filters.py @@ -0,0 +1,52 @@ +# Copyright (c) 2016-2020 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from copy import deepcopy +import re + +from ._compat import text_type + +OBFUSCATION_KEYWORDS = ('PASSWORD', 'SECRET', 'TOKEN',) +OBFUSCATION_RE = re.compile(r'((password|secret|token)(:|=)\s*)(\S+)', + flags=re.IGNORECASE | re.MULTILINE) +OBFUSCATED_SECRET = 'x' * 16 + + +def obfuscate_passwords(obj): + """Obfuscate passwords in dictionary or list of dictionaries. + + Returns a copy of original object with elements potentially containing + passwords obfuscated. A copy.deepcopy() is used for copying dictionaries + but only when absolutely necessary. If a given object does not contain + any passwords, original is returned and deepcopy never performed. + """ + if isinstance(obj, (text_type, bytes,)): + return OBFUSCATION_RE.sub('\\1{0}'.format(OBFUSCATED_SECRET), obj) + if isinstance(obj, list): + return [obfuscate_passwords(elem) for elem in obj] + if not isinstance(obj, dict): + return obj + result = obj + for k, v in list(result.items()): + if any(x for x in OBFUSCATION_KEYWORDS if x in k.upper()): + a_copy = deepcopy(result) + a_copy[k] = OBFUSCATED_SECRET + result = a_copy + if isinstance(v, (dict, list,)): + obfuscated_v = obfuscate_passwords(v) + if obfuscated_v is not v: + a_copy = deepcopy(result) + a_copy[k] = obfuscated_v + result = a_copy + return result + diff --git a/helm_sdk/tests/__init__.py b/helm_sdk/tests/__init__.py new file mode 100644 index 0000000..8448e2e --- /dev/null +++ b/helm_sdk/tests/__init__.py @@ -0,0 +1,32 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. + +import logging +import unittest + +from .. import Helm + +HELM_BINARY = '/tmp/helm' + + +class HelmTestBase(unittest.TestCase): + def setUp(self): + super(HelmTestBase, self).setUp() + log = logging.getLogger('helm_log') + self.helm = Helm(log, HELM_BINARY, environment_variables={}) + + def tearDown(self): + self.helm = None + super(HelmTestBase, self).tearDown() diff --git a/helm_sdk/tests/test_sdk.py b/helm_sdk/tests/test_sdk.py new file mode 100644 index 0000000..2a454fe --- /dev/null +++ b/helm_sdk/tests/test_sdk.py @@ -0,0 +1,162 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. + +import mock + +from . import HelmTestBase, HELM_BINARY +from helm_sdk.exceptions import CloudifyHelmSDKError + +mock_flags = [{'name': 'dry-run'}, + {'name': 'timeout', 'value': '100'}] +mock_set_args = [{'name': 'x', 'value': 'y'}, + {'name': 'a', 'value': 'b'}] + + +class HelmSDKTest(HelmTestBase): + + def test_install_with_token_and_api(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.install('release1', + 'my_chart', + mock_flags, + mock_set_args, + token='demotoken', + apiserver='https://1.0.0.0') + + def test_install_with_kubeconfig(self): + mock_execute = mock.Mock(return_value='{"manifest":"resourceA"}') + self.helm.execute = mock_execute + out = self.helm.install('release1', + 'my_chart', + mock_flags, + mock_set_args, + kubeconfig='/path/to/config') + cmd_expected = [HELM_BINARY, 'install', 'release1', 'my_chart', + '--wait', '--output=json', + '--kubeconfig=/path/to/config', '--dry-run', + '--timeout=100', '--set', 'x=y', '--set', 'a=b'] + mock_execute.assert_called_once_with(cmd_expected, True) + self.assertEqual(out, {"manifest": "resourceA"}) + + def test_install_no_token_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.install('release1', + 'my_chart', + mock_flags, + mock_set_args, + apiserver='https://1.0.0.0') + + def test_install_no_apiserver_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.install('release1', + 'my_chart', + mock_flags, + mock_set_args, + token='demotoken') + + def test_uninstall_with_kubekonfig(self): + mock_execute = mock.Mock() + self.helm.execute = mock_execute + self.helm.uninstall('release1', + mock_flags, + kubeconfig='/path/to/config') + cmd_expected = [HELM_BINARY, 'uninstall', 'release1', + '--kubeconfig=/path/to/config', '--dry-run', + '--timeout=100'] + mock_execute.assert_called_once_with(cmd_expected) + + def test_uninstall_no_token_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.uninstall('release1', + mock_flags, + apiserver='https://1.0.0.0') + + def test_uninstall_no_apiserver_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.uninstall('release1', + mock_flags, + token='demotoken') + + def test_repo_add(self): + mock_execute = mock.Mock() + self.helm.execute = mock_execute + self.helm.repo_add('my_repo', 'https://github.com/repo') + cmd_expected = [HELM_BINARY, 'repo', 'add', 'my_repo', + 'https://github.com/repo'] + mock_execute.assert_called_once_with(cmd_expected) + + def test_repo_remove(self): + mock_execute = mock.Mock() + self.helm.execute = mock_execute + self.helm.repo_remove('my_repo') + cmd_expected = [HELM_BINARY, 'repo', 'remove', 'my_repo'] + mock_execute.assert_called_once_with(cmd_expected) + + def test_upgrade_with_token_and_api(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig 
file path.'): + self.helm.upgrade('release1', + 'example/mariadb', + mock_flags, + mock_set_args, + token='demotoken', + apiserver='https://1.0.0.0') + + def test_upgrade_with_kubeconfig(self): + mock_execute = mock.Mock(return_value='{"name":"release1"}') + self.helm.execute = mock_execute + out = self.helm.upgrade('release1', + 'my_chart', + mock_flags, + mock_set_args, + kubeconfig='/path/to/config') + cmd_expected = [HELM_BINARY, 'upgrade', 'release1', 'my_chart', + '--atomic', '-o=json', '--kubeconfig=/path/to/config', + '--dry-run', '--timeout=100', '--set', 'x=y', '--set', + 'a=b'] + mock_execute.assert_called_once_with(cmd_expected, True) + self.assertEqual(out, {"name": "release1"}) + + def test_upgrade_no_token_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.upgrade('release1', + 'my_chart', + mock_flags, + mock_set_args, + apiserver='https://1.0.0.0') + + def test_upgrade_no_apiserver_and_no_kubeconfig(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide kubeconfig file path.'): + self.helm.upgrade('release1', + 'my_chart', + mock_flags, + mock_set_args, + token='demotoken') + + def test_upgrade_no_chart(self): + with self.assertRaisesRegexp(CloudifyHelmSDKError, + 'Must provide chart for upgrade ' + 'release.'): + self.helm.upgrade(release_name='release1', + flags=mock_flags, + set_values=mock_set_args, + kubeconfig='/path/to/config') diff --git a/helm_sdk/tests/test_utils.py b/helm_sdk/tests/test_utils.py new file mode 100644 index 0000000..10b01b3 --- /dev/null +++ b/helm_sdk/tests/test_utils.py @@ -0,0 +1,62 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+ +import unittest + +from helm_sdk.exceptions import CloudifyHelmSDKError +from helm_sdk.utils import ( + prepare_parameter, + prepare_set_parameters, + validate_no_collisions_between_params_and_flags) + + +class TestUtils(unittest.TestCase): + + def test_prepare_parameter(self): + param_dict = {'name': 'param1'} + self.assertEqual(prepare_parameter(param_dict), '--param1') + param_dict.update({'value': 'value1'}) + self.assertEqual(prepare_parameter(param_dict), '--param1=value1') + + def test_prepare_set_parameters(self): + set_no_val = [{'name': 'x'}] + with self.assertRaisesRegexp( + CloudifyHelmSDKError, + "\"set\" parameter name or value is missing"): + prepare_set_parameters(set_no_val) + + with self.assertRaisesRegexp( + CloudifyHelmSDKError, + "\"set\" parameter name or value is missing"): + set_no_name = [{'value': 'y'}] + prepare_set_parameters(set_no_name) + # Now set_dict_no_val is a valid set parameter dictionary + valid_set_list = [{'name': 'x', 'value': 'y'}] + self.assertEqual(prepare_set_parameters(valid_set_list), + ['--set', 'x=y']) + + def test_validate_no_collisions_between_params_and_flags(self): + fake_flags = [{'name': 'kube-apiserver', 'value': 'https://0.0.0.0'}] + with self.assertRaisesRegexp(CloudifyHelmSDKError, + "Please do not pass"): + validate_no_collisions_between_params_and_flags(fake_flags) + fake_flags = [{'name': 'debug'}] + self.assertEqual( + validate_no_collisions_between_params_and_flags(fake_flags), + None) + fake_flags = [] + self.assertEqual( + validate_no_collisions_between_params_and_flags(fake_flags), + None) diff --git a/helm_sdk/utils.py b/helm_sdk/utils.py new file mode 100644 index 0000000..6561ba1 --- /dev/null +++ b/helm_sdk/utils.py @@ -0,0 +1,166 @@ +######## +# Copyright (c) 2019 Cloudify Platform Ltd. All rights reserved +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# * See the License for the specific language governing permissions and +# * limitations under the License. 
+ +import os +import copy +import threading +import subprocess + +from helm_sdk.filters import obfuscate_passwords + +from helm_sdk._compat import StringIO, text_type +from helm_sdk.exceptions import CloudifyHelmSDKError + +FLAGS_LIST_TO_VALIDATE = ['kube-apiserver', 'kube-token', 'kubeconfig'] + + +def run_subprocess(command, + logger, + cwd=None, + additional_env=None, + additional_args=None, + return_output=False): + if additional_args is None: + additional_args = {} + args_to_pass = copy.deepcopy(additional_args) + if additional_env: + passed_env = args_to_pass.setdefault('env', {}) + passed_env.update(os.environ) + passed_env.update(additional_env) + + logger.info( + "Running: command={cmd}, cwd={cwd}, additional_args={args}".format( + cmd=obfuscate_passwords(command), + cwd=cwd, + args=obfuscate_passwords(args_to_pass))) + + process = subprocess.Popen( + args=command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + stdin=None, + cwd=cwd, + **args_to_pass) + + if return_output: + stdout_consumer = CapturingOutputConsumer( + process.stdout) + else: + stdout_consumer = LoggingOutputConsumer( + process.stdout, logger, " ") + stderr_consumer = LoggingOutputConsumer( + process.stderr, logger, " ") + + return_code = process.wait() + stdout_consumer.join() + stderr_consumer.join() + + if return_code: + raise subprocess.CalledProcessError(return_code, + [obfuscate_passwords(cmd_element) + for cmd_element in command]) + + output = stdout_consumer.buffer.getvalue() if return_output else None + logger.info("Returning output:\n{0}".format( + obfuscate_passwords(output) if output is not None else '')) + + return output + + +# Stolen from the script plugin, until this class +# moves to a utils module in cloudify-common. +class OutputConsumer(object): + def __init__(self, out): + self.out = out + self.consumer = threading.Thread(target=self.consume_output) + self.consumer.daemon = True + + def consume_output(self): + for line in self.out: + self.handle_line(line) + self.out.close() + + def handle_line(self, line): + raise NotImplementedError("Must be implemented by subclass.") + + def join(self): + self.consumer.join() + + +class LoggingOutputConsumer(OutputConsumer): + def __init__(self, out, logger, prefix): + OutputConsumer.__init__(self, out) + self.logger = logger + self.prefix = prefix + self.consumer.start() + + def handle_line(self, line): + self.logger.info( + "{0}{1}".format(text_type(self.prefix), + obfuscate_passwords( + line.decode('utf-8').rstrip('\n')))) + + +class CapturingOutputConsumer(OutputConsumer): + def __init__(self, out): + OutputConsumer.__init__(self, out) + self.buffer = StringIO() + self.consumer.start() + + def handle_line(self, line): + self.buffer.write(line.decode('utf-8')) + + def get_buffer(self): + return self.buffer + + +def prepare_parameter(arg_dict): + """ + Prepare single parameter. + :param arg_dict: dictionary with the name of the flag and value(optional) + :return: "--name=value" or -"-name" + """ + try: + param_string = "--" + arg_dict["name"] + return param_string + '=' + arg_dict.get("value") if arg_dict.get( + "value") else param_string + except KeyError: + raise CloudifyHelmSDKError("Parameter name doesen't exist.") + + +def prepare_set_parameters(set_values): + """ + Prepare set parameters for install command. + :param set_values: list of dictionaries with the name of the variable to + set command and its value. 
+ :return list like: ["--set", "name=value","--set", + """ + set_list = [] + for set_dict in set_values: + set_list.append('--set') + try: + set_list.append(set_dict["name"] + "=" + set_dict["value"]) + except KeyError: + raise CloudifyHelmSDKError( + "\"set\" parameter name or value is missing.") + return set_list + + +def validate_no_collisions_between_params_and_flags(flags): + if [flag for flag in flags if flag['name'] in FLAGS_LIST_TO_VALIDATE]: + raise CloudifyHelmSDKError( + 'Please do not pass {flags_list} under "flags" property,' + 'each of them has a known property.'.format( + flags_list=FLAGS_LIST_TO_VALIDATE)) diff --git a/o2app/entrypoints/redis_eventconsumer.py b/o2app/entrypoints/redis_eventconsumer.py index 1d38f04..8472949 100644 --- a/o2app/entrypoints/redis_eventconsumer.py +++ b/o2app/entrypoints/redis_eventconsumer.py @@ -32,7 +32,7 @@ def main(): logger.info("Redis pubsub starting") bus = bootstrap.bootstrap() pubsub = r.pubsub(ignore_subscribe_messages=True) - pubsub.subscribe("NfDeploymentCreated") + pubsub.subscribe("NfDeploymentStateChanged") for m in pubsub.listen(): try: @@ -45,17 +45,18 @@ def main(): def handle_dms_changed(m, bus): logger.info("handling %s", m) channel = m['channel'].decode("UTF-8") - if channel == "NfDeploymentCreated": + if channel == "NfDeploymentStateChanged": datastr = m['data'] data = json.loads(datastr) - logger.info('InstallNfDeployment with cmd:{}'.format(data)) - cmd = commands.InstallNfDeployment(NfDeploymentId = data['NfDeploymentId']) + logger.info('HandleNfDeploymentStateChanged with cmd:{}'.format(data)) + cmd = commands.HandleNfDeploymentStateChanged( + NfDeploymentId = data['NfDeploymentId'], + FromState = data['FromState'], + ToState = data['ToState'] + ) bus.handle(cmd) else: logger.info("unhandled:{}".format(channel)) - # data = json.loads(m["data"]) - # cmd = commands.UpdateDms(ref=data["dmsid"]) - # bus.handle(cmd) if __name__ == "__main__": diff --git a/o2app/service/handlers.py b/o2app/service/handlers.py index d68b295..ebbf9c0 100644 --- a/o2app/service/handlers.py +++ b/o2app/service/handlers.py @@ -14,8 +14,8 @@ # pylint: disable=unused-argument from __future__ import annotations -from o2dms.service.nfdeployment_handler import install_nfdeployment -from o2dms.service.nfdeployment_handler import publish_nfdeployment_created + +from o2dms.service import nfdeployment_handler # from dataclasses import asdict from typing import List, Dict, Callable, Type # TYPE_CHECKING @@ -37,8 +37,18 @@ class InvalidResourceType(Exception): EVENT_HANDLERS = { - o2dms_events.NfDeploymentCreated: [publish_nfdeployment_created] -} + o2dms_events.NfDeploymentStateChanged: [ + nfdeployment_handler.publish_nfdeployment_state_change + ] + # o2dms_events.NfDeploymentCreated: [ + # nfdeployment_handler.publish_nfdeployment_created], + # o2dms_events.NfDeploymentInstalled: [ + # nfdeployment_handler.publish_nfdeployment_installed], + # o2dms_events.NfDeploymentUninstalling: [ + # nfdeployment_handler.publish_nfdeployment_uninstalling], + # o2dms_events.NfDeploymentUninstalled: [ + # nfdeployment_handler.publish_nfdeployment_uninstalled] +} # type: Dict[Type[events.Event], Callable] COMMAND_HANDLERS = { @@ -51,5 +61,13 @@ COMMAND_HANDLERS = { commands.UpdatePserverIf: pserver_if_handler.update_pserver_if, commands.UpdatePserverIfPort: pserver_port_handler.update_pserver_port, commands.UpdatePserverEth: pserver_eth_handler.update_pserver_eth, - o2dms_cmmands.InstallNfDeployment: install_nfdeployment + + o2dms_cmmands.HandleNfDeploymentStateChanged: 
\ + nfdeployment_handler.handle_nfdeployment_statechanged, + o2dms_cmmands.InstallNfDeployment: \ + nfdeployment_handler.install_nfdeployment, + o2dms_cmmands.UninstallNfDeployment: \ + nfdeployment_handler.uninstall_nfdeployment, + o2dms_cmmands.DeleteNfDeployment: \ + nfdeployment_handler.delete_nfdeployment, } # type: Dict[Type[commands.Command], Callable] diff --git a/o2common/config/config.py b/o2common/config/config.py index 2849e49..7207006 100644 --- a/o2common/config/config.py +++ b/o2common/config/config.py @@ -91,3 +91,14 @@ def get_stx_access_info(): os_client_args['os_region_name'] = 'RegionOne' os_client_args['api_version'] = 1 return os_client_args + + +def get_k8s_api_endpoint(): + K8S_KUBECONFIG = os.environ.get("K8S_KUBECONFIG", None) + K8S_APISERVER = os.environ.get("K8S_APISERVER", None) + K8S_TOKEN = os.environ.get("K8S_TOKEN", None) + return K8S_KUBECONFIG, K8S_APISERVER, K8S_TOKEN + + +def get_helm_cli(): + return '/usr/local/bin/helm' diff --git a/o2dms/adapter/orm.py b/o2dms/adapter/orm.py index 4226d26..6bf5e55 100644 --- a/o2dms/adapter/orm.py +++ b/o2dms/adapter/orm.py @@ -18,6 +18,7 @@ from sqlalchemy import ( Column, Integer, String, + Text, # Date, DateTime, # ForeignKey, @@ -45,9 +46,10 @@ nfDeploymentDesc = Table( Column("deploymentManagerId", String(255)), Column("name", String(255)), Column("description", String(255)), - Column("inputParams", String(255)), + Column("inputParams", Text()), Column("outputParams", String(255)), - Column("artifactUrl", String(255)), + Column("artifactRepoUrl", String(255)), + Column("artifactName", String(255)), # Column("extensions", String(1024)) ) diff --git a/o2dms/api/dms_dto.py b/o2dms/api/dms_dto.py index 06d4f3c..8b305e3 100644 --- a/o2dms/api/dms_dto.py +++ b/o2dms/api/dms_dto.py @@ -45,7 +45,8 @@ class DmsLcmNfDeploymentDescriptorDTO: 'description': fields.String, 'inputParams': fields.String, 'outputParams': fields.String, - 'artifactUrl': fields.String + 'artifactRepoUrl': fields.String, + 'artifactName': fields.String } ) @@ -54,7 +55,8 @@ class DmsLcmNfDeploymentDescriptorDTO: { 'name': fields.String, 'description': fields.String, - 'artifactUrl': fields.String, + 'artifactRepoUrl': fields.String, + 'artifactName': fields.String, 'inputParams': fields.String, 'outputParams': fields.String } @@ -73,7 +75,8 @@ class DmsLcmNfDeploymentDescriptorDTO: { 'name': fields.String, 'description': fields.String, - 'artifactUrl': fields.String, + 'artifactRepoUrl': fields.String, + 'artifactName': fields.String, 'inputParams': fields.String, 'outputParams': fields.String } diff --git a/o2dms/api/dms_lcm_nfdeployment.py b/o2dms/api/dms_lcm_nfdeployment.py index bdf5ea5..97eb12a 100644 --- a/o2dms/api/dms_lcm_nfdeployment.py +++ b/o2dms/api/dms_lcm_nfdeployment.py @@ -14,8 +14,8 @@ from sqlalchemy import select import uuid +from o2dms.domain.states import NfDeploymentState from o2common.service import messagebus -from o2dms.domain import events from o2common.service import unit_of_work from o2dms.adapter.orm import nfDeployment from o2dms.api.dms_dto import DmsLcmNfDeploymentDTO @@ -48,19 +48,20 @@ def lcm_nfdeployment_create( bus: messagebus.MessageBus): uow = bus.uow + id = str(uuid.uuid4()) with uow: _check_duplication(input, uow) _check_dependencies(input, uow) - id = str(uuid.uuid4()) entity = NfDeployment( id, input['name'], deploymentManagerId, input['description'], input['descriptorId'], input['parentDeploymentId']) uow.nfdeployments.add(entity) + entity.transit_state(NfDeploymentState.NotInstalled) - # publish event - 
event = events.NfDeploymentCreated(NfDeploymentId=id) + # to be refactor later according to O2 DMS API design + entity.transit_state(NfDeploymentState.Installing) uow.commit() - bus.handle(event) + _handle_events(bus) return id @@ -68,26 +69,63 @@ def lcm_nfdeployment_create( def lcm_nfdeployment_update( nfdeploymentid: str, input: DmsLcmNfDeploymentDTO.NfDeployment_update, - uow: unit_of_work.AbstractUnitOfWork): + bus: messagebus.MessageBus): + uow = bus.uow with uow: - entity = uow.nfdeployments.get(nfdeploymentid) + entity: NfDeployment = uow.nfdeployments.get(nfdeploymentid) entity.name = input['name'] entity.description = input['description'] entity.outputParams = input['parentDeploymentId'] + entity.transit_state(NfDeploymentState.Updating) uow.commit() + _handle_events(bus) return True -def lcm_nfdeployment_delete( - nfdeploymentid: str, uow: unit_of_work.AbstractUnitOfWork): +def _handle_events(bus: messagebus.MessageBus): + # handle events + events = bus.uow.collect_new_events() + for event in events: + bus.handle(event) + return True + +def lcm_nfdeployment_uninstall( + nfdeploymentid: str, + bus: messagebus.MessageBus): + + uow = bus.uow with uow: - uow.nfdeployments.delete(nfdeploymentid) + entity: NfDeployment = uow.nfdeployments.get(nfdeploymentid) + if entity.status == NfDeploymentState.Installed: + entity.transit_state(NfDeploymentState.Uninstalling) + elif entity.status == NfDeploymentState.Abnormal: + bus.uow.nfdeployments.delete(nfdeploymentid) + else: + entity.transit_state(NfDeploymentState.Abnormal) uow.commit() + _handle_events(bus) return True +# def lcm_nfdeployment_delete( +# nfdeploymentid: str, +# bus: messagebus.MessageBus): + +# uow = bus.uow +# with uow: +# entity = uow.nfdeployments.get(nfdeploymentid) +# if entity.status != NfDeploymentState.Initial: +# raise Exception( +# "NfDeployment {} is not in status to delete".format( +# nfdeploymentid)) +# uow.nfdeployments.delete(nfdeploymentid) +# entity.transit_state(NfDeploymentState.Deleted) +# uow.commit() +# return True + + def _check_duplication( input: DmsLcmNfDeploymentDTO, uow: unit_of_work.AbstractUnitOfWork): diff --git a/o2dms/api/dms_lcm_nfdeploymentdesc.py b/o2dms/api/dms_lcm_nfdeploymentdesc.py index 5ed98b7..02621d6 100644 --- a/o2dms/api/dms_lcm_nfdeploymentdesc.py +++ b/o2dms/api/dms_lcm_nfdeploymentdesc.py @@ -56,7 +56,8 @@ def lcm_nfdeploymentdesc_create( id = str(uuid.uuid4()) entity = NfDeploymentDesc( id, input['name'], deploymentManagerId, input['description'], - input['inputParams'], input['outputParams'], input['artifactUrl']) + input['inputParams'], input['outputParams'], + input['artifactRepoUrl'], input['artifactName']) uow.nfdeployment_descs.add(entity) uow.commit() return id @@ -73,7 +74,8 @@ def lcm_nfdeploymentdesc_update( entity.description = input['description'] entity.inputParams = input['inputParams'] entity.outputParams = input['outputParams'] - entity.artifactUrl = input['artifactUrl'] + entity.artifactRepoUrl = input['artifactRepoUrl'] + entity.artifactName = input['artifactName'] uow.commit() return True diff --git a/o2dms/api/nfdeployment_route.py b/o2dms/api/nfdeployment_route.py index bced33d..1610084 100644 --- a/o2dms/api/nfdeployment_route.py +++ b/o2dms/api/nfdeployment_route.py @@ -105,7 +105,7 @@ class DmsLcmNfDeploymentGetRouter(Resource): bus = MessageBus.get_instance() data = api_dms_lcm_v1.payload dms_lcm_view.lcm_nfdeployment_update( - nfDeploymentId, data, bus.uow) + nfDeploymentId, data, bus) return {}, 201 except Exception as ex: 
logger.warning("{}".format(str(ex))) @@ -115,7 +115,11 @@ class DmsLcmNfDeploymentGetRouter(Resource): @api_dms_lcm_v1.response(204, 'NfDeployment deleted') def delete(self, nfDeploymentId, deploymentManagerID): bus = MessageBus.get_instance() - with bus.uow: - bus.uow.nfdeployments.delete(nfDeploymentId) - bus.uow.commit() + result = dms_lcm_view\ + .lcm_nfdeployment_uninstall(nfDeploymentId, bus) + if result is not None: + return result + api_dms_lcm_v1.abort( + 404, "NfDeployment {} doesn't exist".format( + nfDeploymentId)) return '', 204 diff --git a/o2dms/domain/commands.py b/o2dms/domain/commands.py index 7c275a2..31f9382 100644 --- a/o2dms/domain/commands.py +++ b/o2dms/domain/commands.py @@ -16,6 +16,8 @@ # from datetime import date # from typing import Optional from dataclasses import dataclass +from o2dms.domain.states import NfDeploymentState +# from o2dms.domain.dms import NfDeployment # from datetime import datetime # from o2ims.domain.resource_type import ResourceTypeEnum @@ -25,3 +27,20 @@ from o2common.domain.commands import Command @dataclass class InstallNfDeployment(Command): NfDeploymentId: str + + +@dataclass +class UninstallNfDeployment(Command): + NfDeploymentId: str + + +@dataclass +class DeleteNfDeployment(Command): + NfDeploymentId: str + + +@dataclass +class HandleNfDeploymentStateChanged(Command): + NfDeploymentId: str + FromState: NfDeploymentState + ToState: NfDeploymentState diff --git a/o2dms/domain/dms.py b/o2dms/domain/dms.py index 783231f..3555e1c 100644 --- a/o2dms/domain/dms.py +++ b/o2dms/domain/dms.py @@ -13,14 +13,16 @@ # limitations under the License. from __future__ import annotations +from o2dms.domain import events +from o2dms.domain.states import NfDeploymentState -from o2common.domain.base import AgRoot +from o2common.domain.base import AgRoot, Serializer -class NfDeploymentDesc(AgRoot): +class NfDeploymentDesc(AgRoot, Serializer): def __init__(self, id: str, name: str, dmsId: str, description: str = '', inputParams: str = '', outputParams: str = '', - artifacturl: str = '') -> None: + artifactRepoUrl: str = '', artifactName: str = '') -> None: super().__init__() self.id = id self.version_number = 0 @@ -29,11 +31,13 @@ class NfDeploymentDesc(AgRoot): self.description = description self.inputParams = inputParams self.outputParams = outputParams - self.artifactUrl = artifacturl + self.artifactRepoUrl = artifactRepoUrl + self.artifactName = artifactName + self.status = 0 # self.extensions = [] -class NfDeployment(AgRoot): +class NfDeployment(AgRoot, Serializer): def __init__(self, id: str, name: str, dmsId: str, description: str = '', descriptorId: str = '', parentId: str = '',) -> None: super().__init__() @@ -44,10 +48,22 @@ class NfDeployment(AgRoot): self.description = description self.descriptorId = descriptorId self.parentDeploymentId = parentId - self.status = 0 + self.status = NfDeploymentState.Initial + + def transit_state(self, state: NfDeploymentState): + if (self.status != state): + self._append_event(self.status, state) + self.status = state + + def _append_event(self, fromState, toState): + if not hasattr(self, "events"): + self.events = [] + self.events.append( + events.NfDeploymentStateChanged( + NfDeploymentId=self.id, FromState=fromState, ToState=toState)) -class NfOCloudVResource(AgRoot): +class NfOCloudVResource(AgRoot, Serializer): def __init__(self, id: str, name: str, dmsId: str, description: str = '', descriptorId: str = '', nfDeploymentId: str = '', vresourceType: int = 0,) -> None: diff --git a/o2dms/domain/events.py 
b/o2dms/domain/events.py index 93af574..bbb5fb5 100644 --- a/o2dms/domain/events.py +++ b/o2dms/domain/events.py @@ -14,14 +14,47 @@ # pylint: disable=too-few-public-methods from dataclasses import dataclass +from o2dms.domain.states import NfDeploymentState from o2common.domain.events import Event @dataclass -class NfDeploymentCreated(Event): +class NfDeploymentStateChanged(Event): NfDeploymentId: str + FromState: NfDeploymentState + ToState: NfDeploymentState -@dataclass -class NfDeploymentDeleted(Event): - NfDeploymentId: str +# @dataclass +# class NfDeploymentCreated(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentInstalled(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentUninstalling(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentUninstalled(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentUpdating(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentUpdated(Event): +# NfDeploymentId: str + + +# @dataclass +# class NfDeploymentDeleted(Event): +# NfDeploymentId: str diff --git a/o2dms/domain/states.py b/o2dms/domain/states.py new file mode 100644 index 0000000..3f16832 --- /dev/null +++ b/o2dms/domain/states.py @@ -0,0 +1,29 @@ + +# Copyright (C) 2021 Wind River Systems, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +from dataclasses import dataclass + + +@dataclass +class NfDeploymentState(): + Initial = 0 + NotInstalled = 1 + Installing = 2 + Installed = 3 + Updating = 4 + Uninstalling = 5 + Abnormal = 6 + Deleted = 7 diff --git a/o2dms/service/nfdeployment_handler.py b/o2dms/service/nfdeployment_handler.py index 6143f18..e960792 100644 --- a/o2dms/service/nfdeployment_handler.py +++ b/o2dms/service/nfdeployment_handler.py @@ -14,30 +14,204 @@ # pylint: disable=unused-argument from __future__ import annotations -from o2dms.domain.commands import InstallNfDeployment +from o2dms.domain.states import NfDeploymentState +# from o2common.service import messagebus +from o2dms.domain.dms import NfDeployment, NfDeploymentDesc +from o2dms.domain import commands from typing import Callable from o2dms.domain import events from o2common.service.unit_of_work import AbstractUnitOfWork +from helm_sdk import Helm +from ruamel import yaml +import json +from o2common.config import config +from retry import retry # if TYPE_CHECKING: # from . 
import unit_of_work from o2common.helper import o2logging logger = o2logging.get_logger(__name__) +LOCAL_HELM_BIN = config.get_helm_cli() +K8S_KUBECONFIG, K8S_APISERVER, K8S_TOKEN = \ + config.get_k8s_api_endpoint() -def publish_nfdeployment_created( - event: events.NfDeploymentCreated, +def publish_nfdeployment_state_change( + event: events.NfDeploymentStateChanged, publish: Callable, ): - publish("NfDeploymentCreated", event) - logger.debug("published NfDeploymentCreated: {}".format( - event.NfDeploymentId)) + publish("NfDeploymentStateChanged", event) + logger.debug( + "published NfDeploymentStateChanged: {}, state from {} to {}".format( + event.NfDeploymentId, event.FromState, event.ToState)) + + +def handle_nfdeployment_statechanged( + cmd: commands.HandleNfDeploymentStateChanged, + uow: AbstractUnitOfWork +): + if cmd.FromState == NfDeploymentState.Initial: + if cmd.ToState == NfDeploymentState.NotInstalled: + cmd2 = commands.InstallNfDeployment(cmd.NfDeploymentId) + install_nfdeployment(cmd2, uow) + else: + logger.debug("Not insterested state change: {}".format(cmd)) + elif cmd.FromState == NfDeploymentState.Installed: + if cmd.ToState == NfDeploymentState.Uninstalling: + cmd2 = commands.UninstallNfDeployment(cmd.NfDeploymentId) + uninstall_nfdeployment(cmd2, uow) + else: + logger.debug("Not insterested state change: {}".format(cmd)) + elif cmd.FromState == NfDeploymentState.NotInstalled: + if cmd.ToState == NfDeploymentState.Initial: + cmd2 = commands.DeleteNfDeployment(cmd.NfDeploymentId) + delete_nfdeployment(cmd2, uow) + else: + logger.debug("Not insterested state change: {}".format(cmd)) + else: + logger.debug("Not insterested state change: {}".format(cmd)) + + +# retry 10 seconds +@retry(tries=20, max_delay=10000) +def _retry_get_nfdeployment( + cmd: commands.InstallNfDeployment, + uow: AbstractUnitOfWork): + nfdeployment: NfDeployment = uow.nfdeployments.get( + cmd.NfDeploymentId) + if nfdeployment is None: + raise Exception("Cannot find NfDeployment: {}".format( + cmd.NfDeploymentId)) + return nfdeployment def install_nfdeployment( - cmd: InstallNfDeployment, + cmd: commands.InstallNfDeployment, uow: AbstractUnitOfWork ): logger.info("install with NfDeploymentId: {}".format( cmd.NfDeploymentId)) + nfdeployment: NfDeployment = _retry_get_nfdeployment(cmd, uow) + if nfdeployment is None: + raise Exception("Cannot find NfDeployment: {}".format( + cmd.NfDeploymentId)) + # get nfdeploymentdescriptor by descriptorId + desc: NfDeploymentDesc = uow.nfdeployment_descs.get( + nfdeployment.descriptorId) + if desc is None: + raise Exception( + "Cannot find NfDeploymentDescriptor:{} for NfDeployment:{}".format( + nfdeployment.descriptorId, nfdeployment.id + )) + + # helm repo add + repourl = desc.artifactRepoUrl + helm = Helm(logger, LOCAL_HELM_BIN, environment_variables={}) + repoName = None + try: + repolist = helm.repo_list() + for repo in repolist: + if repo['url'] == repourl: + repoName = repo['name'] + break + except Exception: + # repoExisted + repoName = None + + if not repoName: + repoName = "repo4{}".format(nfdeployment.name) + logger.debug("Trying to add repo:{}".format(repourl)) + helm.repo_add(repoName, repourl) + helm.repo_update(None) + + repolist = helm.repo_list() + logger.debug('repo list:{}'.format(repolist)) + + # helm install name chart + values_file_path = '/tmp/override_{}.yaml'.format(nfdeployment.name) + if len(desc.inputParams) > 0: + logger.info("dump override yaml:{}".format(values_file_path)) + values = json.loads(desc.inputParams) + 
_create_values_file(values_file_path, values) + else: + values_file_path = None + + logger.debug('Try to helm install {}/{} {} -f {}'.format( + repoName, nfdeployment.name, desc.artifactName, values_file_path)) + tokens = desc.artifactName.split(':') + chartname = tokens[0] + myflags = None + # if (len(tokens) > 1): + # myflags = {"name": "version", "value": tokens[1]} + result = helm.install( + nfdeployment.name, "{}/{}".format(repoName, chartname), flags=myflags, + values_file=values_file_path, kubeconfig=K8S_KUBECONFIG, + token=K8S_TOKEN, apiserver=K8S_APISERVER) + logger.debug('result: {}'.format(result)) + + # in case success + with uow: + entity: NfDeployment = uow.nfdeployments.get(cmd.NfDeploymentId) + entity.transit_state(NfDeploymentState.Installed) + uow.commit() + + +def _create_values_file(filePath: str, content: dict): + with open(filePath, "w", encoding="utf-8") as f: + yaml.dump(content, f, Dumper=yaml.RoundTripDumper) + + +def uninstall_nfdeployment( + cmd: commands.UninstallNfDeployment, + uow: AbstractUnitOfWork +): + logger.info("uninstall with NfDeploymentId: {}".format( + cmd.NfDeploymentId)) + nfdeployment: NfDeployment = _retry_get_nfdeployment(cmd, uow) + if nfdeployment is None: + raise Exception("Cannot find NfDeployment: {}".format( + cmd.NfDeploymentId)) + # get nfdeploymentdescriptor by descriptorId + desc: NfDeploymentDesc = uow.nfdeployment_descs.get( + nfdeployment.descriptorId) + if desc is None: + raise Exception( + "Cannot find NfDeploymentDescriptor:{} for NfDeployment:{}".format( + nfdeployment.descriptorId, nfdeployment.id + )) + + helm = Helm(logger, LOCAL_HELM_BIN, environment_variables={}) + + logger.debug('Try to helm del {}'.format( + nfdeployment.name)) + myflags = None + # if (len(tokens) > 1): + # myflags = {"name": "version", "value": tokens[1]} + result = helm.uninstall( + nfdeployment.name, flags=myflags, + kubeconfig=K8S_KUBECONFIG, + token=K8S_TOKEN, apiserver=K8S_APISERVER) + logger.debug('result: {}'.format(result)) + + # in case success + + with uow: + entity: NfDeployment = uow.nfdeployments.get(cmd.NfDeploymentId) + entity.transit_state(NfDeploymentState.Initial) + # uow.nfdeployments.update( + # cmd.NfDeploymentId, status=NfDeploymentState.Initial) + uow.commit() + + +def delete_nfdeployment( + cmd: commands.UninstallNfDeployment, + uow: AbstractUnitOfWork +): + logger.info("delete with NfDeploymentId: {}".format( + cmd.NfDeploymentId)) + + # nfdeployment: NfDeployment = _retry_get_nfdeployment(cmd, uow) + with uow: + uow.nfdeployments.delete(cmd.NfDeploymentId) + uow.commit() diff --git a/requirements-stx.txt b/requirements-stx.txt index 6364e51..d97f489 100644 --- a/requirements-stx.txt +++ b/requirements-stx.txt @@ -1,2 +1,3 @@ -e git+https://opendev.org/starlingx/distcloud-client.git@master#egg=distributedcloud-client&subdirectory=distributedcloud-client -e git+https://opendev.org/starlingx/config.git@master#egg=cgtsclient&subdirectory=sysinv/cgts-client/cgts-client# +# -e git+https://github.com/cloudify-incubator/cloudify-helm-plugin.git@master#egg=helmsdk&subdirectory=helm_sdk diff --git a/requirements.txt b/requirements.txt index cfeda23..4ba1a7f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,3 +15,4 @@ babel PrettyTable<0.8,>=0.7.2 retry +ruamel.yaml==0.17.17 \ No newline at end of file diff --git a/tests/o2app-api-entry.sh b/tests/o2app-api-entry.sh index ecdd768..2a8d856 100644 --- a/tests/o2app-api-entry.sh +++ b/tests/o2app-api-entry.sh @@ -13,6 +13,9 @@ mkdir -p /src/o2dms cp -r /o2dms/* /src/o2dms mkdir -p 
/src/o2app cp -r /o2app/* /src/o2app +mkdir -p /src/helm_sdk +cp -r /helm_sdk/* /src/helm_sdk + pip install -e /src export FLASK_APP=/o2app/entrypoints/flask_application.py diff --git a/tests/o2app-redis-entry.sh b/tests/o2app-redis-entry.sh index e47ce96..a4b2d8c 100644 --- a/tests/o2app-redis-entry.sh +++ b/tests/o2app-redis-entry.sh @@ -3,6 +3,13 @@ # pip install -e /src # python /o2ims/entrypoints/resource_watcher.py +# test only +if [ ! -e '/usr/local/bin/helm' ]; then + apt-get install -y curl + curl -O https://get.helm.sh/helm-v3.3.1-linux-amd64.tar.gz; + tar -zxvf helm-v3.3.1-linux-amd64.tar.gz; cp linux-amd64/helm /usr/local/bin +fi + mkdir -p /etc/o2 cp -r /configs/* /etc/o2/ mkdir -p /src/o2common @@ -13,5 +20,10 @@ mkdir -p /src/o2dms cp -r /o2dms/* /src/o2dms mkdir -p /src/o2app cp -r /o2app/* /src/o2app +mkdir -p /src/helm_sdk +cp -r /helm_sdk/* /src/helm_sdk + pip install -e /src + + python /o2app/entrypoints/redis_eventconsumer.py diff --git a/tests/o2app-watcher-entry.sh b/tests/o2app-watcher-entry.sh index f112f93..3db6cb7 100644 --- a/tests/o2app-watcher-entry.sh +++ b/tests/o2app-watcher-entry.sh @@ -13,5 +13,8 @@ mkdir -p /src/o2dms cp -r /o2dms/* /src/o2dms mkdir -p /src/o2app cp -r /o2app/* /src/o2app +mkdir -p /src/helm_sdk +cp -r /helm_sdk/* /src/helm_sdk + pip install -e /src python /o2app/entrypoints/resource_watcher.py
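
Usage sketch (illustrative only, not part of the patch): the redis event consumer above turns an `NfDeploymentStateChanged` message into a `HandleNfDeploymentStateChanged` command, and the handler then drives the bundled `helm_sdk.Helm` wrapper. The snippet below exercises the wrapper directly, the same way `o2dms/service/nfdeployment_handler.py` does; the release name, chart and repo URL are example values taken from the README walk-through, and the kubeconfig/token/apiserver are assumed to come from the `K8S_*` environment variables wired into docker-compose.yml.

```python
# Illustrative sketch -- drives the bundled helm_sdk.Helm wrapper the same
# way o2dms/service/nfdeployment_handler.py does in this change. Release,
# chart and repo URL are example values from the README walk-through.
import logging

from helm_sdk import Helm
from o2common.config import config

logger = logging.getLogger(__name__)

# get_k8s_api_endpoint() returns (K8S_KUBECONFIG, K8S_APISERVER, K8S_TOKEN)
kubeconfig, apiserver, token = config.get_k8s_api_endpoint()
helm = Helm(logger, config.get_helm_cli(), environment_variables={})

# install_nfdeployment(): register/refresh the chart repo, then install the
# chart named by the NfDeploymentDescriptor as release "cfw100".
helm.repo_add('o2imsrepo', 'http://128.224.115.15:30330')  # example repo URL
helm.repo_update(None)
result = helm.install(
    'cfw100',                             # NfDeployment name
    'o2imsrepo/firewall-host-netdevice',  # "<repoName>/<chartname>"
    values_file=None,                     # handler renders /tmp/override_<name>.yaml
    kubeconfig=kubeconfig,                # mandatory: handle_auth_params() rejects None
    token=token,
    apiserver=apiserver)
logger.debug('helm install result: %s', result)

# uninstall_nfdeployment(): remove the release again.
helm.uninstall('cfw100', kubeconfig=kubeconfig, token=token, apiserver=apiserver)
```

Note that `install()` and `uninstall()` refuse to run without a kubeconfig (`handle_auth_params` raises `CloudifyHelmSDKError`), which is why this change copies `temp/kubeconfig/config` into the image and exports `K8S_KUBECONFIG` through docker-compose, and why `Dockerfile.localtest` and `tests/o2app-redis-entry.sh` now fetch the helm 3.3.1 binary.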