--- /dev/null
+Unless otherwise specified, all software contained herein is licensed\r
+under the Apache License, Version 2.0 (the "Software License");\r
+you may not use this software except in compliance with the Software\r
+License. You may obtain a copy of the Software License at\r
+\r
+http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+Unless required by applicable law or agreed to in writing, software\r
+distributed under the Software License is distributed on an "AS IS" BASIS,\r
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+See the Software License for the specific language governing permissions\r
+and limitations under the Software License.\r
+\r
+\r
+\r
+Unless otherwise specified, all documentation contained herein is licensed\r
+under the Creative Commons License, Attribution 4.0 Intl. (the\r
+"Documentation License"); you may not use this documentation except in\r
+compliance with the Documentation License. You may obtain a copy of the\r
+Documentation License at\r
+\r
+https://creativecommons.org/licenses/by/4.0/\r
+\r
+Unless required by applicable law or agreed to in writing, documentation\r
+distributed under the Documentation License is distributed on an "AS IS"\r
+BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+implied. See the Documentation License for the specific language governing\r
+permissions and limitations under the Documentation License.
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env groovy\r
+\r
+\r
+properties([[$class: 'ParametersDefinitionProperty', parameterDefinitions: [ \r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'PHASE', defaultValue: "BUILD"],\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'ENV', defaultValue: "dev"], \r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'MECHID', defaultValue: "id"], \r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'KUBE_CONFIG', defaultValue: "kubeConfig-dev"], \r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'TILLER_NAMESPACE', defaultValue: "org-onap-otf"] \r
+]]]) \r
+\r
+\r
+echo "Build branch: ${env.BRANCH_NAME}"\r
+\r
+node("docker"){\r
+ stage 'Checkout'\r
+ checkout scm\r
+ PHASES=PHASE.tokenize( '_' );\r
+ echo "PHASES : " + PHASES\r
+ ARTIFACT_ID="otf-aaf-credential-generator"\r
+ echo "Tiller Namespace: " + TILLER_NAMESPACE\r
+\r
+ withEnv(["PATH=${env.PATH}:${tool 'jdk180'}:${env.WORKSPACE}/linux-amd64", "JAVA_HOME=${tool 'jdk180'}","HELM_HOME=${env.WORKSPACE}"]) { \r
+ \r
+ echo "PATH=${env.PATH}"\r
+ echo "JAVA_HOME=${env.JAVA_HOME}"\r
+ echo "HELM_HOME=${env.HELM_HOME}"\r
+\r
+ wrap([$class: 'ConfigFileBuildWrapper', managedFiles: [\r
+ [fileId: 'maven-settings.xml', variable: 'MAVEN_SETTINGS']\r
+ ]]) {\r
+\r
+ if (PHASES.contains("DEPLOY") || PHASES.contains("UNDEPLOY")) { \r
+ stage 'Init Helm'\r
+\r
+ //check if helm exists if not install\r
+ if(fileExists('linux-amd64/helm')){\r
+ sh """\r
+ echo "helm is already installed"\r
+ """\r
+ }\r
+ else{\r
+ //download helm\r
+ sh """\r
+ echo "installing helm"\r
+ wget https://storage.googleapis.com/kubernetes-helm/helm-v2.8.2-linux-amd64.tar.gz\r
+ tar -xf helm-v2.8.2-linux-amd64.tar.gz\r
+ rm helm-v2.8.2-linux-amd64.tar.gz\r
+ """\r
+ }\r
+\r
+ withCredentials([file(credentialsId: KUBE_CONFIG, variable: 'KUBECONFIG')]) {\r
+\r
+ dir('helm'){\r
+ //check if charts are valid, and then perform dry run, if successful then upgrade/install charts\r
+\r
+ if (PHASES.contains("UNDEPLOY") ) {\r
+ stage 'Undeploy'\r
+ \r
+ sh """\r
+ helm delete --tiller-namespace=$TILLER_NAMESPACE --purge $ARTIFACT_ID\r
+ """\r
+ }\r
+\r
+ //NOTE Double quotes are used below to access groovy variables like artifact_id and tiller_namespace\r
+ if (PHASES.contains("DEPLOY") ){\r
+ stage 'Deploy'\r
+ withCredentials([usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {\r
+\r
+ //Note: KEYFILE_PATH must be within helm/$ARTIFACT_ID and must be relative path starting from that directory\r
+ sh """\r
+\r
+ echo "Remove old key info"\r
+ rm -rf keyfile;\r
+ rm -rf digest.txt\r
+ rm -rf $ARTIFACT_ID/keyfile\r
+\r
+ ls\r
+ echo "Create keyfile and digest"\r
+ java -jar ../aaf_cadi_core_2.1.10_SNAPSHOT.jar keygen keyfile\r
+ java -jar ../aaf_cadi_core_2.1.10_SNAPSHOT.jar digest $PASSWORD keyfile > digest.txt 2>&1\r
+ \r
+ cp keyfile $ARTIFACT_ID\r
+ KEYFILE_PATH=keyfile\r
+ DIGEST="enc:"\r
+ DIGEST+=\$(cat digest.txt)\r
+ echo \$DIGEST\r
+ echo "Validate Yaml"\r
+ helm lint $ARTIFACT_ID\r
+\r
+ echo "View Helm Templates"\r
+ helm template $ARTIFACT_ID --set Secret.aafId=$USERNAME --set Secret.aafPassword=\$DIGEST --set Secret.aafMechPassword=$PASSWORD --set keyfilePath=\$KEYFILE_PATH\r
+\r
+ echo "Perform Dry Run Of Install"\r
+ helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install --dry-run $ARTIFACT_ID $ARTIFACT_ID --set Secret.aafId=$USERNAME --set Secret.aafPassword=\$DIGEST --set Secret.aafMechPassword=$PASSWORD --set keyfilePath=\$KEYFILE_PATH\r
+\r
+ echo "Helm Install/Upgrade"\r
+ helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install $ARTIFACT_ID $ARTIFACT_ID --set Secret.aafId=$USERNAME --set Secret.aafPassword=\$DIGEST --set Secret.aafMechPassword=$PASSWORD --set keyfilePath=\$KEYFILE_PATH\r
+\r
+ rm -rf $ARTIFACT_ID/keyfile\r
+ rm -rf keyfile\r
+ rm -rf digest.txt\r
+ """\r
+ }\r
+ }\r
+\r
+ }\r
+ }\r
+ } \r
+ }\r
+ }\r
+}
\ No newline at end of file
--- /dev/null
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
--- /dev/null
+apiVersion: v1
+appVersion: "1.0"
+description: A Helm chart for OTF secret shared secret files
+name: otf-aaf-credential-generator
+version: 0.1.0
--- /dev/null
+apiVersion: v1\r
+kind: Secret\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+type: Opaque\r
+data:\r
+ cadi_keyfile: {{ .Files.Get .Values.keyfilePath | b64enc }}\r
+ aaf_id: {{ .Values.Secret.aafId | b64enc}}\r
+ aaf_password: {{ .Values.Secret.aafPassword | b64enc}}\r
+ aaf_mech_password: {{ .Values.Secret.aafMechPassword | b64enc}}\r
+ keyfile_secret_path: {{.Values.Secret.keyfileSecretPath | b64enc}}\r
--- /dev/null
+appName: otf-aaf-credential-generator
+
+keyfilePath: keyfile
+
+Secret:
+ keyfileSecretPath: /opt/secret/keyfile
+ aafId: id
+ aafPassword: pass
+ aafMechPassword: mechPass
\ No newline at end of file
--- /dev/null
+#!/usr/bin/env groovy\r
+\r
+\r
+// Jenkins build parameters. NOTE: TILLER_NAMESPACE must match the namespace the\r
+// consuming deployments run in (org-oran-otf; previously misspelled "org-onar-otf"),\r
+// otherwise the generated Secret is not visible to the pods that mount it.\r
+properties([[$class: 'ParametersDefinitionProperty', parameterDefinitions: [\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'PHASE', defaultValue: "BUILD"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'ENV', defaultValue: "dev"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'MECHID', defaultValue: "id"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'KUBE_CONFIG', defaultValue: "kubeConfig-dev"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'TILLER_NAMESPACE', defaultValue: "org-oran-otf"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'PKCS12_CERT', defaultValue: "otf_ssl_pkcs12_dev"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'PKCS12_KEY', defaultValue: "server_ssl_key_store_password"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'PEM_CERT', defaultValue: "otf_ssl_pem_dev"],\r
+        [$class: 'hudson.model.StringParameterDefinition', name: 'PEM_KEY', defaultValue: "otf_ssl_pem_key_dev"]\r
+\r
+\r
+]]])\r
+\r
+\r
+echo "Build branch: ${env.BRANCH_NAME}"\r
+\r
+node("docker"){\r
+ stage 'Checkout'\r
+ checkout scm\r
+ PHASES=PHASE.tokenize( '_' );\r
+ echo "PHASES : " + PHASES\r
+ ARTIFACT_ID="otf-cert-secret-builder"\r
+ echo "Tiller Namespace: " + TILLER_NAMESPACE\r
+\r
+ withEnv(["PATH=${env.PATH}:${tool 'jdk180'}:${env.WORKSPACE}/linux-amd64", "JAVA_HOME=${tool 'jdk180'}","HELM_HOME=${env.WORKSPACE}"]) {\r
+\r
+ echo "PATH=${env.PATH}"\r
+ echo "JAVA_HOME=${env.JAVA_HOME}"\r
+ echo "HELM_HOME=${env.HELM_HOME}"\r
+\r
+ wrap([$class: 'ConfigFileBuildWrapper', managedFiles: [\r
+ [fileId: 'maven-settings.xml', variable: 'MAVEN_SETTINGS']\r
+ ]]) {\r
+\r
+ if (PHASES.contains("DEPLOY") || PHASES.contains("UNDEPLOY")) {\r
+ stage 'Init Helm'\r
+\r
+ //check if helm exists if not install\r
+ if(fileExists('linux-amd64/helm')){\r
+ sh """\r
+ echo "helm is already installed"\r
+ """\r
+ }\r
+ else{\r
+ //download helm\r
+ sh """\r
+ echo "installing helm"\r
+ wget https://storage.googleapis.com/kubernetes-helm/helm-v2.8.2-linux-amd64.tar.gz\r
+ tar -xf helm-v2.8.2-linux-amd64.tar.gz\r
+ rm helm-v2.8.2-linux-amd64.tar.gz\r
+ """\r
+ }\r
+\r
+ withCredentials([file(credentialsId: KUBE_CONFIG, variable: 'KUBECONFIG')]) {\r
+\r
+ dir('helm'){\r
+ //check if charts are valid, and then perform dry run, if successful then upgrade/install charts\r
+\r
+ if (PHASES.contains("UNDEPLOY") ) {\r
+ stage 'Undeploy'\r
+\r
+ sh """\r
+ helm delete --tiller-namespace=$TILLER_NAMESPACE --purge $ARTIFACT_ID\r
+ """\r
+ }\r
+\r
+ //NOTE Double quotes are used below to access groovy variables like artifact_id and tiller_namespace\r
+ if (PHASES.contains("DEPLOY") ){\r
+ stage 'Deploy'\r
+ withCredentials(\r
+ [usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD'),\r
+ file(credentialsId: PKCS12_CERT, variable: 'VAR_PKCS12_CERT'),\r
+ string(credentialsId: PKCS12_KEY, variable: 'VAR_PKCS12_KEY'),\r
+ file(credentialsId: PEM_CERT, variable: 'VAR_PEM_CERT'),\r
+ file(credentialsId: PEM_KEY, variable: 'VAR_PEM_KEY'),\r
+ file(credentialsId: 'PRIVATE_KEY', variable: 'VAR_PRIVATE_KEY'),\r
+ usernamePassword(credentialsId: 'PRIVATE_KEY_USER_PASS', usernameVariable: 'PRIVATE_KEY_USERNAME', passwordVariable: 'PRIVATE_KEY_PASSPHRASE')\r
+ ]) {\r
+\r
+ sh """\r
+\r
+ cp $VAR_PKCS12_CERT $ARTIFACT_ID\r
+ cp $VAR_PEM_CERT $ARTIFACT_ID\r
+ cp $VAR_PEM_KEY $ARTIFACT_ID\r
+ cp $VAR_PRIVATE_KEY $ARTIFACT_ID\r
+ FILE_PKCS12_CERT=`basename $VAR_PKCS12_CERT`\r
+ FILE_PEM_CERT=`basename $VAR_PEM_CERT`\r
+ FILE_PEM_KEY=`basename $VAR_PEM_KEY`\r
+ FILE_PRIVATE_KEY=`basename $VAR_PRIVATE_KEY`\r
+\r
+ echo "Validate Yaml"\r
+ helm lint $ARTIFACT_ID\r
+\r
+ echo "View Helm Templates"\r
+ helm template $ARTIFACT_ID \\r
+ --set Secret.PKCS12_CERT=\$FILE_PKCS12_CERT \\r
+ --set Secret.PKCS12_KEY=$VAR_PKCS12_KEY \\r
+ --set Secret.PEM_CERT=\$FILE_PEM_CERT \\r
+ --set Secret.PEM_KEY=\$FILE_PEM_KEY \\r
+ --set Secret.privateKey.key=\$FILE_PRIVATE_KEY \\r
+ --set Secret.privateKey.username=$PRIVATE_KEY_USERNAME \\r
+ --set Secret.privateKey.passphrase=$PRIVATE_KEY_PASSPHRASE \\r
+\r
+ echo "Perform Dry Run Of Install"\r
+ helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install --dry-run $ARTIFACT_ID $ARTIFACT_ID \\r
+ --set Secret.PKCS12_CERT=\$FILE_PKCS12_CERT \\r
+ --set Secret.PKCS12_KEY=$VAR_PKCS12_KEY \\r
+ --set Secret.PEM_CERT=\$FILE_PEM_CERT \\r
+ --set Secret.PEM_KEY=\$FILE_PEM_KEY \\r
+ --set Secret.privateKey.key=\$FILE_PRIVATE_KEY \\r
+ --set Secret.privateKey.username=$PRIVATE_KEY_USERNAME \\r
+ --set Secret.privateKey.passphrase=$PRIVATE_KEY_PASSPHRASE \\r
+\r
+ echo "Helm Install/Upgrade"\r
+ helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install $ARTIFACT_ID $ARTIFACT_ID \\r
+ --set Secret.PKCS12_CERT=\$FILE_PKCS12_CERT \\r
+ --set Secret.PKCS12_KEY=$VAR_PKCS12_KEY \\r
+ --set Secret.PEM_CERT=\$FILE_PEM_CERT \\r
+ --set Secret.PEM_KEY=\$FILE_PEM_KEY \\r
+ --set Secret.privateKey.key=\$FILE_PRIVATE_KEY \\r
+ --set Secret.privateKey.username=$PRIVATE_KEY_USERNAME \\r
+ --set Secret.privateKey.passphrase=$PRIVATE_KEY_PASSPHRASE \\r
+\r
+ """\r
+ }\r
+ }\r
+\r
+ }\r
+ }\r
+ }\r
+ }\r
+ }\r
+}\r
--- /dev/null
+# Patterns to ignore when building packages.
+# This supports shell glob matching, relative path matching, and
+# negation (prefixed with !). Only one pattern per line.
+.DS_Store
+# Common VCS dirs
+.git/
+.gitignore
+.bzr/
+.bzrignore
+.hg/
+.hgignore
+.svn/
+# Common backup files
+*.swp
+*.bak
+*.tmp
+*~
+# Various IDEs
+.project
+.idea/
+*.tmproj
--- /dev/null
+apiVersion: v1
+appVersion: "1.0"
+description: A Helm chart for OTF certificate secret shared secret files
+name: otf-cert-secret-builder
+version: 0.1.0
--- /dev/null
+apiVersion: v1\r
+kind: Secret\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+type: Opaque\r
+data:\r
+ PKCS12_CERT: {{ .Files.Get .Values.Secret.PKCS12_CERT | b64enc }}\r
+ PKCS12_KEY: {{ .Values.Secret.PKCS12_KEY | b64enc}}\r
+ PEM_CERT: {{ .Files.Get .Values.Secret.PEM_CERT | b64enc}}\r
+ PEM_KEY: {{ .Files.Get .Values.Secret.PEM_KEY | b64enc}}\r
+ private_key: {{ .Files.Get .Values.Secret.privateKey.key | b64enc}}\r
+ private_key_username: {{ .Values.Secret.privateKey.username | b64enc}}\r
+ private_key_passphrase: {{ .Values.Secret.privateKey.passphrase | b64enc}}\r
--- /dev/null
+appName: otf-cert-secret-builder
+
+Secret:
+ PKCS12_CERT: temp
+ PKCS12_KEY: temp
+ PEM_CERT: temp
+ PEM_KEY: temp
+ SSH_PRIVATE_KEY: temp
+ privateKey:
+ username: id
+ passphrase: passphrase
+ key: tmp
--- /dev/null
+otf.pem
+privateKey.pem
+*.log
+*.pem
+/otf
--- /dev/null
+FROM python:2.7\r
+\r
+ARG HTTP_PROXY="localhost:8080"\r
+ARG HTTPS_PROXY="localhost:8080"\r
+ARG http_proxy="localhost:8080"\r
+ARG https_proxy="localhost:8080"\r
+\r
+RUN python --version\r
+\r
+ADD pip-requirements.txt pip-requirements.txt\r
+ADD otfPingTestHead.py otfPingTestHead.py\r
+\r
+RUN mkdir -p /otf/logs\r
+\r
+RUN python -m pip install -r pip-requirements.txt\r
+\r
+ENTRYPOINT ["python", "otfPingTestHead.py"]\r
--- /dev/null
+#!/usr/bin/env groovy\r
+\r
+\r
+properties([[$class: 'ParametersDefinitionProperty', parameterDefinitions: [\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'PHASE', defaultValue: "BUILD"],\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'ENV', defaultValue: "dev"],\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'MECHID', defaultValue: "id_otf_dev"],\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'KUBE_CONFIG', defaultValue: "kubeConfig-dev"],\r
+ [$class: 'hudson.model.StringParameterDefinition', name: 'TILLER_NAMESPACE', defaultValue: "org-oran-otf"]\r
+]]])\r
+\r
+\r
+echo "Build branch: ${env.BRANCH_NAME}"\r
+\r
+node("docker"){\r
+ stage 'Checkout'\r
+ checkout scm\r
+ PHASES=PHASE.tokenize( '_' );\r
+ echo "PHASES : " + PHASES\r
+\r
+\r
+ ARTIFACT_ID="otf-ping-test-head";\r
+ VERSION="Blitzcrank.1.1";\r
+ NAMESPACE="org-oran-otf"\r
+ DOCKER_REGISTRY="registry.hub.docker.io"\r
+\r
+ if( ENV.equalsIgnoreCase("dev") ){\r
+ IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + "/" + ARTIFACT_ID + ":" + VERSION\r
+\r
+ }\r
+ if( ENV.equalsIgnoreCase("prod") || ENV.equalsIgnoreCase("prod-dr")){\r
+ IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + ".prod" + "/" + ARTIFACT_ID + ":" + VERSION\r
+\r
+ }\r
+\r
+ if( ENV.equalsIgnoreCase("st") ){\r
+ IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + ".st" + "/" + ARTIFACT_ID + ":" + VERSION\r
+\r
+ }\r
+\r
+ echo "Artifact: " + IMAGE_NAME\r
+\r
+ withEnv(["PATH=${env.PATH}:${env.WORKSPACE}/linux-amd64", "HELM_HOME=${env.WORKSPACE}"]) {\r
+\r
+ echo "PATH=${env.PATH}"\r
+ echo "HELM_HOME=${env.HELM_HOME}"\r
+\r
+ if (PHASES.contains("BUILD")){\r
+\r
+ stage 'Publish Artifact'\r
+\r
+ withCredentials([usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {\r
+\r
+ echo "Artifact: " + IMAGE_NAME\r
+\r
+ sh """\r
+ docker login $DOCKER_REGISTRY --username $USERNAME --password $PASSWORD\r
+ docker build -t $IMAGE_NAME .\r
+ docker push $IMAGE_NAME\r
+ """\r
+ }\r
+\r
+ }\r
+\r
+ if (PHASES.contains("DEPLOY") || PHASES.contains("UNDEPLOY")) {\r
+\r
+ stage 'Init Helm'\r
+\r
+ //check if helm exists if not install\r
+ if(fileExists('linux-amd64/helm')){\r
+ sh """\r
+ echo "helm is already installed"\r
+ """\r
+ }\r
+ else{\r
+ //download helm\r
+ sh """\r
+ echo "installing helm"\r
+ wget https://storage.googleapis.com/kubernetes-helm/helm-v2.8.2-linux-amd64.tar.gz\r
+ tar -xf helm-v2.8.2-linux-amd64.tar.gz\r
+ rm helm-v2.8.2-linux-amd64.tar.gz\r
+ """\r
+ }\r
+\r
+ withCredentials([file(credentialsId: KUBE_CONFIG, variable: 'KUBECONFIG')]) {\r
+\r
+ dir('helm'){\r
+ //check if charts are valid, and then perform dry run, if successful then upgrade/install charts\r
+\r
+ if (PHASES.contains("UNDEPLOY") ) {\r
+ stage 'Undeploy'\r
+\r
+ sh """\r
+ helm delete --tiller-namespace=$TILLER_NAMESPACE --purge $ARTIFACT_ID\r
+ """\r
+ }\r
+\r
+ //NOTE Double quotes are used below to access groovy variables like artifact_id and tiller_namespace\r
+                    if (PHASES.contains("DEPLOY") ){\r
+                        stage 'Deploy'\r
+                        // Lint, render, dry-run, then upgrade/install the chart with this build's image.\r
+                        withCredentials([usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {\r
+\r
+                        sh """\r
+                            echo "Validate Yaml"\r
+                            helm lint $ARTIFACT_ID\r
+\r
+                            echo "View Helm Templates"\r
+                            helm template $ARTIFACT_ID \\r
+                                --set appName=$ARTIFACT_ID \\r
+                                --set version=$VERSION \\r
+                                --set env=$ENV \\r
+                                --set image=$IMAGE_NAME \\r
+                                --set namespace=$TILLER_NAMESPACE\r
+\r
+                            echo "Perform Dry Run Of Install"\r
+                            helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install --dry-run $ARTIFACT_ID $ARTIFACT_ID \\r
+                                --set appName=$ARTIFACT_ID \\r
+                                --set version=$VERSION \\r
+                                --set env=$ENV \\r
+                                --set image=$IMAGE_NAME \\r
+                                --set namespace=$TILLER_NAMESPACE\r
+\r
+\r
+                            echo "Helm Install/Upgrade"\r
+                            helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install $ARTIFACT_ID $ARTIFACT_ID \\r
+                                --set appName=$ARTIFACT_ID \\r
+                                --set version=$VERSION \\r
+                                --set env=$ENV \\r
+                                --set image=$IMAGE_NAME \\r
+                                --set namespace=$TILLER_NAMESPACE\r
+\r
+                        """\r
+                        }\r
+                    }\r
+\r
+ }\r
+ }\r
+ }\r
+\r
+ }\r
+}\r
--- /dev/null
+Unless otherwise specified, all software contained herein is licensed\r
+under the Apache License, Version 2.0 (the "Software License");\r
+you may not use this software except in compliance with the Software\r
+License. You may obtain a copy of the Software License at\r
+\r
+http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+Unless required by applicable law or agreed to in writing, software\r
+distributed under the Software License is distributed on an "AS IS" BASIS,\r
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+See the Software License for the specific language governing permissions\r
+and limitations under the Software License.\r
+\r
+\r
+\r
+Unless otherwise specified, all documentation contained herein is licensed\r
+under the Creative Commons License, Attribution 4.0 Intl. (the\r
+"Documentation License"); you may not use this documentation except in\r
+compliance with the Documentation License. You may obtain a copy of the\r
+Documentation License at\r
+\r
+https://creativecommons.org/licenses/by/4.0/\r
+\r
+Unless required by applicable law or agreed to in writing, documentation\r
+distributed under the Documentation License is distributed on an "AS IS"\r
+BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+implied. See the Documentation License for the specific language governing\r
+permissions and limitations under the Documentation License.
\ No newline at end of file
--- /dev/null
+# Patterns to ignore when building packages.\r
+# This supports shell glob matching, relative path matching, and\r
+# negation (prefixed with !). Only one pattern per line.\r
+.DS_Store\r
+# Common VCS dirs\r
+.git/\r
+.gitignore\r
+.bzr/\r
+.bzrignore\r
+.hg/\r
+.hgignore\r
+.svn/\r
+# Common backup files\r
+*.swp\r
+*.bak\r
+*.tmp\r
+*~\r
+# Various IDEs\r
+.project\r
+.idea/\r
+*.tmproj\r
--- /dev/null
+apiVersion: v1\r
+appVersion: "1.0"\r
+description: A Helm chart for the Ping Server Virtual Test Head \r
+name: otf-ping-test-head\r
+version: 0.0.1\r
--- /dev/null
+apiVersion: extensions/v1beta1\r
+kind: Deployment\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+ namespace: {{.Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+spec:\r
+ revisionHistoryLimit: 1\r
+ minReadySeconds: 10\r
+ strategy:\r
+ # indicate which strategy we want for rolling update\r
+ type: RollingUpdate\r
+ rollingUpdate:\r
+ maxSurge: 0\r
+ maxUnavailable: 1\r
+ replicas: {{ .Values.replicas}}\r
+ selector:\r
+ matchLabels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ template:\r
+ metadata:\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ spec:\r
+ serviceAccount: default\r
+ volumes:\r
+ - name: {{ .Values.appName}}-cert-volume\r
+ secret:\r
+ secretName: {{.Values.sharedCert}}\r
+ optional: true\r
+ items:\r
+ - key: PEM_CERT\r
+ path: otf.pem\r
+ - key: PEM_KEY\r
+ path: privateKey.pem\r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ persistentVolumeClaim:\r
+ {{if eq .Values.env "prod"}}\r
+ claimName: {{ .Values.pvc.prod | quote }}\r
+ {{ else }}\r
+ claimName: {{ .Values.pvc.dev | quote }}\r
+ {{ end }}\r
+ {{end}}\r
+ containers:\r
+ - name: {{ .Values.appName}}\r
+ image: {{ .Values.image}}\r
+ imagePullPolicy: Always\r
+ ports:\r
+ - name: http\r
+ containerPort: 5000\r
+ nodePort: {{.Values.nodePort}}\r
+ protocol: TCP\r
+ {{ if eq .Values.env "st"}}\r
+ resources:\r
+ limits: \r
+ memory: "512Mi"\r
+ cpu: "500m"\r
+ requests:\r
+ memory: "256Mi"\r
+ cpu: "100m"\r
+ {{else}} \r
+ resources:\r
+ limits:\r
+ memory: "1Gi"\r
+ cpu: "1"\r
+ requests:\r
+ memory: "1Gi"\r
+ cpu: "1"\r
+ {{end}}\r
+ env:\r
+ - name: NAMESPACE\r
+ value: {{.Values.namespace}}\r
+ - name: APP_NAME\r
+ value: {{ .Values.appName}}\r
+ - name: APP_VERSION\r
+ value: {{.Values.version}}\r
+ volumeMounts:\r
+ - name: {{.Values.appName}}-cert-volume\r
+ mountPath: /opt/cert\r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ mountPath: "/otf/logs"\r
+ {{end}} \r
+ livenessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Alive\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ readinessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Ready\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ restartPolicy: Always\r
--- /dev/null
+apiVersion: v1\r
+kind: Service\r
+metadata:\r
+ name: {{ .Values.appName }}\r
+ namespace: {{ .Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
+spec:\r
+ type: NodePort\r
+ ports:\r
+ - name: http\r
+ port: 5000\r
+ protocol: TCP\r
+ nodePort: {{ .Values.nodePort}}\r
+ selector:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
--- /dev/null
+appName: otf-ping-test-head\r
+env: dev\r
+version: 0.0.1-SNAPSHOT\r
+image: otf-ping-test-head:0.0.1-SNAPSHOT\r
+namespace: org-oran-otf\r
+nodePort: 32223\r
+replicas: 1\r
+health : /otf/vth/ping/v1/health\r
+sharedCert: otf-cert-secret-builder\r
+pvc:\r
+ dev: org-oran-otf-dev-logs-pv\r
+ prod: org-oran-otf-prod-logs-pv\r
--- /dev/null
+Add certificates here. This directory requires two PEM files: a certificate (otf.pem) and its private key.
--- /dev/null
+# Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+################################################################################\r
+\r
+from flask import Flask, request, make_response, jsonify, g\r
+import json\r
+import uuid\r
+import traceback\r
+import pyping\r
+import paramiko\r
+import socket\r
+import os\r
+import subprocess\r
+import datetime\r
+import logging\r
+from logging import FileHandler\r
+\r
+#redirect http to https\r
+app = Flask(__name__)\r
+\r
+\r
+# Prevents print statement every time an endpoint is triggered.\r
+logging.getLogger("werkzeug").setLevel(logging.WARNING)\r
+\r
+def unix_time_millis(dt):\r
+    # Return *dt* as milliseconds since the Unix epoch (1970-01-01 00:00 UTC).\r
+    # NOTE(review): callers pass naive datetime.datetime.now() (local time), so\r
+    # absolute values may be offset from true UTC; durations (end - start) are\r
+    # still correct because both endpoints share the same offset.\r
+    epoch = datetime.datetime.utcfromtimestamp(0)\r
+    return (dt - epoch).total_seconds() * 1000.0\r
+\r
+def pingServer(targetHost):\r
+    # Ping *targetHost* once with the system `ping` utility.\r
+    # Returns the combined stdout/stderr text on success, or None when ping\r
+    # exits non-zero (unreachable host, resolution failure, ...); the failure\r
+    # is logged rather than raised so the endpoint can report it in-band.\r
+    try:\r
+        response = subprocess.check_output(\r
+            ['ping', '-c', '1', targetHost], # execute the ping command\r
+            stderr = subprocess.STDOUT, # retrieve all the output\r
+            universal_newlines = True # return as string\r
+        )\r
+    except subprocess.CalledProcessError as e:\r
+        app.logger.error(e)\r
+        app.logger.error('failed getting response from ' + str(targetHost))\r
+        response = None\r
+\r
+    return response\r
+\r
+@app.route("/otf/vth/ping/v1/health", methods = ['GET'])\r
+def getHealth():\r
+    # Health endpoint probed by the Kubernetes liveness/readiness checks\r
+    # (path configured as `health` in the chart values).\r
+    return "UP"\r
+\r
+@app.route('/otf/vth/sample/v1', methods = ['POST'])\r
+def sample():\r
+    # Smoke-test endpoint: returns a canned successful vthResponse with the\r
+    # handler's elapsed time, without pinging anything.\r
+    startTime = unix_time_millis(datetime.datetime.now())\r
+    responseData = {\r
+        "vthResponse": {\r
+            "testDurationMS": "",\r
+            "dateTimeUTC": "",\r
+            "abstractMessage": "Success",\r
+            "resultData": {}\r
+        }\r
+    }\r
+    responseData['vthResponse']['dateTimeUTC'] = str(datetime.datetime.now())\r
+    endTime = unix_time_millis(datetime.datetime.now())\r
+    responseData['vthResponse']['testDurationMS'] = endTime - startTime\r
+    responseData['vthResponse']['resultData']['result'] = "Executed test successfully in " + str(responseData['vthResponse']['testDurationMS']) + " milliseconds."\r
+    app.logger.info('hit sample endpoint. response: ' + str(responseData))\r
+    return jsonify(responseData)\r
+\r
+@app.route('/otf/vth/ping/v1', methods = ['POST'])\r
+def testHead():\r
+ responseData = {\r
+ "vthResponse": {\r
+ "testDurationMS": "",\r
+ "dateTimeUTC": "",\r
+ "abstractMessage": "",\r
+ "resultData": {}\r
+ }\r
+ }\r
+\r
+ responseData['vthResponse']['dateTimeUTC'] = str(datetime.datetime.now())\r
+ startTime = unix_time_millis(datetime.datetime.now())\r
+\r
+ try:\r
+ if not request.is_json:\r
+ raise ValueError('Request must be a valid JSON object.')\r
+\r
+ requestData = request.get_json()\r
+ app.logger.info('ping endpoint. request: ' + str(requestData))\r
+\r
+ if 'vthInput' in requestData:\r
+ vthInput = requestData['vthInput']\r
+ expectedKeys = ['vthName', 'testConfig', 'testData']\r
+ receivedKeys = vthInput.keys();\r
+ testData = ""\r
+ testConfig = ""\r
+\r
+ if sorted(expectedKeys) == sorted(receivedKeys):\r
+ testData = vthInput['testData']\r
+\r
+ # Check if a target host is provided.\r
+ if 'targetHost' not in testData:\r
+ raise KeyError('targetHost is required to ping server.')\r
+\r
+ # Check if the target host IP address is in the correct format.\r
+ # This excludes IPv6. Use IPy to check both IPv6/IPv4.\r
+ try:\r
+ socket.inet_aton(testData['targetHost'])\r
+ except socket.error:\r
+ raise ValueError('Invalid IP address assigned to targetHost')\r
+\r
+ # Don't use a jump server by default.\r
+ if 'useJumpServer' not in testData:\r
+ testData['useJumpServer'] = False\r
+ else:\r
+ raise ValueError('Missing one or more expected keys: {expectedKeys}.'.format(expectedKeys = expectedKeys))\r
+\r
+ if testData['useJumpServer'] == False:\r
+ responseData['vthResponse']['resultData']['result'] = pingServer(testData['targetHost'])\r
+ else:\r
+ testConfig = vthInput['testConfig']\r
+\r
+ if 'jumpServer' not in testConfig:\r
+ raise KeyError('Cannot use jump server when jumpServer key is missing.')\r
+\r
+ jumpServer = testConfig['jumpServer']\r
+\r
+ if 'host' not in testConfig['jumpServer']:\r
+ raise KeyError('Missing host value in jumpServer.')\r
+\r
+ host = testConfig['jumpServer']['host']\r
+\r
+ if 'credentials' not in jumpServer:\r
+ raise KeyError('Missing credentials in jumpServer.')\r
+\r
+ credentials = jumpServer['credentials']\r
+\r
+ if 'username' not in credentials:\r
+ raise KeyError('Missing username in credentials.')\r
+\r
+ username = credentials['username']\r
+\r
+ if 'password' not in credentials:\r
+ raise KeyError('Missing password in credentials.')\r
+\r
+ password = credentials['password']\r
+\r
+ ssh = paramiko.SSHClient()\r
+ ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r
+ ssh.connect(host, username = username, password = password)\r
+ command = "ping -c 1 " + testData['targetHost']\r
+ ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)\r
+ output = ssh_stdout.read()\r
+ error = ssh_stderr.read()\r
+\r
+ responseData['vthResponse']['resultData']['result'] = output\r
+ else:\r
+ raise KeyError('Missing vthInput parameter(s)')\r
+\r
+ # record the end time of the test\r
+ endTime = unix_time_millis(datetime.datetime.now())\r
+\r
+ # Calculate the total duration of the test\r
+ totalTime = endTime - startTime\r
+\r
+ # Set the test duration in the result\r
+ responseData['vthResponse']['testDurationMS'] = totalTime\r
+\r
+ responseData['vthResponse']['abstractMessage'] = 'Result from pinging {host}'.format(host = testData['targetHost'])\r
+ app.logger.info('ping endpoint. response: ' + str(responseData))\r
+\r
+ return jsonify(responseData)\r
+ except Exception as e:\r
+ app.logger.info(e)\r
+ responseData['vthResponse']['abstractMessage'] = str(e)\r
+ resp = make_response(json.dumps(responseData))\r
+ endTime = unix_time_millis(datetime.datetime.now())\r
+\r
+ totalTime = endTime - startTime\r
+ return resp\r
+\r
+if __name__ == '__main__':\r
+ logHandler = FileHandler('otf/logs/pingVTH.log', mode='a')\r
+ # logHandler = FileHandler('pingVTH.log', mode='a')\r
+ logHandler.setLevel(logging.INFO)\r
+ app.logger.setLevel(logging.INFO)\r
+ app.logger.addHandler(logHandler)\r
+ context = ('opt/cert/otf.pem', 'opt/cert/privateKey.pem')\r
+ app.run(debug = False, host = '0.0.0.0', port = 5000, ssl_context = context)\r
+ # app.run(debug = False, host = '0.0.0.0', port = 5000)\r
--- /dev/null
+flask\r
+flask-cors\r
+pyping\r
+paramiko\r
--- /dev/null
+# Application-specific stuff\r
+files/\r
+temp/\r
+opt/cert/*.pem\r
+*.log\r
+\r
+envScript.sh\r
+\r
+# Created by https://www.gitignore.io/api/python,pycharm\r
+# Edit at https://www.gitignore.io/?templates=python,pycharm\r
+\r
+### PyCharm ###\r
+# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm\r
+# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839\r
+\r
+# User-specific stuff\r
+.idea/**/workspace.xml\r
+.idea/**/tasks.xml\r
+.idea/**/usage.statistics.xml\r
+.idea/**/dictionaries\r
+.idea/**/shelf\r
+\r
+# Generated files\r
+.idea/**/contentModel.xml\r
+\r
+# Sensitive or high-churn files\r
+.idea/**/dataSources/\r
+.idea/**/dataSources.ids\r
+.idea/**/dataSources.local.xml\r
+.idea/**/sqlDataSources.xml\r
+.idea/**/dynamic.xml\r
+.idea/**/uiDesigner.xml\r
+.idea/**/dbnavigator.xml\r
+\r
+# Gradle\r
+.idea/**/gradle.xml\r
+.idea/**/libraries\r
+\r
+# Gradle and Maven with auto-import\r
+# When using Gradle or Maven with auto-import, you should exclude module files,\r
+# since they will be recreated, and may cause churn. Uncomment if using\r
+# auto-import.\r
+# .idea/modules.xml\r
+# .idea/*.iml\r
+# .idea/modules\r
+\r
+# CMake\r
+cmake-build-*/\r
+\r
+# Mongo Explorer plugin\r
+.idea/**/mongoSettings.xml\r
+\r
+# File-based project format\r
+*.iws\r
+\r
+# IntelliJ\r
+out/\r
+\r
+# mpeltonen/sbt-idea plugin\r
+.idea_modules/\r
+\r
+# JIRA plugin\r
+atlassian-ide-plugin.xml\r
+\r
+# Cursive Clojure plugin\r
+.idea/replstate.xml\r
+\r
+# Crashlytics plugin (for Android Studio and IntelliJ)\r
+com_crashlytics_export_strings.xml\r
+crashlytics.properties\r
+crashlytics-build.properties\r
+fabric.properties\r
+\r
+# Editor-based Rest Client\r
+.idea/httpRequests\r
+\r
+# Android studio 3.1+ serialized cache file\r
+.idea/caches/build_file_checksums.ser\r
+\r
+### PyCharm Patch ###\r
+# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721\r
+\r
+# *.iml\r
+# modules.xml\r
+# .idea/misc.xml\r
+# *.ipr\r
+\r
+# Sonarlint plugin\r
+.idea/sonarlint\r
+\r
+### Python ###\r
+# Byte-compiled / optimized / DLL files\r
+__pycache__/\r
+*.py[cod]\r
+*$py.class\r
+\r
+# C extensions\r
+*.so\r
+\r
+# Distribution / packaging\r
+.Python\r
+build/\r
+develop-eggs/\r
+dist/\r
+downloads/\r
+eggs/\r
+.eggs/\r
+lib/\r
+lib64/\r
+parts/\r
+sdist/\r
+var/\r
+wheels/\r
+share/python-wheels/\r
+*.egg-info/\r
+.installed.cfg\r
+*.egg\r
+MANIFEST\r
+\r
+# PyInstaller\r
+# Usually these files are written by a python script from a template\r
+# before PyInstaller builds the exe, so as to inject date/other infos into it.\r
+*.manifest\r
+*.spec\r
+\r
+# Installer logs\r
+pip-log.txt\r
+pip-delete-this-directory.txt\r
+\r
+# Unit test / coverage reports\r
+htmlcov/\r
+.tox/\r
+.nox/\r
+.coverage\r
+.coverage.*\r
+.cache\r
+nosetests.xml\r
+coverage.xml\r
+*.cover\r
+.hypothesis/\r
+.pytest_cache/\r
+\r
+# Translations\r
+*.mo\r
+*.pot\r
+\r
+# Django stuff:\r
+*.log\r
+local_settings.py\r
+db.sqlite3\r
+\r
+# Flask stuff:\r
+instance/\r
+.webassets-cache\r
+\r
+# Scrapy stuff:\r
+.scrapy\r
+\r
+# Sphinx documentation\r
+docs/_build/\r
+\r
+# PyBuilder\r
+target/\r
+\r
+# Jupyter Notebook\r
+.ipynb_checkpoints\r
+\r
+# IPython\r
+profile_default/\r
+ipython_config.py\r
+\r
+# pyenv\r
+.python-version\r
+\r
+# celery beat schedule file\r
+celerybeat-schedule\r
+\r
+# SageMath parsed files\r
+*.sage.py\r
+\r
+# Environments\r
+.env\r
+.venv\r
+env/\r
+venv/\r
+ENV/\r
+env.bak/\r
+venv.bak/\r
+\r
+# Spyder project settings\r
+.spyderproject\r
+.spyproject\r
+\r
+# Rope project settings\r
+.ropeproject\r
+\r
+# mkdocs documentation\r
+/site\r
+\r
+# mypy\r
+.mypy_cache/\r
+.dmypy.json\r
+dmypy.json\r
+\r
+# Pyre type checker\r
+.pyre/\r
+\r
+### Python Patch ###\r
+.venv/\r
+\r
+# End of https://www.gitignore.io/api/python,pycharm\r
--- /dev/null
FROM python:2.7

# Build-time proxy settings (override with --build-arg as needed).
# Fix: the original had trailing line-continuation backslashes on the
# first three ARG lines, which merged all four ARG instructions into a
# single malformed instruction.
ARG HTTP_PROXY="http://localhost:8080"
ARG HTTPS_PROXY="http://localhost:8080"
ARG http_proxy="http://localhost:8080"
ARG https_proxy="http://localhost:8080"

# Sanity check of the interpreter baked into the base image.
RUN python --version

# Copy the dependency manifest and the application sources into the image.
ADD pip-requirements.txt pip-requirements.txt
ADD run.py run.py
ADD app app

RUN python -m pip install -r pip-requirements.txt

# Log directory expected by the platform's logging volume mount.
RUN mkdir -p /otf/logs

ENTRYPOINT ["python", "run.py"]
\ No newline at end of file
--- /dev/null
#!/usr/bin/env groovy

// Jenkins pipeline for the OTF robot virtual test head.
// PHASE is an underscore-separated list of phases to run (e.g. "BUILD_DEPLOY");
// ENV selects the target environment (dev / st / prod / prod-dr).

properties([[$class: 'ParametersDefinitionProperty', parameterDefinitions: [
    [$class: 'hudson.model.StringParameterDefinition', name: 'PHASE', defaultValue: "BUILD"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'ENV', defaultValue: "dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'MECHID', defaultValue: "id_otf_dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'KUBE_CONFIG', defaultValue: "kubeConfig-dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'OTF_MONGO_DB', defaultValue: "otf_mongo_dev_db"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'TILLER_NAMESPACE', defaultValue: "org-oran-otf"]
]]])

echo "Build branch: ${env.BRANCH_NAME}"

node("docker"){
    stage 'Checkout'
    checkout scm
    PHASES=PHASE.tokenize( '_' );
    echo "PHASES : " + PHASES

    // Image coordinates; the registry namespace varies per environment below.
    ARTIFACT_ID="otf-robot-test-head";
    VERSION="Blitzcrank.1.1";
    NAMESPACE="org.oran.otf"
    DOCKER_REGISTRY="registry.hub.docker.io"

    if( ENV.equalsIgnoreCase("dev") ){
        IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + "/" + ARTIFACT_ID + ":" + VERSION

    }
    if( ENV.equalsIgnoreCase("prod") || ENV.equalsIgnoreCase("prod-dr")){
        IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + ".prod" + "/" + ARTIFACT_ID + ":" + VERSION

    }

    if( ENV.equalsIgnoreCase("st") ){
        IMAGE_NAME=DOCKER_REGISTRY + "/" + NAMESPACE + ".st" + "/" + ARTIFACT_ID + ":" + VERSION

    }

    echo "Artifact: " + IMAGE_NAME

    // Put the helm binary (downloaded below into linux-amd64/) on PATH.
    withEnv(["PATH=${env.PATH}:${env.WORKSPACE}/linux-amd64", "HELM_HOME=${env.WORKSPACE}"]) {

        echo "PATH=${env.PATH}"
        echo "HELM_HOME=${env.HELM_HOME}"

        if (PHASES.contains("BUILD")){

            stage 'Publish Artifact'

            withCredentials([usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {

                echo "Artifact: " + IMAGE_NAME

                // NOTE(review): Groovy interpolation of $USERNAME/$PASSWORD in a
                // double-quoted sh block can expose credentials in the build log;
                // single-quoted sh with shell env expansion is the safer pattern.
                sh """
                    docker login $DOCKER_REGISTRY --username $USERNAME --password $PASSWORD
                    docker build -t $IMAGE_NAME .
                    docker push $IMAGE_NAME
                """
            }

        }

        if (PHASES.contains("DEPLOY") || PHASES.contains("UNDEPLOY")) {

            stage 'Init Helm'

            //check if helm exists if not install
            if(fileExists('linux-amd64/helm')){
                sh """
                    echo "helm is already installed"
                """
            }
            else{
                //download helm
                sh """
                    echo "installing helm"
                    wget https://storage.googleapis.com/kubernetes-helm/helm-v2.8.2-linux-amd64.tar.gz
                    tar -xf helm-v2.8.2-linux-amd64.tar.gz
                    rm helm-v2.8.2-linux-amd64.tar.gz
                """
            }

            withCredentials([file(credentialsId: KUBE_CONFIG, variable: 'KUBECONFIG')]) {

                dir('helm'){
                    //check if charts are valid, and then perform dry run, if successful then upgrade/install charts

                    if (PHASES.contains("UNDEPLOY") ) {
                        stage 'Undeploy'

                        sh """
                            helm delete --tiller-namespace=$TILLER_NAMESPACE --purge $ARTIFACT_ID
                        """
                    }

                    //NOTE Double quotes are used below to access groovy variables like artifact_id and tiller_namespace
                    if (PHASES.contains("DEPLOY") ){
                        stage 'Deploy'
                        withCredentials([
                            usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD'),
                            usernamePassword(credentialsId: OTF_MONGO_DB, usernameVariable: 'USERNAME_MONGO', passwordVariable: 'PASSWORD_MONGO')
                        ]) {

                            // lint -> template -> dry run -> real upgrade, all with the
                            // same --set overrides so what is installed is what was checked.
                            sh """
                                echo "Validate Yaml"
                                helm lint $ARTIFACT_ID

                                echo "View Helm Templates"
                                helm template $ARTIFACT_ID \
                                    --set appName=$ARTIFACT_ID \
                                    --set version=$VERSION \
                                    --set image=$IMAGE_NAME \
                                    --set namespace=$TILLER_NAMESPACE \
                                    --set env=$ENV \
                                    --set mongo.username=$USERNAME_MONGO \
                                    --set mongo.password=$PASSWORD_MONGO

                                echo "Perform Dry Run Of Install"
                                helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install --dry-run $ARTIFACT_ID $ARTIFACT_ID \
                                    --set appName=$ARTIFACT_ID \
                                    --set version=$VERSION \
                                    --set image=$IMAGE_NAME \
                                    --set namespace=$TILLER_NAMESPACE \
                                    --set env=$ENV \
                                    --set mongo.username=$USERNAME_MONGO \
                                    --set mongo.password=$PASSWORD_MONGO


                                echo "Helm Install/Upgrade"
                                helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install $ARTIFACT_ID $ARTIFACT_ID \
                                    --set appName=$ARTIFACT_ID \
                                    --set version=$VERSION \
                                    --set image=$IMAGE_NAME \
                                    --set namespace=$TILLER_NAMESPACE \
                                    --set env=$ENV \
                                    --set mongo.username=$USERNAME_MONGO \
                                    --set mongo.password=$PASSWORD_MONGO

                            """
                        }
                    }

                }
            }
        }

    }
}
--- /dev/null
+Unless otherwise specified, all software contained herein is licensed\r
+under the Apache License, Version 2.0 (the "Software License");\r
+you may not use this software except in compliance with the Software\r
+License. You may obtain a copy of the Software License at\r
+\r
+http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+Unless required by applicable law or agreed to in writing, software\r
+distributed under the Software License is distributed on an "AS IS" BASIS,\r
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+See the Software License for the specific language governing permissions\r
+and limitations under the Software License.\r
+\r
+\r
+\r
+Unless otherwise specified, all documentation contained herein is licensed\r
+under the Creative Commons License, Attribution 4.0 Intl. (the\r
+"Documentation License"); you may not use this documentation except in\r
+compliance with the Documentation License. You may obtain a copy of the\r
+Documentation License at\r
+\r
+https://creativecommons.org/licenses/by/4.0/\r
+\r
+Unless required by applicable law or agreed to in writing, documentation\r
+distributed under the Documentation License is distributed on an "AS IS"\r
+BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+implied. See the Documentation License for the specific language governing\r
+permissions and limitations under the Documentation License.
\ No newline at end of file
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+from flask import Flask\r
+from app.routes import *\r
+from app import database\r
+\r
+\r
def create_app():
    """Application factory: build and configure the Flask application.

    Reads the dotted path of a configuration class from the APP_SETTINGS
    environment variable (e.g. "app.configuration.DevelopmentConfiguration"),
    prepares the working-folder config keys used by the robot processor,
    and registers every blueprint route under APPLICATION_ROOT.

    Returns:
        The configured Flask application instance.

    Raises:
        KeyError: if the APP_SETTINGS environment variable is not set.
    """
    # Fix: `os` was previously reachable only via the `from app.routes
    # import *` wildcard; import it explicitly so this module does not
    # silently depend on another module's imports.
    import os

    # create Flask application
    app = Flask(__name__)

    # apply configuration from the class named by APP_SETTINGS
    app.config.from_object(os.environ['APP_SETTINGS'])

    # Folders used by the robot processor: files/data for downloaded test
    # archives, files/results for robot output.
    app.config['g_database'] = None
    app.config['g_base_folder'] = os.path.join(os.getcwd(), 'files')
    app.config['g_data_folder'] = os.path.join(app.config['g_base_folder'], 'data')
    app.config['g_working_folder'] = os.path.join(app.config['g_base_folder'], 'results')

    # register all routes on the APPLICATION_ROOT
    app.register_blueprint(routes, url_prefix=app.config['APPLICATION_ROOT'])

    return app
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+import os\r
+\r
+basedir = os.path.abspath(os.path.dirname(__file__))\r
+\r
+\r
class Configuration(object):
    """Base configuration shared by all environments."""

    # Flask debug mode; subclasses may enable it.
    DEBUG = False
    # Flask test mode; off by default.
    TESTING = False
    CSRF_ENABLED = False
    # External URLs are generated with https.
    PREFERRED_URL_SCHEME = 'https'
    # URL prefix under which the blueprint routes are registered.
    APPLICATION_ROOT = '/otf/vth/robot'
+\r
+\r
class ProductionConfiguration(Configuration):
    """Configuration for production deployments (debug explicitly off)."""
    DEBUG = False
+\r
+\r
class DevelopmentConfiguration(Configuration):
    """Configuration for development deployments (debug enabled)."""
    DEVELOPMENT = True
    DEBUG = True
+\r
+\r
class TestingConfiguration(Configuration):
    """Configuration for automated testing."""
    DEVELOPMENT = True
    DEBUG = True
    # Fix: a testing configuration should enable Flask's TESTING flag
    # (the base class pins it to False) so test clients get proper error
    # propagation instead of rendered error pages.
    TESTING = True
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+import os\r
+from urllib import quote_plus\r
+\r
+from pymongo import MongoClient\r
+from pymongo.errors import ConnectionFailure\r
+\r
+\r
class DatabaseConfiguration:
    """Connection holder for the OTF MongoDB replica set.

    All connection parameters are read from OTF_MONGO_* environment
    variables at construction time.
    """

    def __init__(self):
        # Fix: define the attribute up front so get_database() returns
        # None instead of raising AttributeError when the connection
        # attempt below fails.
        self.database = None

        # read environment variables containing information for the MongoDB replica set.
        MONGO_HOST = os.environ['OTF_MONGO_HOSTS']
        MONGO_USERNAME = os.environ['OTF_MONGO_USERNAME']
        MONGO_PASSWORD = os.environ['OTF_MONGO_PASSWORD']
        MONGO_REPLICA_SET = os.environ['OTF_MONGO_REPLICASET']
        MONGO_DATABASE = os.environ['OTF_MONGO_DATABASE']

        # form the connection string for connection to a MongoDB replica set.
        # Credentials are URL-quoted per the MongoDB URI rules; MONGO_HOST is
        # expected to end with "/" so appending the database name is valid.
        uri = "mongodb://%s:%s@%s?replicaSet=%s" % (
            quote_plus(MONGO_USERNAME),
            quote_plus(MONGO_PASSWORD),
            MONGO_HOST + MONGO_DATABASE,
            MONGO_REPLICA_SET
        )

        client = MongoClient(uri)

        try:
            # The ismaster command is cheap and does not require auth.
            client.admin.command('ismaster')
            print("Established connection to MongoDB.")
            self.database = client[MONGO_DATABASE]
        except ConnectionFailure:
            # self.database stays None; callers should check get_database().
            print("Failed to initialize connection to MongoDB.")

    def set_database(self, database):
        """Replace the active database handle (useful for tests)."""
        self.database = database

    def get_database(self):
        """Return the active database handle, or None if connecting failed."""
        return self.database
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
from flask import Blueprint

# Single blueprint shared by all route modules; the app factory registers
# it on the application under APPLICATION_ROOT.
routes = Blueprint('routes', __name__)

# Route modules are imported after the blueprint exists so that their
# @routes.route decorators can attach to it (avoids a circular import).
from .health import *
from .robot_processor import *
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+from flask import jsonify\r
+from . import routes\r
+\r
+\r
# Bug fix: Werkzeug URL rules must begin with a leading slash; the original
# "v1/health" raises ValueError at blueprint registration. With the slash the
# full path is APPLICATION_ROOT + /v1/health, matching the Helm liveness and
# readiness probe path in values.yaml.
@routes.route("/v1/health", methods=['GET'])
def health():
    """Health-check endpoint; always reports the service as UP."""
    return jsonify({'status': 200, 'message': 'UP'})
\ No newline at end of file
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+import shutil\r
+import uuid\r
+import zipfile\r
+\r
+import gridfs\r
+from bson import ObjectId\r
+from flask import request, make_response, jsonify, current_app\r
+from gridfs import NoFile\r
+from robot import run\r
+\r
+from . import routes\r
+from .. import database\r
+from ..utils import *\r
+\r
# Module-level database connection, created once when this module is imported.
db_instance = database.DatabaseConfiguration()
+\r
+\r
def verify_directories():
    """Ensure the base, data and results folders exist, creating any missing ones."""
    # retrieve the real app object (not the proxy) to read the config we created
    flask_app = current_app._get_current_object()

    # Base folder first so the nested data/results folders can be created after it.
    for config_key in ('g_base_folder', 'g_data_folder', 'g_working_folder'):
        folder = flask_app.config[config_key]
        if not os.path.isdir(folder):
            os.makedirs(folder)
+\r
+\r
@routes.route("/v1", methods=['POST'])
def robot():
    """Run a robot test suite fetched from GridFS and store its results.

    Expects a JSON body shaped like
        {"vthInput": {"testData": {"robotFileId": "<GridFS ObjectId>"}}}.
    Downloads the referenced zip archive of robot tests, extracts and runs
    them, zips the output, stores it back into GridFS, and returns a
    vthResponse JSON payload containing the robot status code, the result
    file id, and the run duration in milliseconds.
    """
    # Real application object (not the request-local proxy).
    app = current_app._get_current_object()

    # Response skeleton; fields are filled in as the run progresses.
    response_data = {
        "vthResponse": {
            "testDurationMS": "",
            "dateTimeUTC": str(datetime.datetime.now()),
            "abstractMessage": "",
            "resultData": {}
        }
    }

    start_time = unix_time_millis(datetime.datetime.now())

    try:
        if not request.is_json:
            raise ValueError('Invalid JSON object.')

        # get json data from the request
        request_data = request.get_json()

        # get values for expected keys (try_get_json_value raises KeyError
        # with the offending payload when a key is missing)
        vth_input = try_get_json_value('vthInput', request_data)
        test_data = try_get_json_value('testData', vth_input)
        robot_file_id = try_get_json_value('robotFileId', test_data)

        # set up a GridFS to access the database
        db = db_instance.get_database()
        fs = gridfs.GridFS(db)

        # try to find a file using the supplied robot_file_id
        # (raises gridfs.NoFile, handled below)
        compressed_file = fs.get(ObjectId(robot_file_id))

        # create the directories used during robot processing if they don't exist
        verify_directories()

        # generate a folder named by a uuid to organize data for each request
        # NOTE(review): uuid4().get_hex() is the Python 2 uuid API.
        random_uuid = uuid.uuid4().get_hex()
        data_dir = os.path.join(app.config['g_data_folder'], random_uuid)
        os.mkdir(data_dir)

        # write the downloaded archive into this request's data folder
        # NOTE(review): .read().__str__() relies on Python 2 str/bytes
        # equivalence; the explicit f.close() inside "with" is redundant.
        with open(os.path.join(data_dir, compressed_file.name), 'wb') as f:
            f.write(compressed_file.read().__str__())
            f.close()

        with zipfile.ZipFile(os.path.join(data_dir, compressed_file.name)) as zip_ref:
            # Create a temporary folder for storing extracted test file(s)
            test_dir = os.path.join(app.config['g_working_folder'], random_uuid)
            os.mkdir(test_dir)

            # Create a separate folder for the output files, so they can be compressed and sent back to the TCU
            test_output_dir = os.path.join(test_dir, 'output')
            os.mkdir(test_output_dir)

            # Extract the robot tests into the temporary directory
            zip_ref.extractall(test_dir)

            # Run the robot tests with the outputdir pointed to the temporary directory
            return_code = run(os.path.join(test_dir), outputdir=os.path.join(test_dir, 'output'))

            # this path is hardcoded so the entire system path isn't included in the zip
            path = './files/results/{uuid}/output'.format(uuid=random_uuid)
            zip_file = zipfile.ZipFile(path + '.zip', 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
            zip_dir(path, zip_file)
            zip_file.close()

            # save the results to the database
            zf = open(path + '.zip', 'rb')
            result_id = fs.put(zf, filename='output.zip', contentType='application/zip')
            zf.close()

            # report the robot status code, the stored result id, and a
            # human-readable summary of the run
            response_data['vthResponse']['resultData']['robotStatusCode'] = return_code
            response_data['vthResponse']['resultData']['robotResultFileId'] = str(result_id)
            response_data['vthResponse']['abstractMessage'] = resolve_robot_status_code(return_code)

            # delete data from the local disk
            shutil.rmtree(path.replace('/output', ''))
            shutil.rmtree(data_dir)

            # record the end time of the test
            end_time = unix_time_millis(datetime.datetime.now())

            # Calculate the total duration of the test
            total_time = end_time - start_time

            # Set the test duration in the result
            response_data['vthResponse']['testDurationMS'] = total_time

            return jsonify(response_data)
    except NoFile as e:
        # this exception can only occur if robot_file_id is set to something, so don't worry about reference precedence.
        end_time = unix_time_millis(datetime.datetime.now())
        total_time = end_time - start_time

        response_data['vthResponse']['testDurationMS'] = ''
        response_data['vthResponse']['abstractMessage'] = \
            'An exception occurred after running for {totalTime} milliseconds. ' \
            'A file with _id {id} was not found in the collection.'.format(id=robot_file_id, totalTime=total_time)

        response = make_response(json.dumps(response_data))
        return response

    except Exception as e:
        # Catch-all boundary: log the exception and return its text in the
        # vthResponse instead of letting Flask produce a 500 page.
        app.logger.error(e)
        end_time = unix_time_millis(datetime.datetime.now())
        total_time = end_time - start_time

        response_data['vthResponse']['testDurationMS'] = ''
        response_data['vthResponse']['abstractMessage'] = \
            'An exception occurred after running for {totalTime} milliseconds. ' \
            'Exception: {exception}.'.format(exception=str(e), totalTime=total_time)

        response = make_response(json.dumps(response_data))

        return response
--- /dev/null
+""" Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+#############################################################################"""\r
+\r
+\r
+import os\r
+import datetime\r
+import json\r
+\r
+\r
def unix_time_millis(dt):
    """Convert a naive UTC datetime to milliseconds since the Unix epoch."""
    seconds_since_epoch = (dt - datetime.datetime.utcfromtimestamp(0)).total_seconds()
    return seconds_since_epoch * 1000.0
+\r
+\r
def zip_dir(path, zip_handle):
    """Recursively add every file under *path* to the open zip handle.

    Archive member names keep the path prefix exactly as walked.
    """
    for dirpath, _dirnames, filenames in os.walk(path):
        for filename in filenames:
            zip_handle.write(os.path.join(dirpath, filename))
+\r
+\r
def try_get_json_value(key, data):
    """Return the value stored under *key* in *data*.

    Raises:
        KeyError: when the key is absent; the message includes the
            serialized payload for easier debugging.
    """
    if key in data:
        return data[key]
    raise KeyError('The key {key} is not in {data}.'
                   .format(key=key, data=json.dumps(data)))
+\r
+\r
def resolve_robot_status_code(code):
    """Map a Robot Framework return code to a human-readable message.

    Codes 1-249 encode the number of failed tests; the remaining codes
    carry special meanings, and anything else is reported as invalid.
    """
    special_codes = {
        0: 'All critical tests passed.',
        250: '250 or more critical failures.',
        251: 'Help or version information printed.',
        252: 'Invalid test data or command line options.',
        253: 'Test execution stopped by user.',
        255: 'Unexpected internal error.',
    }

    if code in special_codes:
        return special_codes[code]
    if 0 <= code <= 249:
        return '{numTestsFailed} test(s) failed.'.format(numTestsFailed=code)
    return 'Invalid robot status code.'
+\r
+\r
+\r
--- /dev/null
+# Patterns to ignore when building packages.\r
+# This supports shell glob matching, relative path matching, and\r
+# negation (prefixed with !). Only one pattern per line.\r
+.DS_Store\r
+# Common VCS dirs\r
+.git/\r
+.gitignore\r
+.bzr/\r
+.bzrignore\r
+.hg/\r
+.hgignore\r
+.svn/\r
+# Common backup files\r
+*.swp\r
+*.bak\r
+*.tmp\r
+*~\r
+# Various IDEs\r
+.project\r
+.idea/\r
+*.tmproj\r
--- /dev/null
+apiVersion: v1\r
+appVersion: "1.0"\r
+description: A Helm chart for the Robot Virtual Test Head\r
+name: otf-robot-test-head\r
+version: 0.0.5-SNAPSHOT\r
--- /dev/null
+apiVersion: extensions/v1beta1\r
+kind: Deployment\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+ namespace: {{.Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+spec:\r
+ revisionHistoryLimit: 1\r
+ minReadySeconds: 10\r
+ strategy:\r
+ # indicate which strategy we want for rolling update\r
+ type: RollingUpdate\r
+ rollingUpdate:\r
+ maxSurge: 0\r
+ maxUnavailable: 1\r
+ replicas: {{ .Values.replicas}}\r
+ selector:\r
+ matchLabels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ template:\r
+ metadata:\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ spec:\r
+ serviceAccount: default\r
+ volumes:\r
+ - name: {{ .Values.appName}}-cert-volume\r
+ secret:\r
+ secretName: {{.Values.sharedCert}}\r
+ optional: true\r
+ items:\r
+ - key: PEM_CERT\r
+ path: otf.pem\r
+ - key: PEM_KEY\r
+ path: privateKey.pem\r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ persistentVolumeClaim:\r
+ {{if eq .Values.env "prod"}}\r
+ claimName: {{ .Values.pvc.prod | quote }}\r
+ {{ else }}\r
+ claimName: {{ .Values.pvc.dev | quote }}\r
+ {{ end }}\r
+ {{end}}\r
+ containers:\r
+ - name: {{ .Values.appName}}\r
+ image: {{ .Values.image}}\r
+ imagePullPolicy: Always\r
+ ports:\r
+ - name: http\r
+ containerPort: 5000\r
          # NOTE(review): nodePort is not a valid field inside a container
          # ports entry; it belongs on the Service spec (service.yaml
          # already sets it from .Values.nodePort), so it is omitted here.
+ protocol: TCP\r
+ env:\r
+ - name: NAMESPACE\r
+ value: {{.Values.namespace}}\r
+ - name: APP_NAME\r
+ value: {{ .Values.appName}}\r
+ - name: APP_VERSION\r
+ value: {{.Values.version}}\r
+ - name: OTF_MONGO_HOSTS\r
+ {{ if or (eq .Values.env "prod") (eq .Values.env "prod-dr")}}\r
+ value: {{ .Values.mongo.prod.OTF_MONGO_HOSTS | quote}}\r
+ {{ else if eq .Values.env "st" }}\r
+ value: {{ .Values.mongo.st.OTF_MONGO_HOSTS | quote}}\r
+ {{ else }}\r
+ value: {{ .Values.mongo.dev.OTF_MONGO_HOSTS | quote}}\r
+ {{ end }}\r
+ - name: OTF_MONGO_DATABASE\r
+ {{ if or (eq .Values.env "prod") (eq .Values.env "prod-dr")}}\r
+ value: {{ .Values.mongo.prod.OTF_MONGO_DBOTF | quote }}\r
+ {{ else if eq .Values.env "st" }}\r
+ value: {{ .Values.mongo.st.OTF_MONGO_DBOTF | quote }}\r
+ {{ else }}\r
+ value: {{ .Values.mongo.dev.OTF_MONGO_DBOTF | quote }}\r
+ {{ end }}\r
+ - name: OTF_MONGO_REPLICASET\r
+ {{ if or (eq .Values.env "prod") (eq .Values.env "prod-dr")}}\r
+ value: {{ .Values.mongo.prod.OTF_MONGO_REPLICASET | quote }}\r
+ {{ else if eq .Values.env "st" }}\r
+ value: {{ .Values.mongo.st.OTF_MONGO_REPLICASET | quote }} \r
+ {{ else }}\r
+ value: {{ .Values.mongo.dev.OTF_MONGO_REPLICASET | quote }}\r
+ {{ end }}\r
+ - name: OTF_MONGO_USERNAME\r
+ valueFrom:\r
+ secretKeyRef:\r
+ name: {{ .Values.appName}}\r
+ key: mongo_username\r
+ optional: true\r
+ - name: OTF_MONGO_PASSWORD\r
+ valueFrom:\r
+ secretKeyRef:\r
+ name: {{ .Values.appName}}\r
+ key: mongo_password\r
+ optional: true\r
+ - name: APP_SETTINGS\r
+ value: {{ .Values.robot.APP_SETTINGS | quote }} \r
+ {{ if eq .Values.env "st"}}\r
+ resources:\r
+ limits: \r
+ memory: "2Gi"\r
+ cpu: "1"\r
+ requests:\r
+ memory: "1Gi"\r
+ cpu: "500m"\r
+ {{else}} \r
+ resources:\r
+ limits:\r
+ memory: "4Gi"\r
+ cpu: "2"\r
+ requests:\r
+ memory: "1Gi"\r
+ cpu: "1"\r
+ {{end}} \r
+ volumeMounts:\r
+ - name: {{.Values.appName}}-cert-volume\r
+ mountPath: /opt/cert\r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ mountPath: "/otf/logs"\r
+ {{end}} \r
+ livenessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Alive\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ readinessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Ready\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ restartPolicy: Always\r
--- /dev/null
+apiVersion: v1\r
+kind: Secret\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+type: Opaque\r
+data:\r
+ mongo_username: {{ .Values.mongo.username | b64enc}}\r
+ mongo_password: {{ .Values.mongo.password | b64enc}}\r
--- /dev/null
+apiVersion: v1\r
+kind: Service\r
+metadata:\r
+ name: {{ .Values.appName }}\r
+ namespace: {{ .Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
+spec:\r
+ type: NodePort\r
+ ports:\r
+ - name: http\r
+ port: 5000\r
+ protocol: TCP\r
+ nodePort: {{ .Values.nodePort}}\r
+ selector:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
--- /dev/null
+appName: otf-robot-test-head\r
+env: dev\r
+version: 0.0.1-SNAPSHOT\r
+image: otf-robot-test-head:0.0.5-SNAPSHOT\r
+namespace: org-oran-otf\r
+nodePort: 32224\r
+replicas: 1\r
+health : /otf/vth/robot/v1/health\r
+sharedCert: otf-cert-secret-builder\r
+robot:\r
+ APP_SETTINGS: "app.configuration.DevelopmentConfiguration"\r
+pvc:\r
+ dev: org-oran-otf-dev-logs-pv\r
+ prod: org-oran-otf-prod-logs-pv\r
+mongo:\r
+ prod:\r
+ OTF_MONGO_HOSTS: "localhost:18720,localhost:18720,localhost:18720/"\r
+ OTF_MONGO_DBOTF: "otf"\r
+ OTF_MONGO_REPLICASET: "otf-rs-prod2"\r
+ dev:\r
+ OTF_MONGO_HOSTS: "localhost:27017,localhost:27017,localhost:27017/"\r
+ OTF_MONGO_DBOTF: "otf"\r
+ OTF_MONGO_REPLICASET: "mongoOTF"\r
+ st:\r
+ OTF_MONGO_HOSTS: "localhost:27017,localhost:27017,localhost:27017/"\r
+ OTF_MONGO_DBOTF: "otf_st"\r
+ OTF_MONGO_REPLICASET: "mongoOTF"\r
+\r
+ username: ""\r
+ password: ""\r
+sharedSecret: otf-aaf-credential-generator\r
--- /dev/null
flask
pymongo
robotframework
robotframework-selenium2library
selenium
\ No newline at end of file
--- /dev/null
from app import create_app

if __name__ == '__main__':
    # Build the application via the factory (reads APP_SETTINGS).
    app = create_app()

    # Set SSL context with the certificate chain and the private RSA key.
    # NOTE(review): paths are relative to the working directory, while the
    # Helm chart mounts the certs at /opt/cert -- confirm the intended
    # leading slash.
    context = ('opt/cert/otf.pem', 'opt/cert/privateKey.pem')
    app.run(
        debug=app.config['DEBUG'],
        host='0.0.0.0', port=5000,
        use_reloader=True,
        ssl_context=context)
    # Run without ssl
    # app.run(debug=app.config['DEBUG'], host='0.0.0.0', use_reloader=True, port=5000)
\ No newline at end of file
--- /dev/null
+otf.pem\r
+privateKey.pem\r
+*.log\r
+*.pem\r
+/otf\r
--- /dev/null
FROM python:2.7

# Build-time proxy settings (override with --build-arg as needed).
ARG HTTP_PROXY="localhost:8080"
ARG HTTPS_PROXY="localhost:8080"
ARG http_proxy="localhost:8080"
ARG https_proxy="localhost:8080"

# Sanity check of the interpreter baked into the base image.
RUN python --version

# Copy the dependency manifest and the test head entry point.
ADD pip-requirements.txt pip-requirements.txt
ADD ssh_test_head.py ssh_test_head.py

# Log directory expected by the platform's logging volume mount.
RUN mkdir -p /otf/logs

RUN python -m pip install -r pip-requirements.txt

ENTRYPOINT ["python", "ssh_test_head.py"]
--- /dev/null
#!/usr/bin/env groovy

// CI/CD pipeline for the otf-ssh-test-head image and helm chart.
// PHASE is an underscore-separated list of phases (e.g. "BUILD_DEPLOY"):
//   BUILD    - docker build and push the image to the registry
//   DEPLOY   - helm lint / template / dry-run, then upgrade --install
//   UNDEPLOY - helm delete --purge the release

properties([[$class: 'ParametersDefinitionProperty', parameterDefinitions: [
    [$class: 'hudson.model.StringParameterDefinition', name: 'PHASE', defaultValue: "BUILD"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'ENV', defaultValue: "dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'MECHID', defaultValue: "id_otf_dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'KUBE_CONFIG', defaultValue: "kubeConfig-dev"],
    [$class: 'hudson.model.StringParameterDefinition', name: 'TILLER_NAMESPACE', defaultValue: "org-oran-otf"]
]]])


echo "Build branch: ${env.BRANCH_NAME}"

node("docker"){
    stage 'Checkout'
    checkout scm
    PHASES = PHASE.tokenize('_')
    echo "PHASES : " + PHASES

    ARTIFACT_ID = "otf-ssh-test-head"
    VERSION = "Blitzcrank.1.1"
    NAMESPACE = "org-oran-otf"
    DOCKER_REGISTRY = "registry.hub.docker.io"

    // Registry namespace is suffixed per target environment (dev uses the base).
    if (ENV.equalsIgnoreCase("dev")) {
        IMAGE_NAME = DOCKER_REGISTRY + "/" + NAMESPACE + "/" + ARTIFACT_ID + ":" + VERSION
    }
    if (ENV.equalsIgnoreCase("prod") || ENV.equalsIgnoreCase("prod-dr")) {
        IMAGE_NAME = DOCKER_REGISTRY + "/" + NAMESPACE + ".prod" + "/" + ARTIFACT_ID + ":" + VERSION
    }
    if (ENV.equalsIgnoreCase("st")) {
        IMAGE_NAME = DOCKER_REGISTRY + "/" + NAMESPACE + ".st" + "/" + ARTIFACT_ID + ":" + VERSION
    }

    echo "Artifact: " + IMAGE_NAME

    withEnv(["PATH=${env.PATH}:${env.WORKSPACE}/linux-amd64", "HELM_HOME=${env.WORKSPACE}"]) {

        echo "PATH=${env.PATH}"
        echo "HELM_HOME=${env.HELM_HOME}"

        if (PHASES.contains("BUILD")) {

            stage 'Publish Artifact'

            withCredentials([usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {

                echo "Artifact: " + IMAGE_NAME

                // NOTE(review): Groovy-interpolated credentials inside sh
                // strings can leak into build logs; prefer single-quoted sh
                // bodies that let the shell expand $USERNAME/$PASSWORD.
                sh """
                docker login $DOCKER_REGISTRY --username $USERNAME --password $PASSWORD
                docker build -t $IMAGE_NAME .
                docker push $IMAGE_NAME
                """
            }

        }

        if (PHASES.contains("DEPLOY") || PHASES.contains("UNDEPLOY")) {

            stage 'Init Helm'

            // Install helm into the workspace only if a previous run has not.
            if (fileExists('linux-amd64/helm')) {
                sh """
                echo "helm is already installed"
                """
            }
            else {
                // download helm
                sh """
                echo "installing helm"
                wget https://storage.googleapis.com/kubernetes-helm/helm-v2.8.2-linux-amd64.tar.gz
                tar -xf helm-v2.8.2-linux-amd64.tar.gz
                rm helm-v2.8.2-linux-amd64.tar.gz
                """
            }

            withCredentials([file(credentialsId: KUBE_CONFIG, variable: 'KUBECONFIG')]) {

                dir('helm'){
                    // Validate the chart, perform a dry run, then upgrade/install.

                    if (PHASES.contains("UNDEPLOY")) {
                        stage 'Undeploy'

                        sh """
                        helm delete --tiller-namespace=$TILLER_NAMESPACE --purge $ARTIFACT_ID
                        """
                    }

                    // NOTE: double quotes are used below so Groovy variables like
                    // ARTIFACT_ID and TILLER_NAMESPACE are interpolated; shell
                    // variables are protected with \$.
                    if (PHASES.contains("DEPLOY")) {
                        stage 'Deploy'
                        withCredentials([
                            usernamePassword(credentialsId: MECHID, usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD'),
                            file(credentialsId: 'id_PRIVATE_KEY', variable: 'VAR_id_PRIVATE_KEY'),
                            string(credentialsId: 'id_otf_key_passphrase', variable: 'VAR_id_otf_key_passphrase')
                        ]) {

                            sh """

                            cp $VAR_id_PRIVATE_KEY $ARTIFACT_ID
                            FILE_id_PRIVATE_KEY=`basename $VAR_id_PRIVATE_KEY`

                            echo "Validate Yaml"
                            helm lint $ARTIFACT_ID

                            echo "View Helm Templates"
                            helm template $ARTIFACT_ID \
                                --set appName=$ARTIFACT_ID \
                                --set version=$VERSION \
                                --set env=$ENV \
                                --set image=$IMAGE_NAME \
                                --set namespace=$TILLER_NAMESPACE \
                                --set Secret.id_private_key=\$FILE_id_PRIVATE_KEY \
                                --set Secret.id_private_key_passphrase=$VAR_id_otf_key_passphrase

                            echo "Perform Dry Run Of Install"
                            helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install --dry-run $ARTIFACT_ID $ARTIFACT_ID \
                                --set appName=$ARTIFACT_ID \
                                --set version=$VERSION \
                                --set env=$ENV \
                                --set image=$IMAGE_NAME \
                                --set namespace=$TILLER_NAMESPACE \
                                --set Secret.id_private_key=\$FILE_id_PRIVATE_KEY \
                                --set Secret.id_private_key_passphrase=$VAR_id_otf_key_passphrase

                            echo "Helm Install/Upgrade"
                            helm upgrade --tiller-namespace=$TILLER_NAMESPACE --install $ARTIFACT_ID $ARTIFACT_ID \
                                --set appName=$ARTIFACT_ID \
                                --set version=$VERSION \
                                --set env=$ENV \
                                --set image=$IMAGE_NAME \
                                --set namespace=$TILLER_NAMESPACE \
                                --set Secret.id_private_key=\$FILE_id_PRIVATE_KEY \
                                --set Secret.id_private_key_passphrase=$VAR_id_otf_key_passphrase

                            """
                        }
                    }

                }
            }
        }

    }
}
--- /dev/null
+Unless otherwise specified, all software contained herein is licensed\r
+under the Apache License, Version 2.0 (the "Software License");\r
+you may not use this software except in compliance with the Software\r
+License. You may obtain a copy of the Software License at\r
+\r
+http://www.apache.org/licenses/LICENSE-2.0\r
+\r
+Unless required by applicable law or agreed to in writing, software\r
+distributed under the Software License is distributed on an "AS IS" BASIS,\r
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r
+See the Software License for the specific language governing permissions\r
+and limitations under the Software License.\r
+\r
+\r
+\r
+Unless otherwise specified, all documentation contained herein is licensed\r
+under the Creative Commons License, Attribution 4.0 Intl. (the\r
+"Documentation License"); you may not use this documentation except in\r
+compliance with the Documentation License. You may obtain a copy of the\r
+Documentation License at\r
+\r
+https://creativecommons.org/licenses/by/4.0/\r
+\r
+Unless required by applicable law or agreed to in writing, documentation\r
+distributed under the Documentation License is distributed on an "AS IS"\r
+BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\r
+implied. See the Documentation License for the specific language governing\r
+permissions and limitations under the Documentation License.
\ No newline at end of file
--- /dev/null
+# Patterns to ignore when building packages.\r
+# This supports shell glob matching, relative path matching, and\r
+# negation (prefixed with !). Only one pattern per line.\r
+.DS_Store\r
+# Common VCS dirs\r
+.git/\r
+.gitignore\r
+.bzr/\r
+.bzrignore\r
+.hg/\r
+.hgignore\r
+.svn/\r
+# Common backup files\r
+*.swp\r
+*.bak\r
+*.tmp\r
+*~\r
+# Various IDEs\r
+.project\r
+.idea/\r
+*.tmproj\r
--- /dev/null
+apiVersion: v1\r
+appVersion: "1.0"\r
+description: A Helm chart for the SSH Virtual Test Head \r
+name: otf-ssh-test-head\r
+version: 0.0.1\r
--- /dev/null
+apiVersion: extensions/v1beta1\r
+kind: Deployment\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+ namespace: {{.Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+spec:\r
+ revisionHistoryLimit: 1\r
+ minReadySeconds: 10\r
+ strategy:\r
+ # indicate which strategy we want for rolling update\r
+ type: RollingUpdate\r
+ rollingUpdate:\r
+ maxSurge: 0\r
+ maxUnavailable: 1\r
+ replicas: {{ .Values.replicas}}\r
+ selector:\r
+ matchLabels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ template:\r
+ metadata:\r
+ labels:\r
+ app: {{ .Values.appName}}\r
+ version: {{.Values.version}}\r
+ spec:\r
+ serviceAccount: default\r
+ volumes:\r
+ - name: {{ .Values.appName}}-cert-volume\r
+ secret:\r
+ secretName: {{.Values.sharedCert}}\r
+ optional: true\r
+ items:\r
+ - key: PEM_CERT\r
+ path: otf.pem\r
+ - key: PEM_KEY\r
+ path: privateKey.pem\r
+ - name: {{ .Values.appName}}-ssh-volume\r
+ secret:\r
+ secretName: {{.Values.appName}}\r
+ optional: true\r
+ items:\r
+ - key: id_private_key\r
+ path: id_otf.key\r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ persistentVolumeClaim:\r
+ {{if eq .Values.env "prod"}}\r
+ claimName: {{ .Values.pvc.prod | quote }}\r
+ {{ else }}\r
+ claimName: {{ .Values.pvc.dev | quote }}\r
+ {{ end }}\r
+ {{end}}\r
+ containers:\r
+ - name: {{ .Values.appName}}\r
+ image: {{ .Values.image}}\r
+ imagePullPolicy: Always\r
+ ports:\r
+ - name: http\r
+ containerPort: 5000\r
+ nodePort: {{.Values.nodePort}}\r
+ protocol: TCP\r
+ {{ if eq .Values.env "st"}}\r
+ resources:\r
+ limits: \r
+ memory: "2Gi"\r
+ cpu: "1"\r
+ requests:\r
+ memory: "1Gi"\r
+ cpu: "500m"\r
+ {{else}} \r
+ resources:\r
+ limits:\r
+ memory: "4Gi"\r
+ cpu: "2"\r
+ requests:\r
+ memory: "1Gi"\r
+ cpu: "1"\r
+ {{end}}\r
+ env:\r
+ - name: NAMESPACE\r
+ value: {{.Values.namespace}}\r
+ - name: APP_NAME\r
+ value: {{ .Values.appName}}\r
+ - name: APP_VERSION\r
+ value: {{.Values.version}}\r
+ - name: id_private_key_passphrase\r
+ valueFrom:\r
+ secretKeyRef:\r
+ name: {{ .Values.appName}}\r
+ key: id_private_key_passphrase\r
+ optional: true\r
+ volumeMounts:\r
+ - name: {{.Values.appName}}-cert-volume\r
+ mountPath: /opt/cert\r
+ - name: {{.Values.appName}}-ssh-volume\r
+ mountPath: /ssh \r
+ {{ if or (eq .Values.env "st") (eq .Values.env "prod-dr")}}\r
+ {{else}}\r
+ - name: logging-pvc\r
+ mountPath: "/otf/logs"\r
+ {{end}} \r
+ livenessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Alive\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ readinessProbe:\r
+ httpGet:\r
+ path: {{.Values.health}}\r
+ port: http\r
+ scheme: HTTPS\r
+ httpHeaders:\r
+ - name: X-Custom-Header\r
+ value: Ready\r
+ initialDelaySeconds: 30\r
+ timeoutSeconds: 30\r
+ periodSeconds: 30\r
+ restartPolicy: Always\r
--- /dev/null
+apiVersion: v1\r
+kind: Secret\r
+metadata:\r
+ name: {{ .Values.appName}}\r
+type: Opaque\r
+data:\r
+ id_private_key: {{ .Files.Get .Values.Secret.id_private_key | b64enc }}\r
+ id_private_key_passphrase: {{ .Values.Secret.id_private_key_passphrase | b64enc }}
\ No newline at end of file
--- /dev/null
+apiVersion: v1\r
+kind: Service\r
+metadata:\r
+ name: {{ .Values.appName }}\r
+ namespace: {{ .Values.namespace}}\r
+ labels:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
+spec:\r
+ type: NodePort\r
+ ports:\r
+ - name: http\r
+ port: 5000\r
+ protocol: TCP\r
+ nodePort: {{ .Values.nodePort}}\r
+ selector:\r
+ app: {{ .Values.appName }}\r
+ version: {{ .Values.version}}\r
--- /dev/null
+appName: otf-ssh-test-head\r
+env: dev\r
+version: 0.0.1-SNAPSHOT\r
+image: otf-ssh-test-head:0.0.1-SNAPSHOT\r
+namespace: org-oran-otf\r
+nodePort: 32222\r
+replicas: 1\r
health: /otf/vth/ssh/v1/health
+sharedCert: otf-cert-secret-builder\r
+pvc:\r
+ dev: org-oran-otf-dev-logs-pv\r
+ prod: org-oran-otf-prod-logs-pv\r
+\r
+Secret:\r
+ id_private_key: temp\r
+ id_private_key_passphrase: temp\r
+\r
--- /dev/null
+add certs here. requires two pem files (Cert and key)\r
--- /dev/null
flask
flask-cors
pyping
paramiko
--- /dev/null
+# Copyright (c) 2019 AT&T Intellectual Property. #\r
+# #\r
+# Licensed under the Apache License, Version 2.0 (the "License"); #\r
+# you may not use this file except in compliance with the License. #\r
+# You may obtain a copy of the License at #\r
+# #\r
+# http://www.apache.org/licenses/LICENSE-2.0 #\r
+# #\r
+# Unless required by applicable law or agreed to in writing, software #\r
+# distributed under the License is distributed on an "AS IS" BASIS, #\r
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #\r
+# See the License for the specific language governing permissions and #\r
+# limitations under the License. #\r
+################################################################################\r
+\r
+\r
+from flask import Flask, request, make_response, jsonify, g\r
+import json\r
+import paramiko\r
+import datetime\r
+import logging\r
+import os\r
+from logging import FileHandler\r
+\r
# Flask application object for the OTF SSH virtual test head.
# (The old "redirect http to https" note was stale: TLS is enabled via the
# ssl_context passed to app.run() in __main__, not via a redirect here.)
app = Flask(__name__)


# Silence werkzeug's per-request access-log line for every endpoint hit;
# only warnings and above are emitted.
logging.getLogger("werkzeug").setLevel(logging.WARNING)
+\r
+\r
def unix_time_millis(dt):
    """Return the number of milliseconds between the Unix epoch and *dt*.

    *dt* is expected to be a naive datetime in the same frame of reference
    as ``datetime.utcfromtimestamp(0)``; used here to time test runs.
    """
    elapsed = dt - datetime.datetime.utcfromtimestamp(0)
    return elapsed.total_seconds() * 1000.0
+\r
+\r
@app.route("/otf/vth/ssh/v1/health", methods = ['GET'])
def getHealth():
    """Health endpoint polled by the Kubernetes liveness/readiness probes."""
    status = "UP"
    return status
+\r
+\r
@app.route('/otf/vth/ssh/v1', methods = ['POST'])
def remoteSSH():
    """Run a shell command on a remote host over SSH and return its output.

    Expects a JSON body of the form::

        {"vthInput": {
            "vthName": ...,
            "testConfig": {"jumpServer": {"host": ...,
                                          "credentials": {"username": ...,
                                                          "password": ...}},
                           "usePrivateKey": <optional bool>},
            "testData": {"command": "<shell command>"}}}

    Returns a JSON ``vthResponse`` with the command's escaped stdout/stderr
    in ``resultData`` and the test duration in milliseconds.  On any
    validation or SSH failure the error message is returned in
    ``abstractMessage`` (HTTP 200 either way, matching the original contract).
    """
    responseData = {
        "vthResponse": {
            "testDurationMS": "",
            "dateTimeUTC": "",
            "abstractMessage": "",
            "resultData": {}
        }
    }

    responseData['vthResponse']['dateTimeUTC'] = str(datetime.datetime.now())
    start_time = unix_time_millis(datetime.datetime.now())

    try:
        if not request.is_json:
            raise ValueError('Request must be a valid JSON object.')

        request_data = request.get_json()

        if 'vthInput' in request_data:
            vth_input = request_data['vthInput']
            expected_keys = ['vthName', 'testConfig', 'testData']
            received_keys = vth_input.keys()
            test_data = ""
            test_config = ""

            # The payload must carry exactly the expected top-level keys.
            if sorted(expected_keys) == sorted(received_keys):
                test_data = vth_input['testData']

                if 'command' not in test_data:
                    raise ValueError('Must supply value testData.command')

            else:
                raise ValueError('Missing one or more expected keys: {expectedKeys}.'.format(expectedKeys=expected_keys))

            test_config = vth_input['testConfig']

            if 'jumpServer' not in test_config:
                raise KeyError('Cannot use jump server when jumpServer key is missing.')

            jump_server = test_config['jumpServer']

            if 'host' not in jump_server:
                raise KeyError('Missing host value in jumpServer.')

            host = jump_server['host']

            if 'credentials' not in jump_server:
                raise KeyError('Missing credentials in jumpServer.')

            credentials = jump_server['credentials']

            if 'username' not in credentials:
                raise KeyError('Missing username in credentials.')

            username = credentials['username']

            if 'password' not in credentials:
                raise KeyError('Missing password in credentials.')

            password = credentials['password']

            ssh = paramiko.SSHClient()
            # NOTE(review): AutoAddPolicy trusts unknown host keys; acceptable
            # only because this test head targets lab hosts -- confirm.
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

            # Bug fixes vs. the original:
            #  - the passphrase was hard-coded to the literal 'passphrase'
            #    instead of the value retrieved from the environment;
            #  - the env var name now matches the one injected by the helm
            #    deployment (secretKeyRef key 'id_private_key_passphrase');
            #  - when 'usePrivateKey' was absent no connect() happened at all
            #    (the else bound to the inner if), so exec_command failed;
            #  - the passphrase and private-key file contents are no longer
            #    written to the application log.
            if test_config.get('usePrivateKey'):
                key_passphrase = os.environ.get('id_private_key_passphrase')
                ssh.connect(host, username=username, passphrase=key_passphrase,
                            key_filename='./ssh/id_otf.key')
            else:
                ssh.connect(host, username=username, password=password)

            command = test_data['command']
            ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(command)

            # Escape quotes/newlines so the output embeds safely in JSON strings.
            responseData['vthResponse']['resultData']['output'] = str(ssh_stdout.read()).replace('"', '\\"').replace('\n', '\\n')
            responseData['vthResponse']['resultData']['error'] = str(ssh_stderr.read()).replace('"', '\\"').replace('\n', '\\n')

            # Release the SSH connection (was leaked in the original).
            ssh.close()

        else:
            raise KeyError('Missing vthInput parameter(s)')

        # Record the end time of the test and compute the total duration.
        endTime = unix_time_millis(datetime.datetime.now())
        totalTime = endTime - start_time
        responseData['vthResponse']['testDurationMS'] = totalTime

        responseData['vthResponse']['abstractMessage'] = 'done'

        app.logger.info(str(responseData))

        return jsonify(responseData)
    except Exception as e:
        app.logger.info(e)
        responseData['vthResponse']['abstractMessage'] = str(e)
        # Record the duration on the failure path too (it was computed but
        # never stored in the original).
        endTime = unix_time_millis(datetime.datetime.now())
        totalTime = endTime - start_time
        responseData['vthResponse']['testDurationMS'] = totalTime
        resp = make_response(json.dumps(responseData))
        return resp
+\r
+\r
if __name__ == '__main__':
    # Append-mode file logging.  The path is relative; inside the container it
    # is expected to resolve under /otf/logs (the directory created in the
    # Dockerfile and mounted as a PVC by the helm chart) -- assumes the
    # process cwd is '/', TODO confirm.
    logHandler = FileHandler('otf/logs/sshVTH.log', mode='a')
    # logHandler = FileHandler('sshVTH.log', mode='a')
    logHandler.setLevel(logging.INFO)
    app.logger.setLevel(logging.INFO)
    app.logger.addHandler(logHandler)
    # TLS cert/key pair; presumably mounted from the shared cert secret at
    # /opt/cert (relative path, same cwd assumption as the log file).
    context = ('opt/cert/otf.pem', 'opt/cert/privateKey.pem')
    app.run(debug = False, host = '0.0.0.0', port = 5000, ssl_context = context)
    # app.run(debug = False, host = '0.0.0.0', port=5000)