},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"@component(base_image=BASE_IMAGE, packages_to_install=['requests'])\n",
- "def train_export_model(trainingjobName: str, epochs: str, version: str):\n",
+ "def train_export_model(featurepath: str, epochs: str, modelname: str, modelversion: str, artifactversion: str):\n",
" \n",
+ " import re\n",
" import tensorflow as tf\n",
" from numpy import array\n",
" from tensorflow.keras.models import Sequential\n",
" from tensorflow.keras.layers import LSTM\n",
" import numpy as np\n",
" import requests\n",
+ " import zipfile\n",
" print(\"numpy version\")\n",
" print(np.__version__)\n",
" import pandas as pd\n",
" import os\n",
- " import zipfile\n",
" from featurestoresdk.feature_store_sdk import FeatureStoreSdk\n",
" from modelmetricsdk.model_metrics_sdk import ModelMetricsSdk\n",
" \n",
" fs_sdk = FeatureStoreSdk()\n",
" mm_sdk = ModelMetricsSdk()\n",
- " print(\"job name is: \", trainingjobName)\n",
- " features = fs_sdk.get_features(trainingjobName, ['pdcpBytesDl','pdcpBytesUl'])\n",
+ " print(\"featurepath is: \", featurepath)\n",
+ " features = fs_sdk.get_features(featurepath, ['pdcpBytesDl','pdcpBytesUl'])\n",
" print(\"Dataframe:\")\n",
" print(features)\n",
"\n",
" y = y.reshape((y.shape[0], y.shape[2]))\n",
" print(X.shape)\n",
" print(y.shape)\n",
- " \n",
+ " \n",
" print(\"Loading the saved model\")\n",
" print(os.listdir(os.getcwd()))\n",
+ " \n",
+ " pattern = r'(.*?)_(\\d+)$'\n",
+ "\n",
+ " # Search for the pattern in the input string\n",
+ " match = re.search(pattern, featurepath)\n",
+ " trainingjob_id = None\n",
+ " if match:\n",
+ " trainingjob_id = int(match.group(2))\n",
+ " print(\"Training Job ID:\", trainingjob_id)\n",
+ " else:\n",
+ " print(\"Pattern not found\")\n",
"\n",
" # Download the model zip file\n",
- " model_url= f\"http://tm.traininghost:32002/model/{trainingjobName}/{version}/Model.zip\"\n",
+ " model_url= f\"http://tm.traininghost:32002/model/{trainingjob_id}/Model.zip\"\n",
" print(f\"Downloading model from :{model_url}\")\n",
" response = requests.get(model_url)\n",
"\n",
" data['metrics'] = []\n",
" data['metrics'].append({'Accuracy': str(np.mean(np.absolute(np.asarray(xx)-np.asarray(yy))<5))})\n",
" \n",
- " mm_sdk.upload_metrics(data, trainingjobName, version)\n",
- " mm_sdk.upload_model(\"./retrain\", trainingjobName, version)\n"
+ " mm_sdk.upload_metrics(data, modelname, modelversion, artifactversion)\n",
+ " mm_sdk.upload_model(\"./\", modelname, modelversion, artifactversion)\n"
]
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"@dsl.pipeline(\n",
- " name=\"qoe Pipeline retrain\",\n",
- " description=\"qoe retrain\",\n",
+ " name=\"qoe Pipeline\",\n",
+ " description=\"qoe\",\n",
")\n",
"def super_model_pipeline( \n",
- " trainingjob_name: str, epochs: str, version: str):\n",
+ " featurepath: str, epochs: str, modelname: str, modelversion: str, artifactversion: str):\n",
" \n",
- " trainop=train_export_model(trainingjobName=trainingjob_name, epochs=epochs, version=version)\n",
+ " trainop=train_export_model(featurepath=featurepath, epochs=epochs, modelname=modelname, modelversion=modelversion, artifactversion=artifactversion)\n",
" trainop.set_caching_options(False)\n",
" kubernetes.set_image_pull_policy(trainop, \"IfNotPresent\")"
]
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"pipeline_func = super_model_pipeline\n",
- "file_name = \"qoe_model_pipeline_retrain_2\"\n",
+ "file_name = \"qoe_model_pipeline_retrain\"\n",
"\n",
"kfp.compiler.Compiler().compile(pipeline_func, \n",
" '{}.yaml'.format(file_name))"
"outputs": [],
"source": [
"import requests\n",
- "pipeline_name=\"qoe_Pipeline_retrain_2\"\n",
+ "pipeline_name=\"qoe_Pipeline_retrain\"\n",
"pipeline_file = file_name+'.yaml'\n",
"requests.post(\"http://tm.traininghost:32002/pipelines/{}/upload\".format(pipeline_name), files={'file':open(pipeline_file,'rb')})"
]