},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"@component(base_image=BASE_IMAGE)\n",
- "def train_export_model(trainingjobName: str, epochs: str, version: str):\n",
+ "def train_export_model(featurepath: str, epochs: str, modelname: str, modelversion:str, artifactversion:str):\n",
" \n",
" import tensorflow as tf\n",
" from numpy import array\n",
" \n",
" fs_sdk = FeatureStoreSdk()\n",
" mm_sdk = ModelMetricsSdk()\n",
- " print(\"job name is: \", trainingjobName)\n",
- " features = fs_sdk.get_features(trainingjobName, ['pdcpBytesDl','pdcpBytesUl'])\n",
+ " print(\"featurepath is: \", featurepath)\n",
+ " features = fs_sdk.get_features(featurepath, ['pdcpBytesDl','pdcpBytesUl'])\n",
" print(\"Dataframe:\")\n",
" print(features)\n",
"\n",
" data['metrics'] = []\n",
" data['metrics'].append({'Accuracy': str(np.mean(np.absolute(np.asarray(xx)-np.asarray(yy))<5))})\n",
" \n",
- " mm_sdk.upload_metrics(data, trainingjobName, version)\n",
- " mm_sdk.upload_model(\"./\", trainingjobName, version)\n"
+ " artifactversion=\"1.0.0\"\n",
+ " \n",
+ " mm_sdk.upload_metrics(data, modelname, modelversion,artifactversion)\n",
+ " mm_sdk.upload_model(\"./\", modelname, modelversion, artifactversion)\n"
]
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": null,
"metadata": {},
"outputs": [],
"source": [
" description=\"qoe\",\n",
")\n",
"def super_model_pipeline( \n",
- " trainingjob_name: str, epochs: str, version: str):\n",
+ " featurepath: str, epochs: str, modelname: str, modelversion:str, artifactversion:str):\n",
" \n",
- " trainop=train_export_model(trainingjobName=trainingjob_name, epochs=epochs, version=version)\n",
+ " trainop=train_export_model(featurepath=featurepath, epochs=epochs, modelname=modelname, modelversion=modelversion, artifactversion=artifactversion)\n",
" trainop.set_caching_options(False)\n",
" kubernetes.set_image_pull_policy(trainop, \"IfNotPresent\")"
]