From 0abae738fe22805dca48b759b4a60d9a1851a207 Mon Sep 17 00:00:00 2001
From: rajdeep11
Date: Wed, 6 Dec 2023 11:32:02 +0530
Subject: [PATCH] adding db changes

Issue-Id: AIMLFW-65

Change-Id: I89c15517515b33f566072270012accbc2d388d94
Signed-off-by: rajdeep11
---
 tests/test_common_db_fun.py         | 15 +++++++++------
 tox.ini                             |  1 +
 trainingmgr/db/common_db_fun.py     | 14 +++++++-------
 trainingmgr/db/trainingmgr_ps_db.py |  3 +++
 4 files changed, 20 insertions(+), 13 deletions(-)

diff --git a/tests/test_common_db_fun.py b/tests/test_common_db_fun.py
index 6882d1e..8dfd91b 100644
--- a/tests/test_common_db_fun.py
+++ b/tests/test_common_db_fun.py
@@ -55,7 +55,10 @@ mimic_db = {
     "notification_url": "",
     "_measurement": "liveCell",
     "bucket": "UEdata",
-    "accuracy": 70
+    "deletion_in_progress": False,
+    "is_mme": False,
+    "model_name": "",
+    "model_info": ""
 }
 
 mimic_fg_db={
@@ -422,20 +425,20 @@ class Test_Common_Db_Fun:
     def test_add_update_trainingjob(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
-        add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
+        add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
         assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
 
     def test_negative_add_update_trainingjob_2(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
-        add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, False, 1, 'InfluxSource', 'Tester',db_obj)
+        add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
         assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
 
     def test_negative_add_update_trainingjob_3(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
         try:
-            add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, True, 1, 'InfluxSource', 'Tester',db_obj)
+            add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
             assert False
         except Exception:
@@ -445,7 +448,7 @@ class Test_Common_Db_Fun:
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
         try:
-            add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, False, 1, 'InfluxSource', 'Tester',db_obj)
+            add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
             assert False
         except Exception:
@@ -460,7 +463,7 @@ class Test_Common_Db_Fun:
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
         except Exception as err:
             fxn_name = "add_update_trainingjob"
-            assert str(err) == "add_update_trainingjob() takes from 12 to 15 positional arguments but 19 were given", 'Negative test {} FAILED when adding = True , Doesnt returned required error'.format(fxn_name)
+            assert str(err) == "add_update_trainingjob() takes from 12 to 18 positional arguments but 19 were given", 'Negative test {} FAILED when adding = True , Doesnt returned required error'.format(fxn_name)
 
     def test_get_all_jobs_latest_status_version(self):
         db_obj = db_helper([["usecase_name"]])
diff --git a/tox.ini b/tox.ini
index c4c4da7..ce95619 100644
--- a/tox.ini
+++ b/tox.ini
@@ -41,6 +41,7 @@ deps=
     python-dotenv
     kubernetes
     pg8000
+    Werkzeug==2.2.2
     validators==0.20.0
 setenv =
     cd = {toxinidir}/tests
diff --git a/trainingmgr/db/common_db_fun.py b/trainingmgr/db/common_db_fun.py
index 5945753..042b560 100644
--- a/trainingmgr/db/common_db_fun.py
+++ b/trainingmgr/db/common_db_fun.py
@@ -473,7 +473,7 @@ def update_model_download_url(trainingjob_name, version, url, ps_db_obj):
 def add_update_trainingjob(description, pipeline_name, experiment_name, feature_list,
                            arguments, query_filter, adding, enable_versioning, pipeline_version,
                            datalake_source, trainingjob_name, ps_db_obj, notification_url="",
-                           _measurement="", bucket=""):
+                           _measurement="", bucket="", is_mme="", model_name="", model_info=""):
     """
     This function add the new row or update existing row with given information
     """
@@ -511,14 +511,14 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
                     version = version + 1
                 cursor.execute('''INSERT INTO {} VALUES '''.format(tm_table_name) + \
                                '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
-                               ''' %s,%s,%s,%s,%s,%s,%s)''',
+                               ''' %s,%s,%s,%s,%s,%s,%s,%s,%s, %s)''',
                                (trainingjob_name, description, feature_list,
                                 pipeline_name, experiment_name, arguments_string,
                                 query_filter, creation_time, run_id,
                                 json.dumps(steps_state), updation_time, version,
                                 enable_versioning, pipeline_version,
                                 datalake_source_string, model_url, notification_url,
-                                _measurement, bucket, deletion_in_progress))
+                                _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info))
             else:
                 cursor.execute('''update {} set description=%s, feature_list=%s, '''.format(tm_table_name) + \
                                '''pipeline_name=%s,experiment_name=%s,arguments=%s,''' + \
@@ -527,26 +527,26 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
                                '''pipeline_version=%s,updation_time=%s,enable_versioning=%s,''' + \
                                '''datalake_source=%s,''' + \
                                '''model_url=%s, notification_url=%s, _measurement=%s, ''' + \
-                               '''bucket=%s, deletion_in_progress=%s where ''' + \
+                               '''bucket=%s, deletion_in_progress=%s, is_mme=%s, model_name=%s , model_info=%s where ''' + \
                                '''trainingjob_name=%s and version=%s''',
                                (description, feature_list, pipeline_name,
                                 experiment_name, arguments_string, query_filter,
                                 creation_time, run_id, json.dumps(steps_state),
                                 pipeline_version, updation_time, enable_versioning,
                                 datalake_source_string, model_url, notification_url,
-                                _measurement, bucket, deletion_in_progress, trainingjob_name, version))
+                                _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info, trainingjob_name, version))
         else:
             cursor.execute(''' INSERT INTO {} VALUES '''.format(tm_table_name) + \
                            '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
-                           '''%s,%s,%s,%s,%s,%s,%s,%s)''',
+                           '''%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s)''',
                            (trainingjob_name, description, feature_list,
                             pipeline_name, experiment_name, arguments_string,
                             query_filter, creation_time, run_id,
                             json.dumps(steps_state), updation_time, version,
                             enable_versioning, pipeline_version,
                             datalake_source_string, model_url,
                             notification_url, _measurement, bucket,
-                            deletion_in_progress))
+                            deletion_in_progress, is_mme, model_name, model_info))
         conn.commit()
         cursor.close()
     except Exception as err:
diff --git a/trainingmgr/db/trainingmgr_ps_db.py b/trainingmgr/db/trainingmgr_ps_db.py
index c45a979..a057c9d 100644
--- a/trainingmgr/db/trainingmgr_ps_db.py
+++ b/trainingmgr/db/trainingmgr_ps_db.py
@@ -100,6 +100,9 @@ class PSDB():
                 "_measurement varchar(100) NOT NULL," + \
                 "bucket varchar(50) NOT NULL," + \
                 "deletion_in_progress BOOLEAN NOT NULL," + \
+                "is_mme BOOLEAN NOT NULL," + \
+                "model_name varchar(128) NOT NULL," + \
+                "model_info varchar(1000) NOT NULL," \
                 "PRIMARY KEY (trainingjob_name,version)" + \
                 ")")
         conn2.commit()
--
2.16.6
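
For reviewers who want to exercise the widened signature locally, the sketch below shows one way a caller might pass the three new columns (`is_mme`, `model_name`, `model_info`). It is illustrative only and not part of the patch: the argument values mirror the updated unit tests and the `mimic_db` fixture, and `ps_db_obj` is assumed to be an already-initialised `PSDB` wrapper created elsewhere.

```python
# Illustrative sketch, not part of the patch. Argument values mirror the
# updated tests; ps_db_obj is assumed to be an initialised PSDB wrapper.
from trainingmgr.db.common_db_fun import add_update_trainingjob


def register_example_trainingjob(ps_db_obj):
    # Positional order follows the new signature: description, pipeline_name,
    # experiment_name, feature_list, arguments, query_filter, adding,
    # enable_versioning, pipeline_version, datalake_source, trainingjob_name,
    # ps_db_obj, then the keyword-only-by-convention optional fields.
    add_update_trainingjob(
        'description',        # description
        'qoe-pipeline',       # pipeline_name
        'Default',            # experiment_name
        'featuregroup',       # feature_list
        '{epoches : 1}',      # arguments
        '',                   # query_filter
        True,                 # adding: insert a new row
        True,                 # enable_versioning
        1,                    # pipeline_version
        'InfluxSource',       # datalake_source
        'Tester',             # trainingjob_name
        ps_db_obj,            # postgres DB wrapper
        notification_url='',
        _measurement='liveCell',
        bucket='UEdata',
        is_mme=False,         # new column added by this patch
        model_name='',        # new column added by this patch
        model_info='',        # new column added by this patch
    )
```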