adding db changes 79/12179/2
authorrajdeep11 <rajdeep.sin@samsung.com>
Wed, 6 Dec 2023 06:02:02 +0000 (11:32 +0530)
committerrajdeep11 <rajdeep.sin@samsung.com>
Wed, 6 Dec 2023 06:16:45 +0000 (11:46 +0530)
Issue-Id: AIMLFW-65

Change-Id: I89c15517515b33f566072270012accbc2d388d94
Signed-off-by: rajdeep11 <rajdeep.sin@samsung.com>
tests/test_common_db_fun.py
tox.ini
trainingmgr/db/common_db_fun.py
trainingmgr/db/trainingmgr_ps_db.py

index 6882d1e..8dfd91b 100644 (file)
@@ -55,7 +55,10 @@ mimic_db = {
             "notification_url": "",
             "_measurement": "liveCell",
             "bucket": "UEdata",
-            "accuracy": 70
+            "deletion_in_progress": False,
+            "is_mme": False,
+            "model_name": "",
+            "model_info": ""
         }
 
 mimic_fg_db={
@@ -422,20 +425,20 @@ class Test_Common_Db_Fun:
     def test_add_update_trainingjob(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
-        add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
+        add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
         assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
     
     def test_negative_add_update_trainingjob_2(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
-        add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, False, 1, 'InfluxSource', 'Tester',db_obj)
+        add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', True, False, 1, 'InfluxSource', 'Tester',db_obj)
         assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
     
     def test_negative_add_update_trainingjob_3(self):
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
         try:
-            add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, True, 1, 'InfluxSource', 'Tester',db_obj)
+            add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', False, True, 1, 'InfluxSource', 'Tester',db_obj)
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
             assert False
         except Exception:
@@ -445,7 +448,7 @@ class Test_Common_Db_Fun:
         checker = Check()
         db_obj = db_helper([[None]], check_success_obj=checker)
         try:
-            add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, False, 1, 'InfluxSource', 'Tester',db_obj)
+            add_update_trainingjob('description','qoe-pipeline', 'Default','featuregroup', '{epoches : 1}', '', False, False, 1, 'InfluxSource', 'Tester',db_obj)
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
             assert False
         except Exception:
@@ -460,7 +463,7 @@ class Test_Common_Db_Fun:
             assert checker.finished, 'add_update_trainingjob FAILED, When adding = True'
         except Exception as err:
             fxn_name = "add_update_trainingjob"
-            assert str(err) == "add_update_trainingjob() takes from 12 to 15 positional arguments but 19 were given", 'Negative test {} FAILED when  adding = True , Doesnt returned required error'.format(fxn_name)
+            assert str(err) == "add_update_trainingjob() takes from 12 to 18 positional arguments but 19 were given", 'Negative test {} FAILED when  adding = True , Doesnt returned required error'.format(fxn_name)
     
     def test_get_all_jobs_latest_status_version(self):
         db_obj = db_helper([["usecase_name"]])
diff --git a/tox.ini b/tox.ini
index c4c4da7..ce95619 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -41,6 +41,7 @@ deps=
   python-dotenv
   kubernetes
   pg8000
+  Werkzeug==2.2.2
   validators==0.20.0
 
 setenv = cd  = {toxinidir}/tests
index 5945753..042b560 100644 (file)
@@ -473,7 +473,7 @@ def update_model_download_url(trainingjob_name, version, url, ps_db_obj):
 def add_update_trainingjob(description, pipeline_name, experiment_name, feature_list, arguments,
                           query_filter, adding, enable_versioning,
                           pipeline_version, datalake_source, trainingjob_name, ps_db_obj, notification_url="",
-                          _measurement="", bucket=""):
+                          _measurement="", bucket="", is_mme=False, model_name="", model_info=""):
     """
     This function add the new row or update existing row with given information
     """
@@ -511,14 +511,14 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
                 version = version + 1
                 cursor.execute('''INSERT INTO {} VALUES '''.format(tm_table_name) + \
                                '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
-                               ''' %s,%s,%s,%s,%s,%s,%s)''',
+                               ''' %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
                                (trainingjob_name, description, feature_list, pipeline_name,
                                 experiment_name, arguments_string, query_filter,
                                 creation_time, run_id, json.dumps(steps_state),
                                 updation_time, version,
                                 enable_versioning, pipeline_version,
                                 datalake_source_string, model_url, notification_url,
-                                _measurement, bucket, deletion_in_progress))
+                                _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info))
             else:
                 cursor.execute('''update {} set description=%s, feature_list=%s, '''.format(tm_table_name) + \
                                '''pipeline_name=%s,experiment_name=%s,arguments=%s,''' + \
@@ -527,26 +527,26 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
                                '''pipeline_version=%s,updation_time=%s,enable_versioning=%s,''' + \
                                '''datalake_source=%s,''' + \
                                '''model_url=%s, notification_url=%s, _measurement=%s, ''' + \
-                               '''bucket=%s, deletion_in_progress=%s where ''' + \
+                               '''bucket=%s, deletion_in_progress=%s, is_mme=%s, model_name=%s , model_info=%s where ''' + \
                                '''trainingjob_name=%s and version=%s''',
                                (description, feature_list, pipeline_name, experiment_name,
                                 arguments_string, query_filter, creation_time, run_id,
                                 json.dumps(steps_state),
                                 pipeline_version, updation_time, enable_versioning,
                                 datalake_source_string, model_url, notification_url,
-                                _measurement, bucket, deletion_in_progress, trainingjob_name, version))
+                                _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info, trainingjob_name, version))
 
         else:
             cursor.execute(''' INSERT INTO {} VALUES '''.format(tm_table_name) + \
                            '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
-                           '''%s,%s,%s,%s,%s,%s,%s,%s)''',
+                           '''%s,%s,%s,%s,%s,%s,%s,%s, %s, %s, %s)''',
                            (trainingjob_name, description, feature_list, pipeline_name,
                             experiment_name, arguments_string, query_filter, creation_time,
                             run_id, json.dumps(steps_state),
                             updation_time, version, enable_versioning,
                             pipeline_version, datalake_source_string,
                             model_url, notification_url, _measurement, bucket,
-                            deletion_in_progress))
+                            deletion_in_progress, is_mme, model_name, model_info))
         conn.commit()
         cursor.close()
     except Exception as err:
index c45a979..a057c9d 100644 (file)
@@ -100,6 +100,9 @@ class PSDB():
                         "_measurement varchar(100) NOT NULL," + \
                         "bucket varchar(50) NOT NULL," + \
                         "deletion_in_progress BOOLEAN NOT NULL," + \
+                        "is_mme BOOLEAN NOT NULL," + \
+                        "model_name varchar(128) NOT NULL," + \
+                        "model_info varchar(1000) NOT NULL," + \
                         "PRIMARY KEY (trainingjob_name,version)" + \
                         ")")
             conn2.commit()