changes for the edit trainingjob 34/13034/2
author rajdeep11 <rajdeep.sin@samsung.com>
Wed, 26 Jun 2024 12:57:31 +0000 (18:27 +0530)
committer rajdeep11 <rajdeep.sin@samsung.com>
Wed, 26 Jun 2024 17:56:00 +0000 (23:26 +0530)
changes to fix the edit trainingjob

Change-Id: I8e6d4acbe1ce40d8283af7054e6c9effa3b263a2
Signed-off-by: rajdeep11 <rajdeep.sin@samsung.com>
tests/test_tm_apis.py
trainingmgr/trainingmgr_main.py

index 87308a3..6e03f93 100644 (file)
@@ -485,12 +485,10 @@ class Test_training_main:
         assert response.data == expected_data
         assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code NOT equal" 
 
-    db_result = [('usecase1', 'uc1', '*', 'qoe Pipeline lat v2', 'Default', '{"arguments": {"epochs": "1", "trainingjob_name": "usecase1"}}',
-     '', datetime.datetime(2022, 10, 12, 10, 0, 59, 923588), '51948a12-aee9-42e5-93a0-b8f4a15bca33',
-      '{"DATA_EXTRACTION": "FINISHED", "DATA_EXTRACTION_AND_TRAINING": "FINISHED", "TRAINING": "FINISHED", "TRAINING_AND_TRAINED_MODEL": "FINISHED", "TRAINED_MODEL": "FAILED"}',
-       datetime.datetime(2022, 10, 12, 10, 2, 31, 888830), 1, False, '3', '{"datalake_source": {"InfluxSource": {}}}', 'No data available.', '', 'liveCell', 'UEData', False, False, "","")]
+    db_result = [('my_testing_new_7', 'testing', 'testing_influxdb', 'pipeline_kfp2.2.0_5', 'Default', '{"arguments": {"epochs": "1", "trainingjob_name": "my_testing_new_7"}}', '', datetime.datetime(2024, 6, 21, 8, 57, 48, 408725), '432516c9-29d2-4f90-9074-407fe8f77e4f', '{"DATA_EXTRACTION": "FINISHED", "DATA_EXTRACTION_AND_TRAINING": "FINISHED", "TRAINING": "FINISHED", "TRAINING_AND_TRAINED_MODEL": "FINISHED", "TRAINED_MODEL": "FINISHED"}', datetime.datetime(2024, 6, 21, 9, 1, 54, 388278), 1, False, 'pipeline_kfp2.2.0_5', '{"datalake_source": {"InfluxSource": {}}}', 'http://10.0.0.10:32002/model/my_testing_new_7/1/Model.zip', '', False, False, '', '')]
+
     
-    training_data = ('','','','','','','','','','','', '','')
+    training_data = ('','','','','','','','','',False,'')
     @patch('trainingmgr.trainingmgr_main.validate_trainingjob_name', return_value = True)
     @patch('trainingmgr.trainingmgr_main.get_trainingjob_info_by_name', return_value = db_result)
     @patch('trainingmgr.trainingmgr_main.check_trainingjob_data', return_value = training_data)
@@ -498,27 +496,25 @@ class Test_training_main:
     def test_trainingjob_operations_put(self,mock1,mock2,mock3,mock4):
         trainingmgr_main.LOGGER.debug("******* test_trainingjob_operations_put *******")
         trainingjob_req = {
-                    "trainingjob_name":"usecase1",
-                    "pipeline_name":"qoe Pipeline lat v2",
-                    "experiment_name":"Default",
-                    "featureGroup_name":"group",
-                    "query_filter":"",
-                    "arguments":{
-                        "epochs":"1",
-                        "trainingjob_name":"usecase1"
-                    },
-                    "enable_versioning":False,
-                    "description":"updated",
-                    "pipeline_version":"3",
-                    "datalake_source":"InfluxSource",
-                    "_measurement":"liveCell",
-                    "bucket":"UEData", 
-                    "is_mme": False,
-                    "model_name":""
-                    }
+                "trainingjob_name": "my_testing_new_7",
+                "is_mme": False,
+                "model_name": False,
+                "pipeline_name": "pipeline",
+                "experiment_name": "Default",
+                "featureGroup_name": "testing",
+                "query_filter": "",
+                "arguments": {
+                    "epochs": "1",
+                    "trainingjob_name": "my_testing"
+                },
+                "enable_versioning": False,
+                "description": "testing",
+                "pipeline_version": "pipeline",
+                "datalake_source": "InfluxSource"
+            }
             
         expected_data = 'Information updated in database'
-        response = self.client.put("/trainingjobs/{}".format("usecase1"),
+        response = self.client.put("/trainingjobs/{}".format("my_testing_new_7"),
                                     data=json.dumps(trainingjob_req),
                                     content_type="application/json")
         trainingmgr_main.LOGGER.debug(response.data)        
index 80b34e6..88f8d3a 100644 (file)
@@ -963,7 +963,7 @@ def trainingjob_operations(trainingjob_name):
                 results = None
                 results = get_trainingjob_info_by_name(trainingjob_name, PS_DB_OBJ)
                 if results:
-                    if results[0][19]:
+                    if results[0][17]:
                         raise TMException("Failed to process request for trainingjob(" + trainingjob_name + ") " + \
                                         " deletion in progress")
                     if (get_one_word_status(json.loads(results[0][9]))
@@ -972,16 +972,16 @@ def trainingjob_operations(trainingjob_name):
 
                     (featuregroup_name, description, pipeline_name, experiment_name,
                     arguments, query_filter, enable_versioning, pipeline_version,
-                    datalake_source, _measurement, bucket, is_mme, model_name) = check_trainingjob_data(trainingjob_name, json_data)
+                    datalake_source, is_mme, model_name)= check_trainingjob_data(trainingjob_name, json_data)
                 if is_mme:
                     featuregroup_name=results[0][2]
                     pipeline_name, pipeline_version=results[0][3], results[0][13]
                 # model name is not changing hence model info is unchanged.
-                model_info = results[0][22]
+                model_info = results[0][20]
                 add_update_trainingjob(description, pipeline_name, experiment_name, featuregroup_name,
-                                        arguments, query_filter, False, enable_versioning,
-                                        pipeline_version, datalake_source, trainingjob_name, PS_DB_OBJ, _measurement=_measurement,
-                                        bucket=bucket, is_mme=is_mme, model_name=model_name, model_info=model_info)
+                                    arguments, query_filter, False, enable_versioning,
+                                    pipeline_version, datalake_source, trainingjob_name, 
+                                    PS_DB_OBJ,is_mme=is_mme, model_name=model_name, model_info=model_info)
                 api_response = {"result": "Information updated in database."}
                 response_code = status.HTTP_200_OK
     except Exception as err: