Changes in the feature group 42/11842/3
author rajdeep11 <rajdeep.sin@samsung.com>
Tue, 3 Oct 2023 09:48:53 +0000 (15:18 +0530)
committer rajdeep11 <rajdeep.sin@samsung.com>
Wed, 4 Oct 2023 06:59:03 +0000 (12:29 +0530)
Issue-Id: AIMLFW-58

Change-Id: I9f7441b59622ecab7b5eb910d8b1d6c9bed92c58
Signed-off-by: rajdeep11 <rajdeep.sin@samsung.com>
tests/test_common_db_fun.py
tests/test_tm_apis.py
tests/test_trainingmgr_util.py
trainingmgr/common/trainingmgr_util.py
trainingmgr/db/common_db_fun.py
trainingmgr/trainingmgr_main.py

index 471d491..6882d1e 100644 (file)
@@ -518,14 +518,14 @@ class Test_Common_Db_Fun:
     def test_add_featuregroup(self):
         checker = Check()
         db_obj = db_helper_fg([[None]], check_success_obj=checker)
-        add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+        add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
         assert checker.finished, 'add_featuregroup FAILED when dme true'      
 
     def test_negative_add_featuregroup(self):
         checker = Check()
         try:
             db_obj = db_helper_fg([[None]], raise_exception=True, check_success_obj=checker)
-            add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+            add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
             assert False
         except Exception as err:
             fxn_name = "add_featuregroup"
index b7d565e..5292a39 100644 (file)
@@ -851,24 +851,23 @@ class Test_create_featuregroup:
         self.client = trainingmgr_main.APP.test_client(self)
         self.logger = trainingmgr_main.LOGGER
     
-    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '')
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
     def test_create_featuregroup_1(self, mock1, mock2, mock3):
-        create_featuregroup_req={
-                            "featureGroupName": "testing_hash",
-                            "feature_list": "pdcpBytesDl,pdcpBytesUl",
-                            "datalake_source": "InfluxSource",
-                            "enable_Dme": False,
-                            "DmeHost": "",
-                            "DmePort": "",
-                            "bucket": "",
-                            "token": "",
-                            "source_name": "",
-                            "measured_obj_class":"",
-                            "dbOrg": ""
-                                }
+        create_featuregroup_req={"featureGroupName":"testing_hash",
+                                 "feature_list":"pdcpBytesDl,pdcpBytesUl",
+                                 "datalake_source":"InfluxSource",
+                                 "enable_Dme":False,
+                                 "Host":"",
+                                 "Port":"",
+                                 "dmePort":"",
+                                 "bucket":"",
+                                 "token":"",
+                                 "source_name":"",
+                                 "measured_obj_class":"",
+                                 "dbOrg":""}
         expected_response=b'{"result": "Feature Group Created"}'
         response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
                                   content_type="application/json")
@@ -881,7 +880,7 @@ class Test_create_featuregroup:
     the_response1.headers={"content-type": "application/json"}
     the_response1._content = b''
     mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
-    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
@@ -894,9 +893,10 @@ class Test_create_featuregroup:
                             "feature_list": "pdcpBytesDl,pdcpBytesUl",
                             "datalake_source": "InfluxSource",
                             "enable_Dme": True,
-                            "DmeHost": "",
-                            "DmePort": "",
+                            "host": "",
+                            "port": "",
                             "bucket": "",
+                            "dmePort":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
@@ -914,7 +914,7 @@ class Test_create_featuregroup:
     the_response2.headers={"content-type": "application/json"}
     the_response2._content = b''
     mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
-    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
@@ -927,9 +927,10 @@ class Test_create_featuregroup:
                             "feature_list": "pdcpBytesDl,pdcpBytesUl",
                             "datalake_source": "InfluxSource",
                             "enable_Dme": True,
-                            "DmeHost": "",
-                            "DmePort": "",
+                            "host": "",
+                            "port": "",
                             "bucket": "",
+                            "dmePort":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
@@ -943,7 +944,7 @@ class Test_create_featuregroup:
         assert response.status_code ==status.HTTP_400_BAD_REQUEST, "Return status code not equal"
 
 
-    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
@@ -954,9 +955,10 @@ class Test_create_featuregroup:
                             "feature_list": "pdcpBytesDl,pdcpBytesUl",
                             "datalake_source": "InfluxSource",
                             "enable_Dme": False,
-                            "DmeHost": "",
-                            "DmePort": "",
+                            "host": "",
+                            "port": "",
                             "bucket": "",
+                            "dmePort":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
@@ -969,7 +971,7 @@ class Test_create_featuregroup:
         assert response.data==expected_response
         assert response.status_code ==status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"  
 
-    feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+    feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=True)
     def test_neagtive_create_featuregroup_3(self, mock1, mock2):
@@ -978,9 +980,10 @@ class Test_create_featuregroup:
                             "feature_list": "pdcpBytesDl,pdcpBytesUl",
                             "datalake_source": "InfluxSource",
                             "enable_Dme": False,
-                            "DmeHost": "",
-                            "DmePort": "",
+                            "host": "",
+                            "port": "",
                             "bucket": "",
+                            "dmePort":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
@@ -998,7 +1001,7 @@ class Test_get_feature_group:
         self.client = trainingmgr_main.APP.test_client(self)
         self.logger = trainingmgr_main.LOGGER
 
-    result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
+    result=[('testing', '', 'InfluxSource', '', '', '', '', '', True, '', '', '')]
     @patch('trainingmgr.trainingmgr_main.get_feature_groups_db', return_value=result)
     def test_get_feature_group(self,mock1):
         expected_data=b'{"featuregroups": [{"featuregroup_name": "testing", "features": "", "datalake": "InfluxSource", "dme": true}]}'
@@ -1018,10 +1021,10 @@ class Test_get_feature_group_by_name:
         self.client = trainingmgr_main.APP.test_client(self)
         self.logger = trainingmgr_main.LOGGER
 
-    result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '','')]
+    result=[('testing', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', '', '', '', '')]
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=result)
     def test_get_feature_group_by_name(self, mock1):
-        expected_data=b'{"featuregroup": [{"featuregroup_name": "testing", "features": [""], "datalake": "InfluxSource", "dme": true, "dme_host": "21.0.0.21", "measured_obj_class": "12345", "dme_port": "", "bucket": "", "token": "", "source_name": "", "db_org": ""}]}'
+        expected_data=b'{"featuregroup": [{"featuregroup_name": "testing", "features": "", "datalake": "InfluxSource", "dme": "", "host": "127.0.0.21", "measured_obj_class": "", "port": "8080", "dme_port": "", "bucket": "", "token": "", "source_name": "", "db_org": ""}]}'
         fg_name='testing'
         response=self.client.get('/featureGroup/{}'.format(fg_name))
         assert response.status_code == 200 , "status code is not equal"
@@ -1061,7 +1064,7 @@ class Test_delete_list_of_feature_group:
     mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
     resp=Response()
     resp.status_code=status.HTTP_204_NO_CONTENT
-    the_result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
+    the_result=[('testing_hash', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', False, '', '', '')]
     @patch('trainingmgr.trainingmgr_main.check_key_in_dictionary', return_value=True)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=the_result)
     @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
@@ -1122,11 +1125,9 @@ class Test_delete_list_of_feature_group:
         assert response.data==expected_response
         assert response.status_code==200, "status code not equal"
 
-    the_result2=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
-    resp2=Response()
-    resp2.status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
+    the_result2=[('testing_hash', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', False, '', '', '')]
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=the_result2)
-    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name', return_value=resp2)
+    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name', side_effect=Exception("Mocked Error"))
     def test_negative_delete_list_of_feature_group_7(self, mock1, mock2):
         delete_req=delete_req={"featuregroups_list":[{"featureGroup_name":"testing_hash"}]}
         expected_response=b'{"success count": 0, "failure count": 1}'
index b4c519a..3c33253 100644 (file)
@@ -559,34 +559,35 @@ class Test_check_feature_group_data:
                             "feature_list": "",
                             "datalake_source": "",
                             "enable_Dme": False,
-                            "DmeHost": "",
-                            "DmePort": "",
+                            "Host": "",
+                            "Port": "",
                             "bucket": "",
+                            "dmePort":"",
                             "token": "",
                             "source_name": "",
-                            "dbOrg": "", 
-                            "measured_obj_class":""
-
+                            "measured_obj_class":"",
+                            "dbOrg": ""
                                 }
-        expected_data=("test", "", "",False,"","","","","","","")
+        expected_data=("test", "", "",False,"","","","","","","","")
         assert check_feature_group_data(json_data)==expected_data, "data not equal"
 
     @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=False)
     def test_negative_check_feature_group_data(self, mock1):
         json_data={
-                "featureGroupName": "test",
-                "feature_list": "",
-                "datalake_source": "",
-                "enable_Dme": False,
-                "DmeHost": "",
-                "DmePort": "",
-                "bucket": "",
-                "token": "",
-                "source_name": "",
-                "dbOrg": "",
-                "measured_obj_class":""
-                    }
-        expected_data=("test", "", "",False,"","","","","","","")
+                            "featureGroupName": "test",
+                            "feature_list": "",
+                            "datalake_source": "",
+                            "enable_Dme": False,
+                            "Host": "",
+                            "Port": "",
+                            "bucket": "",
+                            "dmePort":"",
+                            "token": "",
+                            "source_name": "",
+                            "measured_obj_class":"",
+                            "dbOrg": ""
+                                }
+        expected_data=("test", "", "",False,"","","","","","","","")
         try:
             assert check_feature_group_data(json_data)==expected_data, 'data not equal'
             assert False
index 37e6153..cdf62b7 100644 (file)
@@ -166,20 +166,20 @@ def check_feature_group_data(json_data):
     """
     try:
         if check_key_in_dictionary(["featureGroupName", "feature_list", \
-                                    "datalake_source", "enable_Dme", "DmeHost", 
-                                    "DmePort", "bucket", "token", "source_name", "measured_obj_class"], json_data):
+                                    "datalake_source", "enable_Dme", "Host", 
+                                    "Port", "dmePort","bucket", "token", "source_name", "measured_obj_class"], json_data):
             feature_group_name=json_data["featureGroupName"]
             features=json_data["feature_list"]
             datalake_source=json_data["datalake_source"]
             enable_dme=json_data["enable_Dme"]
-            dme_host=json_data["DmeHost"]
-            dme_port=json_data["DmePort"]
+            host=json_data["Host"]
+            port=json_data["Port"]
+            dme_port=json_data["dmePort"]
             bucket=json_data["bucket"]
             token=json_data["token"]
             source_name=json_data["source_name"]
             db_org=json_data["dbOrg"]
             measured_obj_class = json_data["measured_obj_class"]
-            
         else :
             raise TMException("check_featuregroup_data- supplied data doesn't have" + \
                                 " all the required fields ")
@@ -187,7 +187,7 @@ def check_feature_group_data(json_data):
     except Exception as err:
         raise APIException(status.HTTP_400_BAD_REQUEST, str(err)) from None
     
-    return (feature_group_name, features, datalake_source, enable_dme, dme_host, dme_port, bucket, token, source_name,db_org, measured_obj_class)
+    return (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port, bucket, token, source_name,db_org, measured_obj_class)
 
 def get_one_key(dictionary):
     '''
index 9248158..5945753 100644 (file)
@@ -558,7 +558,7 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
         if conn is not None:
             conn.close()
 
-def add_featuregroup(feature_group_name, feature_list, datalake_source, enable_dme, ps_db_obj, measured_obj_class="" , dmehost="", dmeport="", bucket="", token="", source_name="",db_org=""):
+def add_featuregroup(feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, ps_db_obj, measured_obj_class="", dme_port="", source_name=""):
     """
     This function add the new row or update existing row with given information
     """
@@ -569,8 +569,8 @@ def add_featuregroup(feature_group_name, feature_list, datalake_source, enable_d
     
     try:
         cursor.execute(''' INSERT INTO {} VALUES '''.format(fg_table_name) +
-                       '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s)''',
-                       (feature_group_name, feature_list, datalake_source, enable_dme, dmehost, measured_obj_class, dmeport, bucket, token, source_name, db_org))
+                       '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s)''',
+                       (feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, measured_obj_class, dme_port, source_name))
         conn.commit()
         cursor.close()
     except Exception as err:
index 6abc067..fa6b451 100644 (file)
@@ -1320,10 +1320,9 @@ def create_feature_group():
 
     try:
         json_data=request.json
-        (feature_group_name, features, datalake_source, enable_dme, dme_host, dme_port,bucket, token, source_name,db_org, measured_obj_class)=check_feature_group_data(json_data)
+        (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port,bucket, token, source_name,db_org, measured_obj_class)=check_feature_group_data(json_data)
         # check the data conformance
         LOGGER.debug("the db info is : ", get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name))
-
         if (not check_trainingjob_name_or_featuregroup_name(feature_group_name) or
             len(feature_group_name) < 3 or len(feature_group_name) > 63 or
             get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name)):
@@ -1332,9 +1331,11 @@ def create_feature_group():
         else:
             # the features are stored in string format in the db, and has to be passed as list of feature to the dme. Hence the conversion.
             features_list = features.split(",")
-            add_featuregroup(feature_group_name, features, datalake_source, enable_dme, PS_DB_OBJ,measured_obj_class,dme_host, dme_port, bucket, token, source_name,db_org )
+            add_featuregroup(feature_group_name, features, datalake_source , host, port, bucket, token, db_org, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
+            api_response={"result": "Feature Group Created"}
+            response_code =status.HTTP_200_OK
             if enable_dme == True :
-                response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, features_list, feature_group_name, dme_host, dme_port, measured_obj_class)
+                response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, features_list, feature_group_name, host, dme_port, measured_obj_class)
                 if response.status_code != 201:
                     api_response={"Exception": "Cannot create dme job"}
                     delete_feature_group_by_name(PS_DB_OBJ, feature_group_name)
@@ -1390,7 +1391,7 @@ def get_feature_group():
                 "featuregroup_name": res[0],
                 "features": res[1],
                 "datalake": res[2],
-                "dme": res[3]                
+                "dme": res[8]                
                 }
             feature_groups.append(dict_data)
         api_response={"featuregroups":feature_groups}
@@ -1453,19 +1454,19 @@ def get_feature_group_by_name(featuregroup_name):
         feature_group=[]
         if result:
             for res in result:
-                features=res[1].split(",")
                 dict_data={
                     "featuregroup_name": res[0],
-                    "features": features,
+                    "features": res[1],
                     "datalake": res[2],
-                    "dme": res[3],
-                    "dme_host": res[4],
-                    "measured_obj_class":res[5],
-                    "dme_port": res[6],
-                    "bucket":res[7],
-                    "token":res[8],
-                    "source_name":res[9],
-                    "db_org":res[10]
+                    "dme": res[8],
+                    "host": res[3],
+                    "measured_obj_class":res[9],
+                    "port": res[4],
+                    "dme_port":res[10],
+                    "bucket":res[5],
+                    "token":res[6],
+                    "source_name":res[11],
+                    "db_org":res[7]
                 }
                 feature_group.append(dict_data)
             api_response={"featuregroup":feature_group}
@@ -1539,12 +1540,12 @@ def delete_list_of_feature_group():
             continue
 
         if results:
-            dme=results[0][3]
+            dme=results[0][8]
             try:
                 delete_feature_group_by_name(PS_DB_OBJ, featuregroup_name)
                 if dme :
-                    dme_host=results[0][4]
-                    dme_port=results[0][6]
+                    dme_host=results[0][3]
+                    dme_port=results[0][10]
                     resp=delete_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, featuregroup_name, dme_host, dme_port)
                     if(resp.status_code !=status.HTTP_204_NO_CONTENT):
                         not_possible_to_delete.append(my_dict)