Feature group changes 49/12549/3
author    rajdeep11 <rajdeep.sin@samsung.com>
Tue, 20 Feb 2024 09:20:25 +0000 (14:50 +0530)
committer rajdeep11 <rajdeep.sin@samsung.com>
Wed, 6 Mar 2024 10:34:34 +0000 (16:04 +0530)
Changes made to add the _measurement field to feature group handling.

Issue-id: AIMLFW-76
Change-Id: Idcdba5ad7a2f3b1c09b20dc18f1d9570e2d294db
Signed-off-by: rajdeep11 <rajdeep.sin@samsung.com>
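
For illustration only (not part of this change): a feature group creation
payload that includes the new "_measurement" key, assembled from the request
fields exercised by the updated tests below. The endpoint path and Flask
test-client usage follow the existing tests in tests/test_tm_apis.py, and
concrete values such as "testing_fg" and "pm_data" are placeholders.

import json

feature_group_request = {
    "featureGroupName": "testing_fg",           # placeholder name
    "feature_list": "pdcpBytesDl,pdcpBytesUl",
    "datalake_source": "InfluxSource",
    "enable_Dme": False,
    "Host": "",
    "Port": "",
    "dmePort": "",
    "bucket": "",
    "_measurement": "pm_data",                  # new field added by this change
    "token": "",
    "source_name": "",
    "measured_obj_class": "",
    "dbOrg": ""
}

# With the Flask test client used in the tests
# (client = trainingmgr_main.APP.test_client()); the POST route is assumed
# from the surrounding featureGroup endpoints:
# response = client.post('/featureGroup', data=json.dumps(feature_group_request),
#                        content_type="application/json")
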
tests/test_common_db_fun.py
tests/test_tm_apis.py
tests/test_trainingmgr_util.py
trainingmgr/common/trainingmgr_util.py
trainingmgr/db/common_db_fun.py
trainingmgr/db/trainingmgr_ps_db.py
trainingmgr/trainingmgr_main.py

diff --git a/tests/test_common_db_fun.py b/tests/test_common_db_fun.py
index 8dfd91b..832db79 100644
@@ -521,14 +521,14 @@ class Test_Common_Db_Fun:
     def test_add_featuregroup(self):
         checker = Check()
         db_obj = db_helper_fg([[None]], check_success_obj=checker)
-        add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
+        add_featuregroup('Testing', '', '', '','','','','', '','',db_obj, '', '', '')
         assert checker.finished, 'add_featuregroup FAILED when dme true'      
 
     def test_negative_add_featuregroup(self):
         checker = Check()
         try:
             db_obj = db_helper_fg([[None]], raise_exception=True, check_success_obj=checker)
-            add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
+            add_featuregroup('Testing', '', '', '','','','','', '','',db_obj,'', '', '')
             assert False
         except Exception as err:
             fxn_name = "add_featuregroup"
diff --git a/tests/test_tm_apis.py b/tests/test_tm_apis.py
index 339285c..816014e 100644
@@ -999,7 +999,7 @@ class Test_create_featuregroup:
         self.client = trainingmgr_main.APP.test_client(self)
         self.logger = trainingmgr_main.LOGGER
     
-    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','')
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','', '')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
@@ -1012,6 +1012,7 @@ class Test_create_featuregroup:
                                  "Port":"",
                                  "dmePort":"",
                                  "bucket":"",
+                                 "_measurement":"",
                                  "token":"",
                                  "source_name":"",
                                  "measured_obj_class":"",
@@ -1028,7 +1029,7 @@ class Test_create_featuregroup:
     the_response1.headers={"content-type": "application/json"}
     the_response1._content = b''
     mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
-    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
@@ -1044,6 +1045,7 @@ class Test_create_featuregroup:
                             "host": "",
                             "port": "",
                             "bucket": "",
+                            "_measurement":"",
                             "dmePort":"",
                             "token": "",
                             "source_name": "",
@@ -1062,7 +1064,7 @@ class Test_create_featuregroup:
     the_response2.headers={"content-type": "application/json"}
     the_response2._content = b''
     mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
-    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup')
@@ -1078,6 +1080,7 @@ class Test_create_featuregroup:
                             "host": "",
                             "port": "",
                             "bucket": "",
+                            "_measurement":"",
                             "dmePort":"",
                             "token": "",
                             "source_name": "",
@@ -1092,7 +1095,7 @@ class Test_create_featuregroup:
         assert response.status_code ==status.HTTP_400_BAD_REQUEST, "Return status code not equal"
 
 
-    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
     @patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
@@ -1106,6 +1109,7 @@ class Test_create_featuregroup:
                             "host": "",
                             "port": "",
                             "bucket": "",
+                            "_measurement":"",
                             "dmePort":"",
                             "token": "",
                             "source_name": "",
@@ -1119,7 +1123,7 @@ class Test_create_featuregroup:
         assert response.data==expected_response
         assert response.status_code ==status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"  
 
-    feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+    feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
     @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
     @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=True)
     def test_neagtive_create_featuregroup_3(self, mock1, mock2):
@@ -1132,6 +1136,7 @@ class Test_create_featuregroup:
                             "port": "",
                             "bucket": "",
                             "dmePort":"",
+                            "_measurement":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
@@ -1183,7 +1188,6 @@ class Test_get_feature_group_by_name:
         expected_data=b'{"Exception": "Failed to fetch feature group info from db"}'
         fg_name='testing'
         response=self.client.get('/featureGroup/{}'.format(fg_name))
-        print(response.data)
         assert response.status_code == 404 , "status code is not equal"
         assert response.data == expected_data
     
@@ -1230,7 +1234,6 @@ class Test_delete_list_of_feature_group:
         delete_req=delete_req={"featuregroups_list":[{"featureGroup_name":"testing_hash"}]}
         expected_response=b'{"Exception": "Wrong Request syntax"}'
         response=self.client.delete('/featureGroup', data=json.dumps(delete_req), content_type="application/json")
-        print("response data", response.data)
         assert response.data==expected_response
         assert response.status_code==400, "status code not equal"
     
@@ -1317,7 +1320,6 @@ class Test_delete_list_of_trainingjob_version:
         delete_req={"list":[{"trainingjob_name":"testing_dme_02","version":1}]}
         expected_response=b'{"Exception": "Wrong Request syntax"}'
         response=self.client.delete('/trainingjobs', data=json.dumps(delete_req), content_type="application/json")
-        print("response data", response.data)
         assert response.data==expected_response
         assert response.status_code==400, "status code not equal"
 
@@ -1327,7 +1329,6 @@ class Test_delete_list_of_trainingjob_version:
         delete_req={"list":[{"trainingjob_name":"testing_dme_02","version":1}]}
         expected_response=b'{"Exception": "not given as list"}'
         response=self.client.delete('/trainingjobs', data=json.dumps(delete_req), content_type="application/json")
-        print("response data", response.data)
         assert response.data==expected_response
         assert response.status_code==400, "status code not equal"
     
diff --git a/tests/test_trainingmgr_util.py b/tests/test_trainingmgr_util.py
index e655c24..6be7639 100644
@@ -563,12 +563,13 @@ class Test_check_feature_group_data:
                             "Port": "",
                             "bucket": "",
                             "dmePort":"",
+                            '_measurement':"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
                             "dbOrg": ""
                                 }
-        expected_data=("test", "", "",False,"","","","","","","","")
+        expected_data=("test", "", "",False,"","","","","","","","","")
         assert check_feature_group_data(json_data)==expected_data, "data not equal"
 
     @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=False)
@@ -581,13 +582,14 @@ class Test_check_feature_group_data:
                             "Host": "",
                             "Port": "",
                             "bucket": "",
+                            '_measurement':"",
                             "dmePort":"",
                             "token": "",
                             "source_name": "",
                             "measured_obj_class":"",
                             "dbOrg": ""
                                 }
-        expected_data=("test", "", "",False,"","","","","","","","")
+        expected_data=("test", "", "",False,"","","","","","","","","")
         try:
             assert check_feature_group_data(json_data)==expected_data, 'data not equal'
             assert False
diff --git a/trainingmgr/common/trainingmgr_util.py b/trainingmgr/common/trainingmgr_util.py
index e3e7054..0b2a664 100644
@@ -169,11 +169,12 @@ def check_feature_group_data(json_data):
     try:
         if check_key_in_dictionary(["featureGroupName", "feature_list", \
                                     "datalake_source", "enable_Dme", "Host", 
-                                    "Port", "dmePort","bucket", "token", "source_name", "measured_obj_class"], json_data):
+                                    "Port", "dmePort","bucket", "token", "source_name", "measured_obj_class", "_measurement"], json_data):
             feature_group_name=json_data["featureGroupName"]
             features=json_data["feature_list"]
             datalake_source=json_data["datalake_source"]
             enable_dme=json_data["enable_Dme"]
+            measurement = json_data["_measurement"]
             host=json_data["Host"]
             port=json_data["Port"]
             dme_port=json_data["dmePort"]
@@ -189,7 +190,7 @@ def check_feature_group_data(json_data):
     except Exception as err:
         raise APIException(status.HTTP_400_BAD_REQUEST, str(err)) from None
     
-    return (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port, bucket, token, source_name,db_org, measured_obj_class)
+    return (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port, bucket, token, source_name,db_org, measured_obj_class, measurement)
 
 def get_one_key(dictionary):
     '''
diff --git a/trainingmgr/db/common_db_fun.py b/trainingmgr/db/common_db_fun.py
index 042b560..ae069b4 100644
@@ -558,7 +558,7 @@ def add_update_trainingjob(description, pipeline_name, experiment_name, feature_
         if conn is not None:
             conn.close()
 
-def add_featuregroup(feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, ps_db_obj, measured_obj_class="", dme_port="", source_name=""):
+def add_featuregroup(feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org,_measurement, enable_dme, ps_db_obj, measured_obj_class="", dme_port="", source_name=""):
     """
     This function add the new row or update existing row with given information
     """
@@ -569,8 +569,8 @@ def add_featuregroup(feature_group_name, feature_list, datalake_source , host, p
     
     try:
         cursor.execute(''' INSERT INTO {} VALUES '''.format(fg_table_name) +
-                       '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s)''',
-                       (feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, measured_obj_class, dme_port, source_name))
+                       '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s,%s)''',
+                       (feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org,_measurement,enable_dme, measured_obj_class, dme_port, source_name))
         conn.commit()
         cursor.close()
     except Exception as err:
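
A minimal sketch (not part of the diff) of a call to the updated
add_featuregroup, showing where the new _measurement argument sits in the
positional order; this is why the tests above now pass one extra '' before the
db object. ps_db_obj is an assumed PSDB instance and the other values are
placeholders.

add_featuregroup(
    feature_group_name="testing_fg",
    feature_list="pdcpBytesDl,pdcpBytesUl",
    datalake_source="InfluxSource",
    host="127.0.0.1",
    port="31823",
    bucket="pm-bucket",
    token="",
    db_org="",
    _measurement="",         # new parameter introduced by this change
    enable_dme=True,
    ps_db_obj=ps_db_obj,     # assumed PSDB connection wrapper
    measured_obj_class="",
    dme_port="",
    source_name="",
)
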
diff --git a/trainingmgr/db/trainingmgr_ps_db.py b/trainingmgr/db/trainingmgr_ps_db.py
index a057c9d..ab7676a 100644
@@ -137,6 +137,7 @@ class PSDB():
                         "bucket varchar(128) NOT NULL," + \
                         "token varchar(2000) NOT NULL," + \
                         "db_org varchar(128) NOT NULL," + \
+                        "_measurement varchar(100) NOT NULL," + \
                         "enable_dme BOOLEAN NOT NULL," + \
                         "measured_obj_class varchar(128) NOT NULL," + \
                         "dme_port varchar(128) NOT NULL," + \
diff --git a/trainingmgr/trainingmgr_main.py b/trainingmgr/trainingmgr_main.py
index 2668c3a..5ee1375 100644
@@ -1352,8 +1352,10 @@ def create_feature_group():
                     db port
                 token: str
                     token for the bucket
-                db org:
+                db org: str
                     db org name
+                measurement: str
+                    measurement of the influxdb
                 enable_Dme: boolean
                     whether to enable dme
                 source_name: str
@@ -1388,7 +1390,7 @@ def create_feature_group():
 
     try:
         json_data=request.json
-        (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port,bucket, token, source_name,db_org, measured_obj_class)=check_feature_group_data(json_data)
+        (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port,bucket, token, source_name,db_org, measured_obj_class, measurement)=check_feature_group_data(json_data)
         # check the data conformance
         LOGGER.debug("the db info is : ", get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name))
         if (not check_trainingjob_name_or_featuregroup_name(feature_group_name) or
@@ -1399,7 +1401,7 @@ def create_feature_group():
         else:
             # the features are stored in string format in the db, and has to be passed as list of feature to the dme. Hence the conversion.
             features_list = features.split(",")
-            add_featuregroup(feature_group_name, features, datalake_source , host, port, bucket, token, db_org, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
+            add_featuregroup(feature_group_name, features, datalake_source , host, port, bucket, token, db_org, measurement, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
             api_response={"result": "Feature Group Created"}
             response_code =status.HTTP_200_OK
             if enable_dme == True :