From: rajdeep11
Date: Thu, 11 May 2023 09:16:29 +0000 (+0530)
Subject: Unit test for create_feature_group
X-Git-Tag: 1.1.0~13
X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=commitdiff_plain;h=571d9a1c5fd65de8c2d5272ccc137f30f1a0d5dd;p=aiml-fw%2Fawmf%2Ftm.git

Unit test for create_feature_group

Issue-Id: AIMLFW-45
Change-Id: I2abd10f791218cda2adaff7526ed5a623e71a992
Signed-off-by: rajdeep11
---

diff --git a/tests/test_common_db_fun.py b/tests/test_common_db_fun.py
index eae43c1..d5740cd 100644
--- a/tests/test_common_db_fun.py
+++ b/tests/test_common_db_fun.py
@@ -28,7 +28,8 @@ from trainingmgr.db.common_db_fun import get_data_extraction_in_progress_trainin
     get_trainingjob_info_by_name, get_latest_version_trainingjob_name, \
     get_all_versions_info_by_name, get_all_distinct_trainingjobs, \
     get_all_version_num_by_trainingjob_name, update_model_download_url, \
-    add_update_trainingjob, get_all_jobs_latest_status_version, get_info_of_latest_version
+    add_update_trainingjob, get_all_jobs_latest_status_version, get_info_of_latest_version, \
+    add_featuregroup
 
 mimic_db = {
     "usecase_name": "Tester",
@@ -57,6 +58,19 @@ mimic_db = {
     "accuracy": 70
 }
 
+mimic_fg_db = {
+    "featureGroupName": "testing_hash",
+    "feature_list": "",
+    "datalake_source": "InfluxSource",
+    "enable_Dme": True,
+    "DmeHost": "",
+    "DmePort": "",
+    "bucket": "pm-bucket",
+    "token": "",
+    "source_name": "qoedataset0905202305",
+    "dbOrg": "est"
+}
+
 class db_helper:
     '''Mimics as a Db'''
     def __init__(self, req_cols, raise_exception = False, check_success_obj = None):
@@ -101,6 +115,50 @@ class db_helper:
     def commit(self):
         pass
 
+class db_helper_fg:
+    '''Mimics a DB for feature-group queries'''
+    def __init__(self, req_cols, raise_exception = False, check_success_obj = None):
+        self.cols = req_cols
+        self.raise_exception = raise_exception
+        self.check_success_obj = check_success_obj
+        self.counter = 0
+
+    def get_new_conn(self):
+        return db_helper_fg(self.cols, self.raise_exception, self.check_success_obj)
+
+    def cursor(self):
+        return db_helper_fg(self.cols, self.raise_exception, self.check_success_obj)
+
+    def execute(self, query, values = None):
+        if self.raise_exception:
+            raise Exception("DB Error")
+
+    def fetchall(self):
+        out = []
+        if len(self.cols) > 0:
+            if self.cols[self.counter][0] == "*":
+                for (col, value) in mimic_fg_db.items():
+                    out.append(value)
+            elif self.cols[self.counter][0] is None:
+                self.counter += 1
+                return None
+            else:
+                for col in self.cols[self.counter]:
+                    out.append(mimic_fg_db[col])
+            self.counter += 1
+        return [out]
+
+    def close(self):
+        '''For functions that return nothing: if close() gets called, the query executed as expected.'''
+        if self.check_success_obj:
+            self.check_success_obj.setwin()
+
+    def rollback(self):
+        pass
+
+    def commit(self):
+        pass
+
 class Check:
     def __init__(self):
         self.finished = False
@@ -455,4 +513,20 @@ class Test_Common_Db_Fun:
         except Exception as err:
             fxn_name = "get_info_by_version"
             assert str(err) == "DB Error", 'Negative test {} FAILED, Doesnt returned required error'.format(fxn_name)
-        assert checker.finished, 'Cursor Not Closed Properly for fxn {} | Negative Test'.format(fxn_name)
\ No newline at end of file
+        assert checker.finished, 'Cursor Not Closed Properly for fxn {} | Negative Test'.format(fxn_name)
+
+    def test_add_featuregroup(self):
+        checker = Check()
+        db_obj = db_helper_fg([[None]], check_success_obj=checker)
+        add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+        assert checker.finished, 'add_featuregroup FAILED when enable_Dme is True'
+
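# --- Editor's note: illustrative sketch, not part of this change ------------
# The hand-rolled db_helper_fg fake above passes when the code under test
# closes its cursor/connection, which is what Check.setwin() records. The same
# interaction can also be verified with unittest.mock; everything below
# (fake_add_featuregroup, the table name) is hypothetical and only illustrates
# the call pattern the fake assumes.

from unittest import mock

def sketch_verify_cursor_closed():
    conn = mock.MagicMock(name="db_connection")
    cursor = conn.get_new_conn.return_value.cursor.return_value

    def fake_add_featuregroup(db_obj):
        # Hypothetical stand-in for the code under test: any function that
        # follows the get_new_conn/cursor/execute/commit/close pattern.
        new_conn = db_obj.get_new_conn()
        cur = new_conn.cursor()
        try:
            cur.execute("INSERT INTO featuregroup_info VALUES (%s)", ("Testing",))
            new_conn.commit()
        finally:
            cur.close()

    fake_add_featuregroup(conn)
    cursor.execute.assert_called_once()
    cursor.close.assert_called_once()
# -----------------------------------------------------------------------------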
+    def test_negative_add_featuregroup(self):
+        checker = Check()
+        try:
+            db_obj = db_helper_fg([[None]], raise_exception=True, check_success_obj=checker)
+            add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+            assert False
+        except Exception as err:
+            fxn_name = "add_featuregroup"
+            assert str(err)=="Failed to execute query in {}DB Error".format(fxn_name)
\ No newline at end of file
diff --git a/tests/test_tm_apis.py b/tests/test_tm_apis.py
index aeca8bc..802cc8f 100644
--- a/tests/test_tm_apis.py
+++ b/tests/test_tm_apis.py
@@ -788,3 +788,119 @@ class Test_retraining:
         data=json.loads(response.data)
         assert response.status_code == status.HTTP_200_OK, "Return status code NOT equal"
         assert data["failure count"]==1, "Return failure count NOT equal"
+
+
+class Test_create_featuregroup:
+    def setup_method(self):
+        self.client = trainingmgr_main.APP.test_client(self)
+        self.logger = trainingmgr_main.LOGGER
+
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','')
+    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
+    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
+    def test_create_featuregroup_1(self, mock1, mock2):
+        create_featuregroup_req={
+            "featureGroupName": "testing_hash",
+            "feature_list": "pdcpBytesDl,pdcpBytesUl",
+            "datalake_source": "InfluxSource",
+            "enable_Dme": False,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_response=b'{"result": "Feature Group Created"}'
+        response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
+                                  content_type="application/json")
+        trainingmgr_main.LOGGER.debug(response.data)
+        assert response.data==expected_response
+        assert response.status_code ==status.HTTP_200_OK, "Return status code not equal"
+
+    the_response1 = Response()
+    the_response1.status_code = status.HTTP_201_CREATED
+    the_response1.headers={"content-type": "application/json"}
+    the_response1._content = b''
+    mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
+    feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
+    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
+    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
+    @patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response1)
+    @patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value = mocked_TRAININGMGR_CONFIG_OBJ)
+    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
+    def test_create_featuregroup_2(self, mock1, mock2, mock3, mock4, mock5):
+        create_featuregroup_req={
+            "featureGroupName": "testing_hash",
+            "feature_list": "pdcpBytesDl,pdcpBytesUl",
+            "datalake_source": "InfluxSource",
+            "enable_Dme": True,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_response=b'{"result": "Feature Group Created"}'
+        response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
+                                  content_type="application/json")
+        trainingmgr_main.LOGGER.debug(response.data)
+        assert response.data==expected_response
+        assert response.status_code ==status.HTTP_200_OK, "Return status code not equal"
+
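# --- Editor's note: illustrative sketch, not part of this change ------------
# Reading the stacked @patch decorators above: patches are applied bottom-up,
# so the decorator closest to the test method supplies the first mock argument
# (mock1) and the top-most decorator supplies the last one. A self-contained
# example of that ordering using only the standard library:

import json as _json
from unittest import mock

@mock.patch("json.loads")   # outer-most patch -> last mock argument
@mock.patch("json.dumps")   # inner-most patch -> first mock argument
def sketch_patch_order(m_dumps, m_loads):
    assert _json.dumps is m_dumps   # json.dumps is replaced by the first mock
    assert _json.loads is m_loads   # json.loads is replaced by the last mock

sketch_patch_order()
# -----------------------------------------------------------------------------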
+    the_response2 = Response()
+    the_response2.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
+    the_response2.headers={"content-type": "application/json"}
+    the_response2._content = b''
+    mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
+    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
+    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
+    @patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response2)
+    @patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value = mocked_TRAININGMGR_CONFIG_OBJ)
+    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
+    def test_negative_create_featuregroup_1(self, mock1, mock2, mock3, mock4, mock5):
+        create_featuregroup_req={
+            "featureGroupName": "testing_hash",
+            "feature_list": "pdcpBytesDl,pdcpBytesUl",
+            "datalake_source": "InfluxSource",
+            "enable_Dme": True,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_response=b'{"Exception": "Cannot create dme job"}'
+        response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
+                                  content_type="application/json")
+        trainingmgr_main.LOGGER.debug(response.data)
+        assert response.data==expected_response
+        assert response.status_code ==status.HTTP_400_BAD_REQUEST, "Return status code not equal"
+
+
+    feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
+    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
+    @patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
+    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
+    def test_negative_create_featuregroup_2(self, mock1, mock2, mock3):
+        create_featuregroup_req={
+            "featureGroupName": "testing_hash",
+            "feature_list": "pdcpBytesDl,pdcpBytesUl",
+            "datalake_source": "InfluxSource",
+            "enable_Dme": False,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_response=b'{"Exception": "Failed to create the feature Group "}'
+        response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
+                                  content_type="application/json")
+        trainingmgr_main.LOGGER.debug(response.data)
+        assert response.data==expected_response
+        assert response.status_code ==status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"
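# --- Editor's note: illustrative sketch, not part of this change ------------
# The tests above and below hand-build canned requests.Response objects by
# setting status_code, headers, and the private _content attribute. A minimal
# sketch of why that is enough: Response derives .text/.json() lazily from
# _content, so callers that only inspect the status and body work unchanged.

from requests.models import Response

def sketch_canned_response():
    resp = Response()
    resp.status_code = 201
    resp.headers["content-type"] = "application/json"
    resp._content = b'{"result": "ok"}'   # bytes the body accessors read from
    assert resp.ok                        # 2xx status counts as success
    assert resp.json() == {"result": "ok"}
    return resp

sketch_canned_response()
# -----------------------------------------------------------------------------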
diff --git a/tests/test_trainingmgr_operations.py b/tests/test_trainingmgr_operations.py
index 54eb4fe..9bc1ab9 100644
--- a/tests/test_trainingmgr_operations.py
+++ b/tests/test_trainingmgr_operations.py
@@ -85,3 +85,42 @@ class Test_upload_pipeline:
                                   content_type="application/json")
         assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
         assert expected_data in response.json.keys()
+
+class Test_create_dme_filtered_data_job:
+
+    the_response=Response()
+    the_response.status_code=status.HTTP_201_CREATED
+    @patch('trainingmgr.common.trainingmgr_operations.requests.put', return_value=the_response)
+    def test_create_dme_filtered_data_job(self, mock1):
+        mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
+        attrs_TRAININGMGR_CONFIG_OBJ = {'kf_adapter_ip.return_value': '123', 'kf_adapter_port.return_value' : '100'}
+        mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
+        source_name=""
+        db_org=""
+        bucket_name=""
+        token=""
+        features=[]
+        feature_group_name="test"
+        host="10.0.0.50"
+        port="31840"
+        response=trainingmgr_operations.create_dme_filtered_data_job(mocked_TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket_name, token, features, feature_group_name, host, port)
+        assert response.status_code==201, "create_dme_filtered_data_job failed"
+
+    @patch('trainingmgr.common.trainingmgr_operations.create_url_host_port', side_effect=Exception("Mocked Error"))
+    def test_negative_create_dme_filtered_data_job(self, mock1):
+        mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
+        attrs_TRAININGMGR_CONFIG_OBJ = {'kf_adapter_ip.return_value': '123', 'kf_adapter_port.return_value' : '100'}
+        mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
+        source_name=""
+        db_org=""
+        bucket_name=""
+        token=""
+        features=[]
+        feature_group_name="test"
+        host="10.0.0.50"
+        port="31840"
+        try:
+            trainingmgr_operations.create_dme_filtered_data_job(mocked_TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket_name, token, features, feature_group_name, host, port)
+        except Exception:
+            return  # the mocked create_url_host_port failure is expected to propagate
+        assert False, "create_dme_filtered_data_job did not raise"
diff --git a/tests/test_trainingmgr_util.py b/tests/test_trainingmgr_util.py
index 2700466..e869d3c 100644
--- a/tests/test_trainingmgr_util.py
+++ b/tests/test_trainingmgr_util.py
@@ -37,7 +37,7 @@ from trainingmgr.common.tmgr_logger import TMLogger
 from trainingmgr.common.trainingmgr_config import TrainingMgrConfig
 from trainingmgr.common.trainingmgr_util import response_for_training, check_key_in_dictionary,check_trainingjob_data, \
     get_one_key, get_metrics, handle_async_feature_engineering_status_exception_case, get_one_word_status, check_trainingjob_data, \
-    validate_trainingjob_name, get_all_pipeline_names_svc
+    validate_trainingjob_name, get_all_pipeline_names_svc, check_featureGroup_data
 from requests.models import Response
 from trainingmgr import trainingmgr_main
 from trainingmgr.common.tmgr_logger import TMLogger
@@ -661,4 +661,43 @@ class Test_get_all_pipeline_names_svc:
     @patch('trainingmgr.trainingmgr_main.requests.get', return_value = the_response)
     def test_get_all_pipeline_names(self,mock1, mock2):
         expected_data=['qoe_Pipeline']
-        assert get_all_pipeline_names_svc(self.mocked_TRAININGMGR_CONFIG_OBJ) ==expected_data, "Not equal"
\ No newline at end of file
+        assert get_all_pipeline_names_svc(self.mocked_TRAININGMGR_CONFIG_OBJ) ==expected_data, "Not equal"
+
+class Test_check_featureGroup_data:
+    @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=True)
+    def test_check_featureGroup_data(self, mock1):
+        json_data={
+            "featureGroupName": "test",
+            "feature_list": "",
+            "datalake_source": "",
+            "enable_Dme": False,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_data=("test", "", "",False,"","","","","","")
+        assert check_featureGroup_data(json_data)==expected_data, "data not equal"
+
+    @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=False)
+    def test_negative_featureGroup_data(self, mock1):
+        json_data={
+            "featureGroupName": "test",
+            "feature_list": "",
+            "datalake_source": "",
+            "enable_Dme": False,
+            "DmeHost": "",
+            "DmePort": "",
+            "bucket": "",
+            "token": "",
+            "source_name": "",
+            "dbOrg": ""
+        }
+        expected_data=("test", "", "",False,"","","","","","")
+        try:
+            result = check_featureGroup_data(json_data)
+        except Exception:
+            return  # raising here is the expected negative behaviour
+        assert result != expected_data, 'check_featureGroup_data should not succeed when a mandatory key is missing'
\ No newline at end of file
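# --- Editor's note: illustrative sketch, not part of this change ------------
# The negative tests above guard the expected exception with try/except by
# hand. If pytest is available to this suite (the test classes already follow
# pytest conventions), the same intent reads more directly with pytest.raises:

import pytest

def sketch_negative_case():
    def exploding_call():
        raise Exception("Mocked Error")

    # Fails the test if no exception is raised, and checks the message too.
    with pytest.raises(Exception, match="Mocked Error"):
        exploding_call()
# -----------------------------------------------------------------------------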