get_trainingjob_info_by_name, get_latest_version_trainingjob_name, \
get_all_versions_info_by_name, get_all_distinct_trainingjobs, \
get_all_version_num_by_trainingjob_name, update_model_download_url, \
- add_update_trainingjob, get_all_jobs_latest_status_version, get_info_of_latest_version
+ add_update_trainingjob, get_all_jobs_latest_status_version, get_info_of_latest_version, \
+ add_featuregroup
# Mimic of a trainingjob DB row used by the fake-DB helpers in this file.
# NOTE(review): presumably mirrors the trainingjob table's columns; only
# the two fields exercised by these tests are present — confirm vs schema.
mimic_db = {
"usecase_name": "Tester",
"accuracy": 70
}
# Mimic of a feature-group DB row consumed by db_helper_fg.fetchall().
# Keys follow the /featureGroup API payload; values are representative
# sample data only (presumably matching the real table schema — TODO confirm).
mimic_fg_db = {
    "featureGroupName": "testing_hash",
    "feature_list": "",
    "datalake_source": "InfluxSource",
    "enable_Dme": True,
    "DmeHost": "",
    "DmePort": "",
    "bucket": "pm-bucket",
    "token": "",
    "source_name": "qoedataset0905202305",
    "dbOrg": "est",
}
+
class db_helper:
'''Mimics as a Db'''
def __init__(self, req_cols, raise_exception = False, check_success_obj = None):
def commit(self):
pass
class db_helper_fg:
    """Mimics a DB connection/cursor pair for the feature-group tests.

    The same object doubles as connection and cursor: ``get_new_conn()``
    and ``cursor()`` each return a fresh instance built from the same
    configuration.

    Args:
        req_cols: list of column-name lists, one entry per expected
            ``fetchall()`` call. ``["*"]`` selects every value of
            ``mimic_fg_db``; ``[None]`` makes ``fetchall()`` return None
            (no rows).
        raise_exception: when True, ``execute()`` raises to simulate a
            DB failure.
        check_success_obj: optional Check-style object; ``close()`` calls
            its ``setwin()`` so tests can verify the cursor was closed.
    """

    def __init__(self, req_cols, raise_exception=False, check_success_obj=None):
        self.cols = req_cols
        self.raise_exception = raise_exception
        self.check_success_obj = check_success_obj
        self.counter = 0  # index of the next fetchall() column spec

    def get_new_conn(self):
        return db_helper_fg(self.cols, self.raise_exception, self.check_success_obj)

    def cursor(self):
        return db_helper_fg(self.cols, self.raise_exception, self.check_success_obj)

    def execute(self, query, values=None):
        # Simulate a failing query when configured to do so.
        if self.raise_exception:
            raise Exception("DB Error")

    def fetchall(self):
        out = []
        if len(self.cols) > 0:
            if self.cols[self.counter][0] == "*":
                # Full-row select: every value of the mimic row, in order.
                for value in mimic_fg_db.values():
                    out.append(value)
            elif self.cols[self.counter][0] is None:
                # No-rows case: advance to the next spec and signal "no data".
                self.counter += 1
                return None
            else:
                for col in self.cols[self.counter]:
                    out.append(mimic_fg_db[col])
            self.counter += 1
        return [out]

    def close(self):
        """If you call close, the query executed as expected — used to flag
        success for functions that do not return anything."""
        if self.check_success_obj:
            self.check_success_obj.setwin()

    def rollback(self):
        pass

    def commit(self):
        pass
+
class Check:
def __init__(self):
self.finished = False
except Exception as err:
fxn_name = "get_info_by_version"
assert str(err) == "DB Error", 'Negative test {} FAILED, Doesnt returned required error'.format(fxn_name)
- assert checker.finished, 'Cursor Not Closed Properly for fxn {} | Negative Test'.format(fxn_name)
\ No newline at end of file
+ assert checker.finished, 'Cursor Not Closed Properly for fxn {} | Negative Test'.format(fxn_name)
+
+ def test_add_featuregroup(self):
+ checker = Check()
+ db_obj = db_helper_fg([[None]], check_success_obj=checker)
+ add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+ assert checker.finished, 'add_featuregroup FAILED when dme true'
+
+ def test_negative_add_featuregroup(self):
+ checker = Check()
+ try:
+ db_obj = db_helper_fg([[None]], raise_exception=True, check_success_obj=checker)
+ add_featuregroup('Testing', '', '', True, db_obj, '', '', '', '', '')
+ assert False
+ except Exception as err:
+ fxn_name = "add_featuregroup"
+ assert str(err)=="Failed to execute query in {}DB Error".format(fxn_name)
\ No newline at end of file
data=json.loads(response.data)
assert response.status_code == status.HTTP_200_OK, "Return status code NOT equal"
assert data["failure count"]==1, "Return failure count NOT equal"
+
+
class Test_create_featuregroup:
    """Tests for the POST /featureGroup endpoint of trainingmgr_main."""

    def setup_method(self):
        self.client = trainingmgr_main.APP.test_client(self)
        self.logger = trainingmgr_main.LOGGER

    # DME disabled: the endpoint only stores the feature group.
    feature_group_data2 = ('testing_hash', 'pdcpBytesDl,pdcpBytesUl', 'InfluxSource', False, '', '', '', '', '', '')

    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
    def test_create_featuregroup_1(self, mock1, mock2):
        create_featuregroup_req = {
            "featureGroupName": "testing_hash",
            "feature_list": "pdcpBytesDl,pdcpBytesUl",
            "datalake_source": "InfluxSource",
            "enable_Dme": False,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_response = b'{"result": "Feature Group Created"}'
        response = self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
                                    content_type="application/json")
        trainingmgr_main.LOGGER.debug(response.data)
        assert response.data == expected_response
        assert response.status_code == status.HTTP_200_OK, "Return status code not equal"

    # DME enabled and the DME filtered-data job creation succeeds (201).
    the_response1 = Response()
    the_response1.status_code = status.HTTP_201_CREATED
    the_response1.headers = {"content-type": "application/json"}
    the_response1._content = b''
    mocked_TRAININGMGR_CONFIG_OBJ = mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
    # Re-bound on purpose: decorators below capture this DME-enabled tuple.
    feature_group_data2 = ('testing_hash', 'pdcpBytesDl,pdcpBytesUl', 'InfluxSource', True, '127.0.0.1', '31823', 'pm-bucket', '', '', '')

    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
    @patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response1)
    @patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value=mocked_TRAININGMGR_CONFIG_OBJ)
    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
    def test_create_featuregroup_2(self, mock1, mock2, mock3, mock4, mock5):
        create_featuregroup_req = {
            "featureGroupName": "testing_hash",
            "feature_list": "pdcpBytesDl,pdcpBytesUl",
            "datalake_source": "InfluxSource",
            "enable_Dme": True,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_response = b'{"result": "Feature Group Created"}'
        response = self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
                                    content_type="application/json")
        trainingmgr_main.LOGGER.debug(response.data)
        assert response.data == expected_response
        assert response.status_code == status.HTTP_200_OK, "Return status code not equal"

    # DME enabled but the DME job creation fails (500 from kf-adapter).
    the_response2 = Response()
    the_response2.status_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    the_response2.headers = {"content-type": "application/json"}
    the_response2._content = b''
    mocked_TRAININGMGR_CONFIG_OBJ = mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
    feature_group_data3 = ('testing_hash', 'pdcpBytesDl,pdcpBytesUl', 'InfluxSource', True, '127.0.0.1', '31823', 'pm-bucket', '', '', '')

    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
    @patch('trainingmgr.trainingmgr_main.add_featuregroup')
    @patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response2)
    @patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value=mocked_TRAININGMGR_CONFIG_OBJ)
    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
    def test_negative_create_featuregroup_1(self, mock1, mock2, mock3, mock4, mock5):
        create_featuregroup_req = {
            "featureGroupName": "testing_hash",
            "feature_list": "pdcpBytesDl,pdcpBytesUl",
            "datalake_source": "InfluxSource",
            "enable_Dme": True,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_response = b'{"Exception": "Cannot create dme job"}'
        response = self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
                                    content_type="application/json")
        trainingmgr_main.LOGGER.debug(response.data)
        assert response.data == expected_response
        assert response.status_code == status.HTTP_400_BAD_REQUEST, "Return status code not equal"

    # DB insert itself fails: endpoint must answer 500.
    feature_group_data3 = ('testing_hash', 'pdcpBytesDl,pdcpBytesUl', 'InfluxSource', True, '127.0.0.1', '31823', 'pm-bucket', '', '', '')

    @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
    @patch('trainingmgr.trainingmgr_main.add_featuregroup', side_effect=Exception('Mocked error'))
    @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
    def test_negative_create_featuregroup_2(self, mock1, mock2, mock3):
        create_featuregroup_req = {
            "featureGroupName": "testing_hash",
            "feature_list": "pdcpBytesDl,pdcpBytesUl",
            "datalake_source": "InfluxSource",
            "enable_Dme": False,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_response = b'{"Exception": "Failed to create the feature Group "}'
        response = self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
                                    content_type="application/json")
        trainingmgr_main.LOGGER.debug(response.data)
        assert response.data == expected_response
        assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"
content_type="application/json")
assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
assert expected_data in response.json.keys()
+
class Test_create_dme_filtered_data_job:
    """Tests for trainingmgr_operations.create_dme_filtered_data_job."""

    the_response = Response()
    the_response.status_code = status.HTTP_201_CREATED

    @patch('trainingmgr.common.trainingmgr_operations.requests.put', return_value=the_response)
    def test_create_dme_filtered_data_job(self, mock1):
        """Happy path: the kf-adapter PUT succeeds and 201 is passed through."""
        mocked_TRAININGMGR_CONFIG_OBJ = mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
        attrs_TRAININGMGR_CONFIG_OBJ = {'kf_adapter_ip.return_value': '123', 'kf_adapter_port.return_value': '100'}
        mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
        source_name = ""
        db_org = ""
        bucket_name = ""
        token = ""
        features = []
        feature_group_name = "test"
        host = "10.0.0.50"
        port = "31840"
        response = trainingmgr_operations.create_dme_filtered_data_job(mocked_TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket_name, token, features, feature_group_name, host, port)
        assert response.status_code == 201, "create_dme_filtered_data_job failed"

    @patch('trainingmgr.common.trainingmgr_operations.create_url_host_port', side_effect=Exception("Mocked Error"))
    def test_negative_create_dme_filtered_data_job(self, mock1):
        """URL construction fails: the error must propagate to the caller."""
        mocked_TRAININGMGR_CONFIG_OBJ = mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
        attrs_TRAININGMGR_CONFIG_OBJ = {'kf_adapter_ip.return_value': '123', 'kf_adapter_port.return_value': '100'}
        mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
        source_name = ""
        db_org = ""
        bucket_name = ""
        token = ""
        features = []
        feature_group_name = "test"
        host = "10.0.0.50"
        port = "31840"
        # Record whether the call raised and assert afterwards: the old
        # `assert False` inside the try was itself caught by the broad
        # except clause, so the test could never fail.
        raised = False
        try:
            trainingmgr_operations.create_dme_filtered_data_job(mocked_TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket_name, token, features, feature_group_name, host, port)
        except Exception:
            raised = True
        assert raised, "create_dme_filtered_data_job should propagate the error"
from trainingmgr.common.trainingmgr_config import TrainingMgrConfig
from trainingmgr.common.trainingmgr_util import response_for_training, check_key_in_dictionary,check_trainingjob_data, \
get_one_key, get_metrics, handle_async_feature_engineering_status_exception_case, get_one_word_status, check_trainingjob_data, \
- validate_trainingjob_name, get_all_pipeline_names_svc
+ validate_trainingjob_name, get_all_pipeline_names_svc, check_featureGroup_data
from requests.models import Response
from trainingmgr import trainingmgr_main
from trainingmgr.common.tmgr_logger import TMLogger
@patch('trainingmgr.trainingmgr_main.requests.get', return_value = the_response)
def test_get_all_pipeline_names(self,mock1, mock2):
expected_data=['qoe_Pipeline']
- assert get_all_pipeline_names_svc(self.mocked_TRAININGMGR_CONFIG_OBJ) ==expected_data, "Not equal"
\ No newline at end of file
+ assert get_all_pipeline_names_svc(self.mocked_TRAININGMGR_CONFIG_OBJ) ==expected_data, "Not equal"
+
class Test_check_featureGroup_data:
    """Tests for trainingmgr_util.check_featureGroup_data."""

    @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary', return_value=True)
    def test_check_featureGroup_data(self, mock1):
        """All keys present: the payload is unpacked into a tuple."""
        json_data = {
            "featureGroupName": "test",
            "feature_list": "",
            "datalake_source": "",
            "enable_Dme": False,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_data = ("test", "", "", False, "", "", "", "", "", "")
        assert check_featureGroup_data(json_data) == expected_data, "data not equal"

    @patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary', return_value=False)
    def test_negative_featureGroup_data(self, mock1):
        """Key check fails: check_featureGroup_data must not yield valid data."""
        json_data = {
            "featureGroupName": "test",
            "feature_list": "",
            "datalake_source": "",
            "enable_Dme": False,
            "DmeHost": "",
            "DmePort": "",
            "bucket": "",
            "token": "",
            "source_name": "",
            "dbOrg": ""
        }
        expected_data = ("test", "", "", False, "", "", "", "", "", "")
        # The original try/except swallowed its own `assert False` via a
        # bare except, so the test always passed. Capture the outcome
        # instead: raising OR returning something other than the valid
        # tuple both count as the expected failure.
        result = None
        try:
            result = check_featureGroup_data(json_data)
        except Exception:
            pass  # an exception is an acceptable "failure" outcome
        assert result != expected_data, "check_featureGroup_data should not succeed when keys are missing"
\ No newline at end of file