self.client = trainingmgr_main.APP.test_client(self)
self.logger = trainingmgr_main.LOGGER
- feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '')
+ feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','')
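+ # the mocked tuple now carries 12 fields so it matches the widened return
+ # value of check_feature_group_data (a separate dmePort field was added).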
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
def test_create_featuregroup_1(self, mock1, mock2, mock3):
- create_featuregroup_req={
- "featureGroupName": "testing_hash",
- "feature_list": "pdcpBytesDl,pdcpBytesUl",
- "datalake_source": "InfluxSource",
- "enable_Dme": False,
- "DmeHost": "",
- "DmePort": "",
- "bucket": "",
- "token": "",
- "source_name": "",
- "measured_obj_class":"",
- "dbOrg": ""
- }
+ create_featuregroup_req={"featureGroupName":"testing_hash",
+ "feature_list":"pdcpBytesDl,pdcpBytesUl",
+ "datalake_source":"InfluxSource",
+ "enable_Dme":False,
+ "Host":"",
+ "Port":"",
+ "dmePort":"",
+ "bucket":"",
+ "token":"",
+ "source_name":"",
+ "measured_obj_class":"",
+ "dbOrg":""}
expected_response=b'{"result": "Feature Group Created"}'
response=self.client.post("/featureGroup", data=json.dumps(create_featuregroup_req),
content_type="application/json")
the_response1.headers={"content-type": "application/json"}
the_response1._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
- feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+ feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','','pm-bucket','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
"feature_list": "pdcpBytesDl,pdcpBytesUl",
"datalake_source": "InfluxSource",
"enable_Dme": True,
- "DmeHost": "",
- "DmePort": "",
+ "host": "",
+ "port": "",
"bucket": "",
+ "dmePort":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
the_response2.headers={"content-type": "application/json"}
the_response2._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
- feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+ feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','','pm-bucket','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
"feature_list": "pdcpBytesDl,pdcpBytesUl",
"datalake_source": "InfluxSource",
"enable_Dme": True,
- "DmeHost": "",
- "DmePort": "",
+ "host": "",
+ "port": "",
"bucket": "",
+ "dmePort":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
assert response.status_code ==status.HTTP_400_BAD_REQUEST, "Return status code not equal"
- feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+ feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','','pm-bucket','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
"feature_list": "pdcpBytesDl,pdcpBytesUl",
"datalake_source": "InfluxSource",
"enable_Dme": False,
- "DmeHost": "",
- "DmePort": "",
+ "host": "",
+ "port": "",
"bucket": "",
+ "dmePort":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
assert response.data==expected_response
assert response.status_code ==status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"
- feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','')
+ feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','','pm-bucket','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=True)
def test_negative_create_featuregroup_3(self, mock1, mock2):
"feature_list": "pdcpBytesDl,pdcpBytesUl",
"datalake_source": "InfluxSource",
"enable_Dme": False,
- "DmeHost": "",
- "DmePort": "",
+ "host": "",
+ "port": "",
"bucket": "",
+ "dmePort":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
self.client = trainingmgr_main.APP.test_client(self)
self.logger = trainingmgr_main.LOGGER
- result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
+ result=[('testing', '', 'InfluxSource', '', '', '', '', '', True, '', '', '')]
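+ # the mocked DB row follows the reordered column layout, with the dme flag
+ # at index 8 (matching the res[8] lookup in the handler).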
@patch('trainingmgr.trainingmgr_main.get_feature_groups_db', return_value=result)
def test_get_feature_group(self,mock1):
expected_data=b'{"featuregroups": [{"featuregroup_name": "testing", "features": "", "datalake": "InfluxSource", "dme": true}]}'
self.client = trainingmgr_main.APP.test_client(self)
self.logger = trainingmgr_main.LOGGER
- result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '','')]
+ result=[('testing', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', '', '', '', '')]
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=result)
def test_get_feature_group_by_name(self, mock1):
- expected_data=b'{"featuregroup": [{"featuregroup_name": "testing", "features": [""], "datalake": "InfluxSource", "dme": true, "dme_host": "21.0.0.21", "measured_obj_class": "12345", "dme_port": "", "bucket": "", "token": "", "source_name": "", "db_org": ""}]}'
+ expected_data=b'{"featuregroup": [{"featuregroup_name": "testing", "features": "", "datalake": "InfluxSource", "dme": "", "host": "127.0.0.21", "measured_obj_class": "", "port": "8080", "dme_port": "", "bucket": "", "token": "", "source_name": "", "db_org": ""}]}'
fg_name='testing'
response=self.client.get('/featureGroup/{}'.format(fg_name))
assert response.status_code == 200 , "status code is not equal"
mocked_TRAININGMGR_CONFIG_OBJ.configure_mock(**attrs_TRAININGMGR_CONFIG_OBJ)
resp=Response()
resp.status_code=status.HTTP_204_NO_CONTENT
- the_result=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
+ the_result=[('testing_hash', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', False, '', '', '')]
@patch('trainingmgr.trainingmgr_main.check_key_in_dictionary', return_value=True)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=the_result)
@patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
assert response.data==expected_response
assert response.status_code==200, "status code not equal"
- the_result2=[('testing', '', 'InfluxSource', True, '21.0.0.21', '12345', '', '', '', '')]
- resp2=Response()
- resp2.status_code=status.HTTP_500_INTERNAL_SERVER_ERROR
+ the_result2=[('testing_hash', '', 'InfluxSource', '127.0.0.21', '8080', '', '', '', False, '', '', '')]
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=the_result2)
- @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name', return_value=resp2)
+ @patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name', side_effect=Exception("Mocked Error"))
def test_negative_delete_list_of_feature_group_7(self, mock1, mock2):
delete_req={"featuregroups_list":[{"featureGroup_name":"testing_hash"}]}
expected_response=b'{"success count": 0, "failure count": 1}'
try:
json_data=request.json
- (feature_group_name, features, datalake_source, enable_dme, dme_host, dme_port,bucket, token, source_name,db_org, measured_obj_class)=check_feature_group_data(json_data)
+ (feature_group_name, features, datalake_source, enable_dme, host, port, dme_port, bucket, token, source_name, db_org, measured_obj_class)=check_feature_group_data(json_data)
# check the data conformance
LOGGER.debug("the db info is: %s", get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name))
-
if (not check_trainingjob_name_or_featuregroup_name(feature_group_name) or
len(feature_group_name) < 3 or len(feature_group_name) > 63 or
get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name)):
else:
# the features are stored as a string in the db and have to be passed as a list of features to the DME, hence the conversion.
features_list = features.split(",")
- add_featuregroup(feature_group_name, features, datalake_source, enable_dme, PS_DB_OBJ,measured_obj_class,dme_host, dme_port, bucket, token, source_name,db_org )
+ add_featuregroup(feature_group_name, features, datalake_source, host, port, bucket, token, db_org, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
+ api_response={"result": "Feature Group Created"}
+ response_code=status.HTTP_200_OK
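+ # assume success up front; if the optional DME job below cannot be created,
+ # the feature group is rolled back and the response is overwritten.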
if enable_dme:
- response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, features_list, feature_group_name, dme_host, dme_port, measured_obj_class)
+ response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, features_list, feature_group_name, host, dme_port, measured_obj_class)
if response.status_code != 201:
api_response={"Exception": "Cannot create dme job"}
delete_feature_group_by_name(PS_DB_OBJ, feature_group_name)
"featuregroup_name": res[0],
"features": res[1],
"datalake": res[2],
- "dme": res[3]
+ "dme": res[8]
}
feature_groups.append(dict_data)
api_response={"featuregroups":feature_groups}
feature_group=[]
if result:
for res in result:
- features=res[1].split(",")
dict_data={
"featuregroup_name": res[0],
- "features": features,
+ "features": res[1],
"datalake": res[2],
- "dme": res[3],
- "dme_host": res[4],
- "measured_obj_class":res[5],
- "dme_port": res[6],
- "bucket":res[7],
- "token":res[8],
- "source_name":res[9],
- "db_org":res[10]
+ "dme": res[8],
+ "host": res[3],
+ "measured_obj_class":res[9],
+ "port": res[4],
+ "dme_port":res[10],
+ "bucket":res[5],
+ "token":res[6],
+ "source_name":res[11],
+ "db_org":res[7]
}
feature_group.append(dict_data)
api_response={"featuregroup":feature_group}
continue
if results:
- dme=results[0][3]
+ dme=results[0][8]
try:
delete_feature_group_by_name(PS_DB_OBJ, featuregroup_name)
if dme :
- dme_host=results[0][4]
- dme_port=results[0][6]
+ dme_host=results[0][3]
+ dme_port=results[0][10]
resp=delete_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, featuregroup_name, dme_host, dme_port)
if(resp.status_code !=status.HTTP_204_NO_CONTENT):
not_possible_to_delete.append(my_dict)
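# A minimal sketch, not part of this change: the magic row indices used above
# could be centralised in one place. FEATURE_GROUP_COLUMNS and _row_to_dict are
# hypothetical names that follow the assumed column layout documented above.
FEATURE_GROUP_COLUMNS = ("featuregroup_name", "features", "datalake", "host",
                         "port", "bucket", "token", "db_org", "dme",
                         "measured_obj_class", "dme_port", "source_name")

def _row_to_dict(row):
    # pair each column name with the corresponding element of a DB row tuple
    return dict(zip(FEATURE_GROUP_COLUMNS, row))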