Add the new _measurement field to Feature Group handling (request validation, add_featuregroup signature, DB insert/schema, and tests).
Issue-id: AIMLFW-76
Change-Id: Idcdba5ad7a2f3b1c09b20dc18f1d9570e2d294db
Signed-off-by: rajdeep11 <rajdeep.sin@samsung.com>
# NOTE(review): this span is a diff hunk, not plain source — the '-' line is the
# old call and the '+' line is its replacement; original indentation was lost in
# extraction. The change inserts one extra '' positional argument (the new
# _measurement field) before db_obj to match the updated add_featuregroup signature.
def test_add_featuregroup(self):
# Happy path: with a stubbed DB helper that accepts the insert, the shared
# Check object should be marked finished by the time the call returns.
checker = Check()
db_obj = db_helper_fg([[None]], check_success_obj=checker)
- add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
+ add_featuregroup('Testing', '', '', '','','','','', '','',db_obj, '', '', '')
assert checker.finished, 'add_featuregroup FAILED when dme true'
def test_negative_add_featuregroup(self):
checker = Check()
try:
db_obj = db_helper_fg([[None]], raise_exception=True, check_success_obj=checker)
- add_featuregroup('Testing', '', '', '','','','','', '',db_obj, '', '', '')
+ add_featuregroup('Testing', '', '', '','','','','', '','',db_obj,'', '', '')
assert False
except Exception as err:
fxn_name = "add_featuregroup"
self.client = trainingmgr_main.APP.test_client(self)
self.logger = trainingmgr_main.LOGGER
- feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','')
+ feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','', '','', '')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
"Port":"",
"dmePort":"",
"bucket":"",
+ "_measurement":"",
"token":"",
"source_name":"",
"measured_obj_class":"",
the_response1.headers={"content-type": "application/json"}
the_response1._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
- feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+ feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
"host": "",
"port": "",
"bucket": "",
+ "_measurement":"",
"dmePort":"",
"token": "",
"source_name": "",
the_response2.headers={"content-type": "application/json"}
the_response2._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
- feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+ feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
"host": "",
"port": "",
"bucket": "",
+ "_measurement":"",
"dmePort":"",
"token": "",
"source_name": "",
assert response.status_code ==status.HTTP_400_BAD_REQUEST, "Return status code not equal"
- feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+ feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
"host": "",
"port": "",
"bucket": "",
+ "_measurement":"",
"dmePort":"",
"token": "",
"source_name": "",
assert response.data==expected_response
assert response.status_code ==status.HTTP_500_INTERNAL_SERVER_ERROR, "Return status code not equal"
- feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','')
+ feature_group_data3=('testing_hash!@','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','','','','')
@patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=True)
def test_neagtive_create_featuregroup_3(self, mock1, mock2):
"port": "",
"bucket": "",
"dmePort":"",
+ "_measurement":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
expected_data=b'{"Exception": "Failed to fetch feature group info from db"}'
fg_name='testing'
response=self.client.get('/featureGroup/{}'.format(fg_name))
- print(response.data)
assert response.status_code == 404 , "status code is not equal"
assert response.data == expected_data
delete_req=delete_req={"featuregroups_list":[{"featureGroup_name":"testing_hash"}]}
expected_response=b'{"Exception": "Wrong Request syntax"}'
response=self.client.delete('/featureGroup', data=json.dumps(delete_req), content_type="application/json")
- print("response data", response.data)
assert response.data==expected_response
assert response.status_code==400, "status code not equal"
delete_req={"list":[{"trainingjob_name":"testing_dme_02","version":1}]}
expected_response=b'{"Exception": "Wrong Request syntax"}'
response=self.client.delete('/trainingjobs', data=json.dumps(delete_req), content_type="application/json")
- print("response data", response.data)
assert response.data==expected_response
assert response.status_code==400, "status code not equal"
delete_req={"list":[{"trainingjob_name":"testing_dme_02","version":1}]}
expected_response=b'{"Exception": "not given as list"}'
response=self.client.delete('/trainingjobs', data=json.dumps(delete_req), content_type="application/json")
- print("response data", response.data)
assert response.data==expected_response
assert response.status_code==400, "status code not equal"
"Port": "",
"bucket": "",
"dmePort":"",
+ '_measurement':"",
"token": "",
"source_name": "",
"measured_obj_class":"",
"dbOrg": ""
}
- expected_data=("test", "", "",False,"","","","","","","","")
+ expected_data=("test", "", "",False,"","","","","","","","","")
assert check_feature_group_data(json_data)==expected_data, "data not equal"
@patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=False)
"Host": "",
"Port": "",
"bucket": "",
+ '_measurement':"",
"dmePort":"",
"token": "",
"source_name": "",
"measured_obj_class":"",
"dbOrg": ""
}
- expected_data=("test", "", "",False,"","","","","","","","")
+ expected_data=("test", "", "",False,"","","","","","","","","")
try:
assert check_feature_group_data(json_data)==expected_data, 'data not equal'
assert False
try:
if check_key_in_dictionary(["featureGroupName", "feature_list", \
"datalake_source", "enable_Dme", "Host",
- "Port", "dmePort","bucket", "token", "source_name", "measured_obj_class"], json_data):
+ "Port", "dmePort","bucket", "token", "source_name", "measured_obj_class", "_measurement"], json_data):
feature_group_name=json_data["featureGroupName"]
features=json_data["feature_list"]
datalake_source=json_data["datalake_source"]
enable_dme=json_data["enable_Dme"]
+ measurement = json_data["_measurement"]
host=json_data["Host"]
port=json_data["Port"]
dme_port=json_data["dmePort"]
except Exception as err:
raise APIException(status.HTTP_400_BAD_REQUEST, str(err)) from None
- return (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port, bucket, token, source_name,db_org, measured_obj_class)
+ return (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port, bucket, token, source_name,db_org, measured_obj_class, measurement)
def get_one_key(dictionary):
'''
if conn is not None:
conn.close()
-def add_featuregroup(feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, ps_db_obj, measured_obj_class="", dme_port="", source_name=""):
+def add_featuregroup(feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org,_measurement, enable_dme, ps_db_obj, measured_obj_class="", dme_port="", source_name=""):
"""
This function add the new row or update existing row with given information
"""
try:
cursor.execute(''' INSERT INTO {} VALUES '''.format(fg_table_name) +
- '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s)''',
- (feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org, enable_dme, measured_obj_class, dme_port, source_name))
+ '''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s, %s, %s,%s)''',
+ (feature_group_name, feature_list, datalake_source , host, port, bucket, token, db_org,_measurement,enable_dme, measured_obj_class, dme_port, source_name))
conn.commit()
cursor.close()
except Exception as err:
"bucket varchar(128) NOT NULL," + \
"token varchar(2000) NOT NULL," + \
"db_org varchar(128) NOT NULL," + \
+ "_measurement varchar(100) NOT NULL," + \
"enable_dme BOOLEAN NOT NULL," + \
"measured_obj_class varchar(128) NOT NULL," + \
"dme_port varchar(128) NOT NULL," + \
db port
token: str
token for the bucket
- db org:
+ db org: str
db org name
+ measurement: str
+ measurement of the influxdb
enable_Dme: boolean
whether to enable dme
source_name: str
try:
json_data=request.json
- (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port,bucket, token, source_name,db_org, measured_obj_class)=check_feature_group_data(json_data)
+ (feature_group_name, features, datalake_source, enable_dme, host, port,dme_port,bucket, token, source_name,db_org, measured_obj_class, measurement)=check_feature_group_data(json_data)
# check the data conformance
LOGGER.debug("the db info is : ", get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name))
if (not check_trainingjob_name_or_featuregroup_name(feature_group_name) or
else:
# the features are stored in string format in the db, and has to be passed as list of feature to the dme. Hence the conversion.
features_list = features.split(",")
- add_featuregroup(feature_group_name, features, datalake_source , host, port, bucket, token, db_org, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
+ add_featuregroup(feature_group_name, features, datalake_source , host, port, bucket, token, db_org, measurement, enable_dme, PS_DB_OBJ, measured_obj_class, dme_port, source_name)
api_response={"result": "Feature Group Created"}
response_code =status.HTTP_200_OK
if enable_dme == True :