self.logger = trainingmgr_main.LOGGER
feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',False,'','','','','','')
- @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
+ @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
+ @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
- def test_create_featuregroup_1(self, mock1, mock2):
+ def test_create_featuregroup_1(self, mock1, mock2, mock3):
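Reviewer note: unittest injects mocks for stacked @patch decorators bottom-up, which is why adding the get_feature_group_by_name_db patch forces the extra mock3 parameter here. A minimal, self-contained sketch of that ordering (the patch targets are stdlib functions chosen only for illustration):

```python
from unittest.mock import patch

@patch('os.getcwd', return_value='/top')  # topmost decorator -> last mock argument
@patch('os.getpid', return_value=42)      # closest to the function -> first mock argument
def check_order(mock_getpid, mock_getcwd):
    import os
    assert os.getpid() == 42
    assert os.getcwd() == '/top'

check_order()
```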
create_featuregroup_req={
"featureGroupName": "testing_hash",
"feature_list": "pdcpBytesDl,pdcpBytesUl",
the_response1._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
feature_group_data2=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
- @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data2)
+ @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data2)
+ @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
@patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response1)
@patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value = mocked_TRAININGMGR_CONFIG_OBJ)
@patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
- def test_create_featuregroup_2(self, mock1, mock2, mock3,mock4,mock5):
+ def test_create_featuregroup_2(self, mock1, mock2, mock3, mock4, mock5, mock6):
create_featuregroup_req={
"featureGroupName": "testing_hash",
"feature_list": "pdcpBytesDl,pdcpBytesUl",
the_response2._content = b''
mocked_TRAININGMGR_CONFIG_OBJ=mock.Mock(name="TRAININGMGR_CONFIG_OBJ")
feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
- @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
+ @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
+ @patch('trainingmgr.trainingmgr_main.get_feature_group_by_name_db', return_value=False)
@patch('trainingmgr.trainingmgr_main.add_featuregroup')
@patch('trainingmgr.trainingmgr_main.create_dme_filtered_data_job', return_value=the_response2)
@patch('trainingmgr.trainingmgr_main.TRAININGMGR_CONFIG_OBJ', return_value = mocked_TRAININGMGR_CONFIG_OBJ)
@patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
- def test_negative_create_featuregroup_1(self, mock1, mock2, mock3,mock4,mock5):
+ def test_negative_create_featuregroup_1(self, mock1, mock2, mock3, mock4, mock5, mock6):
create_featuregroup_req={
"featureGroupName": "testing_hash",
"feature_list": "pdcpBytesDl,pdcpBytesUl",
feature_group_data3=('testing_hash','pdcpBytesDl,pdcpBytesUl','InfluxSource',True,'127.0.0.1','31823','pm-bucket','','','')
- @patch('trainingmgr.trainingmgr_main.check_featureGroup_data', return_value=feature_group_data3)
+ @patch('trainingmgr.trainingmgr_main.check_feature_group_data', return_value=feature_group_data3)
@patch('trainingmgr.trainingmgr_main.add_featuregroup',side_effect = Exception('Mocked error'))
@patch('trainingmgr.trainingmgr_main.delete_feature_group_by_name')
- def test_neagtive_create_featuregroup_2(self, mock1, mock2, mock3):
+ def test_negative_create_featuregroup_2(self, mock1, mock2, mock3):
from trainingmgr.common.trainingmgr_config import TrainingMgrConfig
from trainingmgr.common.trainingmgr_util import response_for_training, check_key_in_dictionary,check_trainingjob_data, \
- get_one_key, get_metrics, handle_async_feature_engineering_status_exception_case, get_one_word_status, check_trainingjob_data, \
- validate_trainingjob_name, get_all_pipeline_names_svc, check_featureGroup_data
+ get_one_key, get_metrics, handle_async_feature_engineering_status_exception_case, get_one_word_status, \
+ validate_trainingjob_name, get_all_pipeline_names_svc, check_feature_group_data
from requests.models import Response
from trainingmgr import trainingmgr_main
from trainingmgr.common.tmgr_logger import TMLogger
expected_data=['qoe_Pipeline']
assert get_all_pipeline_names_svc(self.mocked_TRAININGMGR_CONFIG_OBJ) ==expected_data, "Not equal"
-class Test_check_featureGroup_data:
+class Test_check_feature_group_data:
@patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=True)
- def test_check_featureGroup_data(self, mock1):
+ def test_check_feature_group_data(self, mock1):
json_data={
"featureGroupName": "test",
"feature_list": "",
"dbOrg": ""
}
expected_data=("test", "", "",False,"","","","","","")
- assert check_featureGroup_data(json_data)==expected_data, "data not equal"
+ assert check_feature_group_data(json_data)==expected_data, "data not equal"
@patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary',return_value=False)
- def test_negative_featureGroup_data(self, mock1):
+ def test_negative_check_feature_group_data(self, mock1):
json_data={
"featureGroupName": "test",
"feature_list": "",
}
expected_data=("test", "", "",False,"","","","","","")
try:
- assert check_featureGroup_data(json_data)==expected_data, 'data not equal'
+ assert check_feature_group_data(json_data)==expected_data, 'data not equal'
assert False
except:
assert True
\ No newline at end of file
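Side note on the negative test above: the try/except-around-assert pattern passes even when check_feature_group_data raises nothing, because the inner `assert False` is itself swallowed by the bare except. A hedged sketch of the more idiomatic form, assuming the suite already runs under pytest:

```python
import pytest
from unittest.mock import patch
from trainingmgr.common.trainingmgr_util import check_feature_group_data

@patch('trainingmgr.common.trainingmgr_util.check_key_in_dictionary', return_value=False)
def test_negative_check_feature_group_data(mock1):
    # Fails the test automatically when no exception is raised,
    # which the try/except version cannot do.
    with pytest.raises(Exception):
        check_feature_group_data({"featureGroupName": "test", "feature_list": ""})
```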
import validators
from trainingmgr.common.exceptions_utls import TMException
+MIMETYPE_JSON = "application/json"
+
def create_url_host_port(protocol, host, port, path=''):
"""
This function creates and validates URL based on the inputs.
response = requests.post(url,
data=json.dumps(dictionary),
- headers={'content-type': 'application/json',
+ headers={'content-type': MIMETYPE_JSON,
'Accept-Charset': 'UTF-8'})
return response
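For context on the helper above: its docstring promises URL construction plus validation, and the module imports the validators package. A minimal sketch of that contract (the function name and path handling here are illustrative assumptions, not the actual implementation):

```python
import validators

def build_and_validate_url(protocol, host, port, path=''):
    # Assemble protocol://host:port[/path]; reject anything malformed.
    url = f"{protocol}://{host}:{port}"
    if path:
        url = f"{url}/{path}"
    if not validators.url(url):
        raise ValueError(f"invalid URL: {url}")
    return url

print(build_and_validate_url('http', '127.0.0.1', 31823, 'data-consumer/v1/info-jobs/fg1'))
```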
logger.debug(url)
response = requests.post(url,
data=json.dumps(dict_data),
- headers={'content-type': 'application/json',
+ headers={'content-type': MIMETYPE_JSON,
'Accept-Charset': 'UTF-8'})
return response
{"sourceNames":[source_name],
"measTypes":features}}}
- headers = {'Content-type': 'application/json'}
+ headers = {'Content-type': MIMETYPE_JSON}
url = create_url_host_port('http', host, port, 'data-consumer/v1/info-jobs/{}'.format(feature_group_name))
logger.debug(url)
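The fragment above shows only the tail of the DME job payload and the info-jobs URL. A hedged reconstruction of how such a request could be assembled; the outer payload keys, the type id, and the HTTP verb are assumptions not confirmed by this diff:

```python
import json
import requests

MIMETYPE_JSON = "application/json"  # same constant introduced above

def create_dme_job_sketch(host, port, feature_group_name, source_name, features):
    # 'features' must be a list; callers split the comma-separated db string first.
    payload = {"info_type_id": "pm-data",  # placeholder type id, not from this diff
               "job_definition": {"filter": {"sourceNames": [source_name],
                                             "measTypes": features}}}
    url = f"http://{host}:{port}/data-consumer/v1/info-jobs/{feature_group_name}"
    return requests.put(url, data=json.dumps(payload),
                        headers={'Content-type': MIMETYPE_JSON})
```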
arguments, query_filter, enable_versioning, pipeline_version,
datalake_source, _measurement, bucket)
-def check_featureGroup_data(json_data):
+def check_feature_group_data(json_data):
"""
- This function checks validation for json_data dictionary and return tuple which conatins
- values of different keys in jsn_data.
+ This function validates the json_data dictionary and returns a tuple containing
+ the values of its keys.
if check_key_in_dictionary(["featureGroupName", "feature_list", \
"datalake_source", "enable_Dme", "DmeHost",
"DmePort", "bucket", "token", "source_name"], json_data):
- featureGroup_name=json_data["featureGroupName"]
+ feature_group_name=json_data["featureGroupName"]
features=json_data["feature_list"]
datalake_source=json_data["datalake_source"]
- enable_Dme=json_data["enable_Dme"]
+ enable_dme=json_data["enable_Dme"]
dme_host=json_data["DmeHost"]
dme_port=json_data["DmePort"]
bucket=json_data["bucket"]
except Exception as err:
raise APIException(status.HTTP_400_BAD_REQUEST, str(err)) from None
- return (featureGroup_name, features, datalake_source, enable_Dme, dme_host, dme_port, bucket, token, source_name,db_org)
+ return (feature_group_name, features, datalake_source, enable_dme, dme_host, dme_port, bucket, token, source_name, db_org)
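A usage sketch of the renamed helper, with key names taken from the request bodies and fixtures earlier in this diff (field order matches the return tuple above):

```python
json_data = {"featureGroupName": "testing_hash",
             "feature_list": "pdcpBytesDl,pdcpBytesUl",
             "datalake_source": "InfluxSource",
             "enable_Dme": False,
             "DmeHost": "", "DmePort": "", "bucket": "",
             "token": "", "source_name": "", "dbOrg": ""}

(feature_group_name, features, datalake_source, enable_dme,
 dme_host, dme_port, bucket, token, source_name, db_org) = \
    check_feature_group_data(json_data)
assert features == "pdcpBytesDl,pdcpBytesUl" and enable_dme is False
```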
def get_one_key(dictionary):
'''
if conn is not None:
conn.close()
-def add_featuregroup(featureGroup_name, feature_list, datalake_source, enable_Dme, ps_db_obj , DmeHost="", DmePort="", bucket="", token="", source_name="",db_org=""):
+def add_featuregroup(feature_group_name, feature_list, datalake_source, enable_dme, ps_db_obj, dme_host="", dme_port="", bucket="", token="", source_name="", db_org=""):
"""
This function add the new row or update existing row with given information
"""
try:
cursor.execute(''' INSERT INTO {} VALUES '''.format(fg_table_name) +
'''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
- (featureGroup_name, feature_list, datalake_source, enable_Dme, DmeHost, DmePort, bucket, token, source_name, db_org))
+ (feature_group_name, feature_list, datalake_source, enable_dme, dme_host, dme_port, bucket, token, source_name, db_org))
conn.commit()
cursor.close()
except Exception as err:
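Caller's-eye sketch of the renamed signature; PS_DB_OBJ stands in for the module's Postgres access object, and values mirror the test fixtures above:

```python
# Keyword names follow the snake_case parameters introduced above.
add_featuregroup("testing_hash", "pdcpBytesDl,pdcpBytesUl", "InfluxSource",
                 True, PS_DB_OBJ,
                 dme_host="127.0.0.1", dme_port="31823", bucket="pm-bucket")
```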
check_key_in_dictionary, get_one_key, \
response_for_training, get_metrics, \
handle_async_feature_engineering_status_exception_case, \
- validate_trainingjob_name, get_all_pipeline_names_svc, check_featureGroup_data
+ validate_trainingjob_name, get_all_pipeline_names_svc, check_feature_group_data
from trainingmgr.common.exceptions_utls import APIException,TMException
from trainingmgr.constants.steps import Steps
from trainingmgr.constants.states import States
-ERROR_TYPE_KF_ADAPTER_JSON = "Kf adapter doesn't sends json type response"
+ERROR_TYPE_KF_ADAPTER_JSON = "Kf adapter doesn't send a json type response"
ERROR_TYPE_DB_STATUS = "Couldn't update the status as failed in db access"
MIMETYPE_JSON = "application/json"
+NOT_LIST = "not given as list"
@APP.errorhandler(APIException)
def error(err):
trainingjobs_list = request.json['trainingjobs_list']
if not isinstance(trainingjobs_list, list):
- raise APIException(status.HTTP_400_BAD_REQUEST, "not given as list")
+ raise APIException(status.HTTP_400_BAD_REQUEST, NOT_LIST)
for obj in trainingjobs_list:
try:
list_of_trainingjob_version = request.json['list']
if not isinstance(list_of_trainingjob_version, list):
- raise APIException(status.HTTP_400_BAD_REQUEST, "not given as list")
+ raise APIException(status.HTTP_400_BAD_REQUEST, NOT_LIST)
not_possible_to_delete = []
possible_to_delete = []
try:
json_data=request.json
- (featureGroup_name, features, datalake_source, enable_Dme, dme_host, dme_port, bucket, token, source_name,db_org)=check_featureGroup_data(json_data)
+ (feature_group_name, features, datalake_source, enable_dme, dme_host, dme_port, bucket, token, source_name, db_org) = check_feature_group_data(json_data)
# check the data conformance
- if len(featureGroup_name) < 3 or len(featureGroup_name) > 63:
- api_response = {"Exception": "Failed to create the feature group since feature group name must be between 3 and 63 characters long."}
+ LOGGER.debug("the db info is : ", get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name))
+ if len(feature_group_name) < 3 or len(feature_group_name) > 63 or get_feature_group_by_name_db(PS_DB_OBJ, feature_group_name):
+ api_response = {"Exception": "Failed to create the feature group since feature group not valid or already present"}
response_code = status.HTTP_400_BAD_REQUEST
else:
# the features are stored in string format in the db, and has to be passed as list of feature to the dme. Hence the conversion.
features_list = features.split(",")
- add_featuregroup(featureGroup_name, features, datalake_source, enable_Dme, PS_DB_OBJ,dme_host, dme_port, bucket, token, source_name,db_org )
- if enable_Dme == True :
- response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket, token, features_list, featureGroup_name, dme_host, dme_port)
+ add_featuregroup(feature_group_name, features, datalake_source, enable_dme, PS_DB_OBJ, dme_host, dme_port, bucket, token, source_name, db_org)
+ if enable_dme:
+ response= create_dme_filtered_data_job(TRAININGMGR_CONFIG_OBJ, source_name, db_org, bucket, token, features_list, feature_group_name, dme_host, dme_port)
if response.status_code != 201:
api_response={"Exception": "Cannot create dme job"}
- delete_feature_group_by_name(PS_DB_OBJ, featureGroup_name)
+ delete_feature_group_by_name(PS_DB_OBJ, feature_group_name)
response_code=status.HTTP_400_BAD_REQUEST
else:
api_response={"result": "Feature Group Created"}
api_response={"result": "Feature Group Created"}
response_code =status.HTTP_200_OK
except Exception as err:
- delete_feature_group_by_name(PS_DB_OBJ, featureGroup_name)
+ delete_feature_group_by_name(PS_DB_OBJ, feature_group_name)
err_msg = "Failed to create the feature Group "
api_response = {"Exception":err_msg}
LOGGER.error(str(err))
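The handler above now folds two checks into one condition: name length 3-63 and absence from the db. Pulled out as a standalone predicate for clarity (a sketch only; get_feature_group_by_name_db is assumed to return a falsy value when the group is absent):

```python
def feature_group_creatable(name, db_obj):
    # Name must be 3-63 chars and not already registered.
    if not (3 <= len(name) <= 63):
        return False
    return not get_feature_group_by_name_db(db_obj, name)
```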
- all exception are provided with exception message and HTTP status code.
+ all exceptions are reported with an exception message and HTTP status code.
"""
- LOGGER.debug("Request for getting a feature group with name = "+ featuregroup_name)
api_response={}
response_code=status.HTTP_500_INTERNAL_SERVER_ERROR
- try:
- result= get_feature_group_by_name_db(PS_DB_OBJ, featuregroup_name)
- feature_group=[]
- if result:
- for res in result:
- features=res[1].split(",")
- dict_data={
- "featuregroup_name": res[0],
- "features": features,
- "datalake": res[2],
- "dme": res[3],
- "dme_host": res[4],
- "dme_port": res[5],
- "bucket":res[6],
- "token":res[7],
- "source_name":res[8],
- "db_org":res[9]
- }
- feature_group.append(dict_data)
- api_response={"featuregroup":feature_group}
- response_code=status.HTTP_200_OK
- else:
- response_code=status.HTTP_404_NOT_FOUND
- raise TMException("Failed to fetch feature group info from db")
- except Exception as err:
- api_response = {"Exception": str(err)}
- LOGGER.error(str(err))
+ pattern = re.compile(r"[a-zA-Z0-9_]+")
+ if not re.fullmatch(pattern, featuregroup_name):
+ api_response={"Exception": "Invalid featuregroup_name"}
+ response_code=status.HTTP_400_BAD_REQUEST
+ else:
+ LOGGER.debug("Request for getting a feature group with name = "+ featuregroup_name)
+ try:
+ result= get_feature_group_by_name_db(PS_DB_OBJ, featuregroup_name)
+ feature_group=[]
+ if result:
+ for res in result:
+ features=res[1].split(",")
+ dict_data={
+ "featuregroup_name": res[0],
+ "features": features,
+ "datalake": res[2],
+ "dme": res[3],
+ "dme_host": res[4],
+ "dme_port": res[5],
+ "bucket":res[6],
+ "token":res[7],
+ "source_name":res[8],
+ "db_org":res[9]
+ }
+ feature_group.append(dict_data)
+ api_response={"featuregroup":feature_group}
+ response_code=status.HTTP_200_OK
+ else:
+ response_code=status.HTTP_404_NOT_FOUND
+ raise TMException("Failed to fetch feature group info from db")
+ except Exception as err:
+ api_response = {"Exception": str(err)}
+ LOGGER.error(str(err))
return APP.response_class(response=json.dumps(api_response),
status=response_code,
mimetype=MIMETYPE_JSON)
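Quick sanity check of the whitelist pattern introduced above; re.fullmatch anchors the whole string, so names with dashes or path characters are rejected before any db lookup:

```python
import re

pattern = re.compile(r"[a-zA-Z0-9_]+")
assert re.fullmatch(pattern, "testing_hash")
assert not re.fullmatch(pattern, "bad-name")       # dash rejected
assert not re.fullmatch(pattern, "../etc/passwd")  # path characters rejected
```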
list_of_feature_groups = request.json['featuregroups_list']
if not isinstance(list_of_feature_groups, list):
LOGGER.debug("exception in not instance")
- raise APIException(status.HTTP_400_BAD_REQUEST, "not given as list")
+ raise APIException(status.HTTP_400_BAD_REQUEST, NOT_LIST)
not_possible_to_delete = []
possible_to_delete = []