"notification_url": "",
"_measurement": "liveCell",
"bucket": "UEdata",
- "accuracy": 70
+ "deletion_in_progress": False,
+ "is_mme": False,
+ "model_name": "",
+ "model_info": ""
}
mimic_fg_db={
def test_add_update_trainingjob(self):
checker = Check()
db_obj = db_helper([[None]], check_success_obj=checker)
- add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester',db_obj)
+ add_update_trainingjob('description', 'qoe-pipeline', 'Default', 'featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester', db_obj)
assert checker.finished, 'add_update_trainingjob FAILED when adding = True'
def test_negative_add_update_trainingjob_2(self):
checker = Check()
db_obj = db_helper([[None]], check_success_obj=checker)
- add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', True, False, 1, 'InfluxSource', 'Tester',db_obj)
+ add_update_trainingjob('description', 'qoe-pipeline', 'Default', 'featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester', db_obj)
assert checker.finished, 'add_update_trainingjob FAILED when adding = True'
def test_negative_add_update_trainingjob_3(self):
checker = Check()
db_obj = db_helper([[None]], check_success_obj=checker)
try:
- add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, True, 1, 'InfluxSource', 'Tester',db_obj)
+ add_update_trainingjob('description', 'qoe-pipeline', 'Default', 'featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester', db_obj)
assert checker.finished, 'add_update_trainingjob FAILED when adding = True'
assert False
except Exception:
checker = Check()
db_obj = db_helper([[None]], check_success_obj=checker)
try:
- add_update_trainingjob('Testing', 'qoe-pipeline', 'Default', '*', '{epoches : 1}', '', False, False, 1, 'InfluxSource', 'Tester',db_obj)
+ add_update_trainingjob('description', 'qoe-pipeline', 'Default', 'featuregroup', '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester', db_obj)
assert checker.finished, 'add_update_trainingjob FAILED when adding = True'
assert False
except Exception:
assert checker.finished, 'add_update_trainingjob FAILED when adding = True'
except Exception as err:
fxn_name = "add_update_trainingjob"
- assert str(err) == "add_update_trainingjob() takes from 12 to 15 positional arguments but 19 were given", 'Negative test {} FAILED when adding = True , Doesnt returned required error'.format(fxn_name)
+ assert str(err) == "add_update_trainingjob() takes from 12 to 18 positional arguments but 19 were given", 'Negative test {} FAILED when adding = True , Doesnt returned required error'.format(fxn_name)
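# Sanity check for the arity message asserted above -- a hedged, standalone
# sketch (hypothetical _stub, not the real function): 12 required positional
# parameters plus 6 defaulted ones yield exactly the "12 to 18" wording.
def _stub(a, b, c, d, e, f, g, h, i, j, k, l,
          m="", n="", o="", p=False, q="", r=""):
    pass

try:
    _stub(*range(19))  # one positional argument too many
except TypeError as err:
    assert "takes from 12 to 18 positional arguments but 19 were given" in str(err)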
def test_get_all_jobs_latest_status_version(self):
db_obj = db_helper([["usecase_name"]])
def add_update_trainingjob(description, pipeline_name, experiment_name, feature_list, arguments,
query_filter, adding, enable_versioning,
pipeline_version, datalake_source, trainingjob_name, ps_db_obj, notification_url="",
- _measurement="", bucket=""):
+ _measurement="", bucket="", is_mme="", model_name="", model_info=""):
"""
This function adds a new row or updates an existing row with the given information
"""
version = version + 1
cursor.execute('''INSERT INTO {} VALUES '''.format(tm_table_name) + \
'''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
- ''' %s,%s,%s,%s,%s,%s,%s)''',
+ ''' %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
(trainingjob_name, description, feature_list, pipeline_name,
experiment_name, arguments_string, query_filter,
creation_time, run_id, json.dumps(steps_state),
updation_time, version,
enable_versioning, pipeline_version,
datalake_source_string, model_url, notification_url,
- _measurement, bucket, deletion_in_progress))
+ _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info))
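+ # the three added values (is_mme, model_name, model_info) must be matched
+ # by three added %s placeholders in the VALUES list above, giving 23 in total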
else:
cursor.execute('''update {} set description=%s, feature_list=%s, '''.format(tm_table_name) + \
'''pipeline_name=%s,experiment_name=%s,arguments=%s,''' + \
'''query_filter=%s,creation_time=%s,run_id=%s,steps_state=%s,''' + \
'''pipeline_version=%s,updation_time=%s,enable_versioning=%s,''' + \
'''datalake_source=%s,''' + \
'''model_url=%s, notification_url=%s, _measurement=%s, ''' + \
- '''bucket=%s, deletion_in_progress=%s where ''' + \
+ '''bucket=%s, deletion_in_progress=%s, is_mme=%s, model_name=%s, model_info=%s where ''' + \
'''trainingjob_name=%s and version=%s''',
(description, feature_list, pipeline_name, experiment_name,
arguments_string, query_filter, creation_time, run_id,
json.dumps(steps_state),
pipeline_version, updation_time, enable_versioning,
datalake_source_string, model_url, notification_url,
- _measurement, bucket, deletion_in_progress, trainingjob_name, version))
+ _measurement, bucket, deletion_in_progress, is_mme, model_name, model_info, trainingjob_name, version))
else:
cursor.execute(''' INSERT INTO {} VALUES '''.format(tm_table_name) + \
'''(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,''' + \
- '''%s,%s,%s,%s,%s,%s,%s,%s)''',
+ '''%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)''',
(trainingjob_name, description, feature_list, pipeline_name,
experiment_name, arguments_string, query_filter, creation_time,
run_id, json.dumps(steps_state),
updation_time, version, enable_versioning,
pipeline_version, datalake_source_string,
model_url, notification_url, _measurement, bucket,
- deletion_in_progress))
+ deletion_in_progress, is_mme, model_name, model_info))
conn.commit()
cursor.close()
except Exception as err:
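# Hedged usage sketch of the extended signature. db_obj stands for whatever
# DB handle the caller already holds; the keyword values reuse the
# _measurement/bucket values from the mock row above and are illustrative only.
add_update_trainingjob(
    'description', 'qoe-pipeline', 'Default', 'featuregroup',
    '{epoches : 1}', '', True, True, 1, 'InfluxSource', 'Tester', db_obj,
    notification_url='', _measurement='liveCell', bucket='UEdata',
    is_mme=False, model_name='', model_info='')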