# location in the container
ENV TA_DIR /home/app/
WORKDIR ${TA_DIR}
+
# Install dependencies
-RUN apt-get update && apt-get install -y \
- python3-pip
-RUN apt-get install -y apt-utils
+RUN apt-get update && \
+ apt-get install -y python3-pip apt-utils
# Copy sources into the container
COPY . .
-RUN git clone "https://gerrit.o-ran-sc.org/r/aiml-fw/athp/sdk/feature-store"
-RUN git clone "https://gerrit.o-ran-sc.org/r/aiml-fw/athp/sdk/model-storage"
-RUN mkdir -p /SDK/featurestoresdk_main/
-RUN mkdir -p /SDK/modelmetricssdk_main/
-RUN cp -R feature-store/. /SDK/featurestoresdk_main/.
-RUN cp -R model-storage/. /SDK/modelmetricssdk_main/.
+RUN git clone "https://gerrit.o-ran-sc.org/r/aiml-fw/athp/sdk/feature-store" /SDK/featurestoresdk_main
+RUN git clone "https://gerrit.o-ran-sc.org/r/aiml-fw/athp/sdk/model-storage" /SDK/modelmetricssdk_main
+
RUN pip3 install /SDK/featurestoresdk_main/.
RUN pip3 install /SDK/modelmetricssdk_main/.
RUN pip3 install .
response = requests.post(notification_url,
data=json.dumps(req_json),
headers={
- 'content-type': 'application/json',
+ 'content-type': MIMETYPE_JSON,
'Accept-Charset': 'UTF-8'
})
- if ( response.headers['content-type'] != "application/json"
+ if ( response.headers['content-type'] != MIMETYPE_JSON
or response.status_code != status.HTTP_200_OK ):
err_msg = "Failed to notify the subscribed url " + trainingjob_name
raise TMException(err_msg)
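MIMETYPE_JSON is referenced here but not defined in this hunk; presumably it comes from a shared constants module. A minimal sketch of that assumption (the module location and constant placement are not confirmed by this change, only the value the check above relies on):

# Assumed shared constant (module path hypothetical): the notification check
# above only needs MIMETYPE_JSON to resolve to the JSON media type.
MIMETYPE_JSON = "application/json"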
def get_one_key(dictionary):
'''
- this function finds any one key from dictionary and retuen it.
+ This function finds any one key from the dictionary and returns it.
'''
only_key = None
for key in dictionary:
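The hunk is cut off mid-loop; purely for illustration, the behaviour described by the docstring (return any one key, or None for an empty dictionary) can be expressed as below. This sketch is not the project's actual implementation, and the helper name is hypothetical:

# Illustrative equivalent only; not the file's actual loop body.
def first_key_or_none(dictionary):
    """Return any one key from the dictionary, or None if it is empty."""
    return next(iter(dictionary), None)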
from trainingmgr.common.exceptions_utls import DBException
tm_table_name = "trainingjob_info" # Table used by 'Training Manager' for training jobs
-fg_table_name="featuregroup_info" # Table used by 'Training Manager' for Feature Groups
+fg_table_name = "featuregroup_info" # Table used by 'Training Manager' for Feature Groups
DB_QUERY_EXEC_ERROR = "Failed to execute query in "
def get_data_extraction_in_progress_trainingjobs(ps_db_obj):
raise DBException("Failed to execute query in change_field_value_by_version," + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def get_field_by_latest_version(trainingjob_name, ps_db_obj, field):
raise DBException("Failed to execute query in get_field_by_latest_version," + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
raise DBException("Failed to execute query in get_field_of_given_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
"change_in_progress_to_failed_by_latest_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return status_changed
"change_steps_state_of_latest_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def change_steps_state_by_version(trainingjob_name, version, ps_db_obj, key, value):
"change_steps_state_by_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def delete_trainingjob_version(trainingjob_name, version, ps_db_obj):
"delete_trainingjob_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def get_info_by_version(trainingjob_name, version, ps_db_obj):
"get_info_by_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
"get_trainingjob_info_by_name" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
def get_latest_version_trainingjob_name(trainingjob_name, ps_db_obj):
"get_latest_version_trainingjob_name" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return version
"get_all_versions_info_by_name" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
"get_all_distinct_trainingjobs" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return trainingjobs
"get_all_version_num_by_trainingjob_name" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return versions
"update_model_download_url" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def add_update_trainingjob(description, pipeline_name, experiment_name, feature_list, arguments,
query_filter, adding, enable_versioning,
pipeline_version, datalake_source, trainingjob_name, ps_db_obj, notification_url="",
_measurement="", bucket=""):
- """
- This function add the new row or update existing row with given information
- """
+ """
+ This function adds a new row or updates an existing row with the given information.
+ """
- conn = None
- try:
+ conn = None
+ try:
arguments_string = json.dumps({"arguments": arguments})
datalake_source_dic = {}
datalake_source_dic[datalake_source] = {}
deletion_in_progress))
conn.commit()
cursor.close()
- except Exception as err:
- if conn is not None:
- conn.rollback()
- raise DBException(DB_QUERY_EXEC_ERROR + \
- "add_update_trainingjob" + str(err))
- finally:
- if conn is not None:
- conn.close()
+ except Exception as err:
+ if conn is not None:
+ conn.rollback()
+ raise DBException(DB_QUERY_EXEC_ERROR + \
+ "add_update_trainingjob" + str(err))
+ finally:
+ if conn is not None:
+ conn.close()
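Each helper in this module repeats the same connect, execute, rollback-on-error, close-in-finally sequence that the re-indented block above shows in full. A hedged sketch of how that recurring pattern could be factored into a context manager; the connection factory, helper name, and overall structure are assumptions for illustration, not part of this change:

from contextlib import contextmanager

from trainingmgr.common.exceptions_utls import DBException

DB_QUERY_EXEC_ERROR = "Failed to execute query in "  # same prefix as defined above

@contextmanager
def db_cursor(connect, caller_name):
    """Yield a cursor; commit on success, roll back and wrap errors, always close."""
    conn = None
    try:
        conn = connect()  # hypothetical psycopg2-style connection factory
        cursor = conn.cursor()
        yield cursor
        conn.commit()
        cursor.close()
    except Exception as err:
        if conn is not None:
            conn.rollback()
        raise DBException(DB_QUERY_EXEC_ERROR + caller_name + ", " + str(err))
    finally:
        if conn is not None:
            conn.close()

With such a helper, each of the query functions above would shrink to a single with-block while keeping the same rollback and close behaviour shown in this diff.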
def add_featuregroup(feature_group_name, feature_list, datalake_source, enable_dme, ps_db_obj , dmehost="", dmeport="", bucket="", token="", source_name="",db_org=""):
"""
- This function add the new row or update existing row with given information
+ This function adds a new row or updates an existing row with the given information.
"""
conn = None
"delete_feature_group" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
def get_all_jobs_latest_status_version(ps_db_obj):
"""
"get_all_jobs_latest_status_version" + str(err))
finally:
if conn is not None:
- conn.close()
+ conn.close()
return results
def get_info_of_latest_version(trainingjob_name, ps_db_obj):