import re
import json
from trainingmgr.common.exceptions_utls import DBException
-from trainingmgr.models import db, TrainingJob, FeatureGroup
+from trainingmgr.models import db, TrainingJob, TrainingJobStatus
from trainingmgr.constants.steps import Steps
from trainingmgr.constants.states import States
from sqlalchemy.sql import func
# trainingjob.datalake_source = json.dumps({"datalake_source": datalake_source_dic})
trainingjob.creation_time = datetime.datetime.utcnow()
trainingjob.updation_time = trainingjob.creation_time
- run_id = "No data available"
- trainingjob.run_id = run_id
steps_state = {
Steps.DATA_EXTRACTION.name: States.NOT_STARTED.name,
Steps.DATA_EXTRACTION_AND_TRAINING.name: States.NOT_STARTED.name,
Steps.TRAINING_AND_TRAINED_MODEL.name: States.NOT_STARTED.name,
Steps.TRAINED_MODEL.name: States.NOT_STARTED.name
}
- trainingjob.steps_state=json.dumps(steps_state)
- trainingjob.model_url = "No data available."
- trainingjob.notification_url = "No data available."
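+ # Per-step states are kept in a dedicated TrainingJobStatus row, committed
+ # first so its autogenerated id can be linked to the trainingjob (via
+ # steps_state_id) further down.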
+ training_job_status = TrainingJobStatus(states=json.dumps(steps_state))
+ db.session.add(training_job_status)
+ db.session.commit()  # commit so the autogenerated steps_state id is available
+
trainingjob.deletion_in_progress = False
trainingjob.version = 1
setattr(trainingjob_max_version, attr, getattr(trainingjob, attr))
else:
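+ # Link the trainingjob to the status row created above; steps_state_id is
+ # presumably a foreign key to TrainingJobStatus, replacing the removed inline
+ # steps_state JSON string (the model definition itself is not shown here).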
+ trainingjob.steps_state_id = training_job_status.id
db.session.add(trainingjob)
db.session.commit()
States.FINISHED.name)
notification_rapp(trainingjob_info, TRAININGMGR_CONFIG_OBJ)
# upload the trained model to the MME
- is_mme= trainingjob_info.is_mme
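+ # is_mme now comes from the stored training_config (assumed to be JSON) via
+ # the getField helper instead of a direct attribute on trainingjob_info.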
+ is_mme = getField(trainingjob_info.training_config, "is_mme")
if is_mme:
model_name = trainingjob_info.model_name
file = MM_SDK.get_model_zip(trainingjob_name, str(version))