#
# ==================================================================================
from http import HTTPStatus

from flask import Blueprint, request, jsonify

from trainingmgr.common.exceptions_utls import TMException
from trainingmgr.service.agent_service import AgentClient, get_agent_model
agent_controller = Blueprint("agent_controller", __name__)
@agent_controller.route("/modelInfo", methods=["GET"])
def model_info():
    """
    GET /modelInfo -- report the LLM model name configured for TM.

    Returns:
        200 with ``{"llm": {"model": <name>}}`` when a model is configured.
        503 with a problem-details body when configuration is missing
        (``TMException`` raised by ``get_agent_model``).
        500 with a problem-details body on any other failure.
    """
    try:
        model = get_agent_model()
        return jsonify({
            "llm": {
                "model": model,
            }
        }), HTTPStatus.OK
    except TMException as exc:
        # Configuration problem (no model set) -> service unavailable.
        return jsonify({
            "title": "Service Unavailable",
            "status": HTTPStatus.SERVICE_UNAVAILABLE,
            "detail": str(exc)
        }), HTTPStatus.SERVICE_UNAVAILABLE
    except Exception as exc:
        # Unexpected failure -> generic 500 problem-details body.
        return jsonify({
            "title": "Internal Server Error",
            "status": HTTPStatus.INTERNAL_SERVER_ERROR,
            "detail": str(exc)
        }), HTTPStatus.INTERNAL_SERVER_ERROR
@agent_controller.route("/generate-content", methods=["POST"])
def generate_content():
    """
    POST /generate-content -- forward a generation request to the LLM agent.

    Reads the agent model/token from TrainingMgrConfig (``CONFIG``) and
    delegates the JSON request body to ``AgentClient``.

    Returns:
        200 with ``{"success": True, "result": ...}`` on success,
        503 when no agent model is configured,
        500 with ``{"success": False, "error": ...}`` on any other failure.
    """
    try:
        # Model/token come from TrainingMgrConfig rather than raw env vars.
        agent_model = CONFIG.llm_agent_model_for_tm
        agent_token = CONFIG.llm_agent_model_token_for_tm
        if not agent_model:
            LOGGER.error("LLM_AGENT_MODEL_FOR_TM not specified")
            return {
                'success': False,
                'error': "LLM_AGENT_MODEL_FOR_TM not specified",
            }, HTTPStatus.SERVICE_UNAVAILABLE
        # NOTE(review): the success path is not visible in the merged source;
        # assumes AgentClient(model, token).generate_content(payload) is the
        # agent entry point -- confirm against AgentClient's actual API.
        payload = request.get_json()
        client = AgentClient(agent_model, agent_token)
        result = client.generate_content(payload)
        return {
            'success': True,
            'result': result,
        }, HTTPStatus.OK
    except Exception as err:
        # Any failure from the agent call surfaces as a 500 with the message.
        LOGGER.exception("generate-content failed")
        return {
            'success': False,
            'error': str(err),
        }, HTTPStatus.INTERNAL_SERVER_ERROR
+
def get_agent_model() -> str:
    """
    Return the configured LLM agent model name for TM.

    Reads ``CONFIG.llm_agent_model_for_tm``.

    Returns:
        The non-empty model name string.

    Raises:
        TMException: when the model name is unset or empty.
    """
    # NOTE(review): this module-level def shadows the get_agent_model imported
    # from trainingmgr.service.agent_service above -- confirm which one is the
    # intended implementation and remove the duplicate.
    model = CONFIG.llm_agent_model_for_tm
    if not model:
        raise TMException("LLM agent model not configured")
    return model
\ No newline at end of file