From bb6abdbb1d082ae9ed9b86838b49fa381ef08fda Mon Sep 17 00:00:00 2001
From: kkangmin
Date: Fri, 10 Oct 2025 14:06:01 +0000
Subject: [PATCH] Add model info return logic for agent_controller.py

Implement model retrieval in agent_service:
- Added get_agent_model() function using TrainingMgrConfig.llm_agent_model_for_tm
- Modified agent_controller.py to return model info from service
- Improved endpoint consistency and removed redundant env reads

Issue-Id: AIMLFW-296
Change-Id: I2b49877cd77677e30e91a7bd32f51fbfba923258
Signed-off-by: kkangmin
---
 trainingmgr/controller/agent_controller.py | 28 ++++++++++++++++++++++------
 trainingmgr/service/agent_service.py       | 15 ++++++++++++---
 2 files changed, 34 insertions(+), 9 deletions(-)

diff --git a/trainingmgr/controller/agent_controller.py b/trainingmgr/controller/agent_controller.py
index e7e69c3..866fc73 100644
--- a/trainingmgr/controller/agent_controller.py
+++ b/trainingmgr/controller/agent_controller.py
@@ -16,7 +16,9 @@
 #
 # ==================================================================================
 from flask import Blueprint, request, jsonify
-from trainingmgr.service.agent_service import AgentClient
+from trainingmgr.service.agent_service import AgentClient, get_agent_model
+from trainingmgr.common.exceptions_utls import TMException
+from http import HTTPStatus
 
 agent_controller = Blueprint("agent_controller", __name__)
 
@@ -28,11 +30,25 @@ _agent_client.initialize_agent()
 
 @agent_controller.route("/modelInfo", methods=["GET"])
 def model_info():
-    return jsonify({
-        "llm": {
-            "model": "",
-        }
-    }), 200
+    try:
+        model = get_agent_model()
+        return jsonify({
+            "llm": {
+                "model": model,
+            }
+        }), HTTPStatus.OK
+    except TMException as e:
+        return jsonify({
+            "title": "Service Unavailable",
+            "status": HTTPStatus.SERVICE_UNAVAILABLE,
+            "detail": str(e)
+        }), HTTPStatus.SERVICE_UNAVAILABLE
+    except Exception as e:
+        return jsonify({
+            "title": "Internal Server Error",
+            "status": HTTPStatus.INTERNAL_SERVER_ERROR,
+            "detail": str(e)
+        }), HTTPStatus.INTERNAL_SERVER_ERROR
 
 @agent_controller.route("/generate-content", methods=["POST"])
 def generate_content():
diff --git a/trainingmgr/service/agent_service.py b/trainingmgr/service/agent_service.py
index c1568f6..5c94be7 100644
--- a/trainingmgr/service/agent_service.py
+++ b/trainingmgr/service/agent_service.py
@@ -105,8 +105,8 @@ class AgentClient:
             return True
 
         try:
-            agent_model = os.getenv("LLM_AGENT_MODEL_FOR_TM")
-            agent_token = os.getenv("LLM_AGENT_MODEL_TOKEN_FOR_TM")
+            agent_model = CONFIG.llm_agent_model_for_tm
+            agent_token = CONFIG.llm_agent_model_token_for_tm
 
             if not agent_model:
                 LOGGER.error("LLM_AGENT_MODEL_FOR_TM not specified")
@@ -150,4 +150,13 @@
         return {
             'success': False,
             'error': str(err),
-        }
\ No newline at end of file
+        }
+
+def get_agent_model() -> str:
+    """
+    Return the configured LLM agent model name for TM via TrainingMgrConfig.
+    """
+    model = CONFIG.llm_agent_model_for_tm
+    if not model:
+        raise TMException("LLM agent model not configured")
+    return model
\ No newline at end of file
-- 
2.16.6
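
Not part of the patch: a minimal test sketch for the new get_agent_model() helper,
covering the success path and the TMException path that the controller maps to
503 Service Unavailable. It assumes the module paths shown in the diff, that
CONFIG is a module-level attribute of trainingmgr.service.agent_service that can
be patched in tests, that the service module imports cleanly in a test
environment, and that pytest is available; the test names are hypothetical.

    # test_agent_service_model.py -- sketch only, not included in this change.
    from unittest import mock

    import pytest

    from trainingmgr.common.exceptions_utls import TMException
    from trainingmgr.service import agent_service


    def test_get_agent_model_returns_configured_name():
        # When TrainingMgrConfig exposes a model name, the helper returns it unchanged.
        with mock.patch.object(agent_service, "CONFIG") as cfg:
            cfg.llm_agent_model_for_tm = "example-model"
            assert agent_service.get_agent_model() == "example-model"


    def test_get_agent_model_raises_when_unset():
        # An unset model name raises TMException, which /modelInfo turns into
        # a problem-details style body with HTTP 503 Service Unavailable.
        with mock.patch.object(agent_service, "CONFIG") as cfg:
            cfg.llm_agent_model_for_tm = None
            with pytest.raises(TMException):
                agent_service.get_agent_model()

With the patch applied, GET /modelInfo returns {"llm": {"model": "<configured name>"}}
on success; configuration errors surface as a 503 body and unexpected errors as a 500
body, per the handlers added in agent_controller.py.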