From: subhash kumar singh
Date: Fri, 15 Nov 2024 12:55:13 +0000 (+0000)
Subject: Create TrainingConfig Singleton class
X-Git-Tag: 3.0.0~42
X-Git-Url: https://gerrit.o-ran-sc.org/r/gitweb?a=commitdiff_plain;h=1b48bf3a1ac8cb925fb902f5d13d4213efc0b18a;p=aiml-fw%2Fawmf%2Ftm.git

Create TrainingConfig Singleton class

Created TrainingConfig as a singleton class to share configuration
and the logger, e.g.

    def __init__(self):
        self.logger = TrainingConfig().__logger

then use `self.logger` for logging.

Change-Id: I4eb5f38ffc288825fe6c9ac38312c99238150abe
Signed-off-by: subhash kumar singh
---

diff --git a/tests/test_trainingmgr_config.py b/tests/test_trainingmgr_config.py
index a699588..9460a88 100644
--- a/tests/test_trainingmgr_config.py
+++ b/tests/test_trainingmgr_config.py
@@ -111,8 +111,8 @@ class Test_trainingmgr_config:
 
     @patch('trainingmgr.common.trainingmgr_config.TMLogger', return_value = TMLogger("tests/common/conf_log.yaml"))
     def test_is_config_loaded_properly_return_false(self,mock1):
-        os.environ.pop("KF_ADAPTER_IP")
-        self.TRAININGMGR_CONFIG_OBJ = TrainingMgrConfig()
+        self.TRAININGMGR_CONFIG_OBJ._TrainingMgrConfig__kf_adapter_ip = None
         expected_data = False
-        result = TrainingMgrConfig.is_config_loaded_properly(self.TRAININGMGR_CONFIG_OBJ)
+
+        result = self.TRAININGMGR_CONFIG_OBJ.is_config_loaded_properly()
         assert result == expected_data
\ No newline at end of file
diff --git a/trainingmgr/common/trainingmgr_config.py b/trainingmgr/common/trainingmgr_config.py
index dadbba2..c11ab10 100644
--- a/trainingmgr/common/trainingmgr_config.py
+++ b/trainingmgr/common/trainingmgr_config.py
@@ -29,10 +29,20 @@ class TrainingMgrConfig:
 
     This class conatains method for getting configuration varibles.
     """
+    __instance = None
+
+    def __new__(cls):
+        if cls.__instance is None:
+            cls.__instance = super(TrainingMgrConfig, cls).__new__(cls)
+            cls.__instance.__initialized = False
+        return cls.__instance
+
     def __init__(self):
         """
         This constructor filling configuration varibles.
         """
+        if self.__initialized:
+            return
         self.__kf_adapter_port = getenv('KF_ADAPTER_PORT').rstrip() if getenv('KF_ADAPTER_PORT') is not None else None
         self.__kf_adapter_ip = getenv('KF_ADAPTER_IP').rstrip() if getenv('KF_ADAPTER_IP') is not None else None
@@ -55,6 +65,7 @@ class TrainingMgrConfig:
 
         self.tmgr_logger = TMLogger("common/conf_log.yaml")
         self.__logger = self.tmgr_logger.logger
+        self.__initialized = True
 
     @property
     def kf_adapter_port(self):
@@ -243,13 +254,19 @@ class TrainingMgrConfig:
         if all environment variables got value then function returns True
         otherwise it return False.
         """
-        all_present = True
+        return all([val is not None for val in [self.__kf_adapter_ip,
+                                                self.__kf_adapter_port,
+                                                self.__data_extraction_ip,
+                                                self.__data_extraction_port,
+                                                self.__my_port,
+                                                self.__ps_ip,
+                                                self.__ps_port,
+                                                self.__ps_user,
+                                                self.__ps_password,
+                                                self.__my_ip,
+                                                self.__model_management_service_ip,
+                                                self.__model_management_service_port,
+                                                self.__allow_control_access_origin,
+                                                self.__pipeline,
+                                                self.__logger]])
 
-        for var in [self.__kf_adapter_ip, self.__kf_adapter_port,
-                    self.__data_extraction_ip, self.__data_extraction_port,
-                    self.__my_port, self.__ps_ip, self.__ps_port, self.__ps_user,
-                    self.__ps_password, self.__my_ip,self.__model_management_service_ip, self.__model_management_service_port,
-                    self.__allow_control_access_origin,self.__pipeline, self.__logger]:
-            if var is None:
-                all_present = False
-        return all_present
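
The singleton mechanics introduced by this patch (a cached instance created in __new__ plus an __initialized guard that keeps __init__ from re-running) can be illustrated with a minimal, self-contained sketch. The AppConfig name, the logging setup, and the single KF_ADAPTER_IP variable below are illustrative stand-ins rather than code from tm.git; the sketch only mirrors the pattern the patch applies to TrainingMgrConfig.

    # Minimal sketch of the __new__/__initialized singleton pattern
    # (illustrative names only; AppConfig is not part of tm.git).
    import logging
    from os import getenv


    class AppConfig:
        __instance = None

        def __new__(cls):
            # Create the instance once and hand the same object back on every call.
            if cls.__instance is None:
                cls.__instance = super(AppConfig, cls).__new__(cls)
                cls.__instance.__initialized = False
            return cls.__instance

        def __init__(self):
            # Skip re-initialisation so repeated AppConfig() calls do not
            # re-read the environment or rebuild the logger.
            if self.__initialized:
                return
            self.kf_adapter_ip = getenv('KF_ADAPTER_IP')
            self.logger = logging.getLogger('appconfig')
            self.__initialized = True


    if __name__ == '__main__':
        first = AppConfig()
        second = AppConfig()
        print(first is second)                # True: one shared instance
        print(first.logger is second.logger)  # True: one shared logger

With this guard in place, a consumer class can write self.logger = AppConfig().logger in its own __init__ and always receive the same shared logger object, which is the usage pattern the commit message describes; the sketch keeps the logger attribute public for simplicity, whereas the real TrainingMgrConfig stores it behind the name-mangled __logger attribute.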