Functions added to create and delete DME filtered data jobs 37/10937/6
Author:    josephthaliath <jo.thaliath@samsung.com>
           Mon, 17 Apr 2023 09:53:11 +0000 (15:23 +0530)
Committer: josephthaliath <jo.thaliath@samsung.com>
           Mon, 17 Apr 2023 15:29:10 +0000 (20:59 +0530)
Issue-Id: AIMLFW-40

Change-Id: I49fb7bded843cbd00599364c157972709ca42cdf
Signed-off-by: josephthaliath <jo.thaliath@samsung.com>
requirements.txt
tox.ini
trainingmgr/common/trainingmgr_operations.py

index 0fb2e6b..63c209a 100644 (file)
@@ -25,3 +25,4 @@ requests
 pandas
 PyYAML
 kubernetes
+validators==0.20.0
diff --git a/tox.ini b/tox.ini
index cf847e0..10eb822 100644 (file)
--- a/tox.ini
+++ b/tox.ini
@@ -22,6 +22,7 @@ skipsdist = true
 
 # basic test and coverage job
 [testenv:code]
+allowlist_externals = git
 basepython = python3.8
 deps=
   pytest
@@ -40,6 +41,7 @@ deps=
   python-dotenv
   kubernetes
   pg8000
+  validators==0.20.0
 
 setenv = cd  = {toxinidir}/tests
 commands =
index cf0e096..cece95c 100644 (file)
@@ -23,6 +23,17 @@ Training manager main operations
 
 import json
 import requests
+import validators
+
def create_url_host_port(protocol, host, port, path=''):
    """
    Build and validate a URL of the form <protocol>://<host>:<port>/<path>.

    :param protocol: URL scheme, e.g. 'http'
    :param host: host name or IP address (string)
    :param port: port as a string — it is concatenated, not converted,
                 so callers must pass it already stringified
    :param path: optional path appended after the trailing slash
    :return: the validated URL string, or None when validation fails
    """
    url = protocol + '://' + host + ':' + port + '/' + path
    if not validators.url(url):
        # Bug fix: the original referenced an undefined `logger` here
        # (NameError on the failure path) and passed `url` with no %s
        # placeholder. Use a module logger with lazy %-formatting instead.
        import logging
        logging.getLogger(__name__).debug('URL validation error: %s', url)
        return None
    return url
 
 def data_extraction_start(training_config_obj, trainingjob_name, feature_list, query_filter,
                           datalake_source, _measurement, bucket):
@@ -107,3 +118,43 @@ def training_start(training_config_obj, dict_data, trainingjob_name):
                                       'Accept-Charset': 'UTF-8'})
 
     return response
+
def create_dme_filtered_data_job(training_config_obj, source_name, db_org, bucket_name,
                                 token, features, feature_group_name, host, port):
    """
    Create a filtered-PM-data job through the Non-RT RIC DME consumer API.

    Builds the job definition (InfluxDB sink plus a pmdata filter on the
    given source and measurement types) and PUTs it to the DME info-jobs
    endpoint under ``feature_group_name``.

    :return: the ``requests`` response from the PUT call
    """
    logger = training_config_obj.logger

    # pmdata filter: restrict collection to this source and these counters.
    pm_filter = {
        "sourceNames": [source_name],
        "measTypes": features,
    }
    job_definition = {
        "db-url": "http://influxdb.onap:8086",
        "db-org": db_org,
        "db-bucket": bucket_name,
        "db-token": token,
        "filterType": "pmdata",
        "filter": pm_filter,
    }
    job_json = {
        "info_type_id": "json-file-data-from-filestore-to-influx",
        "job_owner": "console",
        "status_notification_uri": "http://callback.nonrtric:80/post",
        "job_definition": job_definition,
    }

    url = create_url_host_port(
        'http', host, port,
        'data-consumer/v1/info-jobs/{}'.format(feature_group_name))
    logger.debug(url)
    payload = json.dumps(job_json)
    logger.debug(payload)
    # NOTE(review): if URL validation failed, `url` is None and requests.put
    # will raise — confirm callers always supply a valid host/port.
    response = requests.put(url, data=payload,
                            headers={'Content-type': 'application/json'})

    return response
+
def delete_dme_filtered_data_job(training_config_obj, feature_group_name, host, port):
    """
    Delete a filtered-PM-data job through the Non-RT RIC DME consumer API.

    Issues a DELETE against the info-jobs endpoint identified by
    ``feature_group_name`` on the given host/port.

    :return: the ``requests`` response from the DELETE call
    """
    logger = training_config_obj.logger

    job_path = 'data-consumer/v1/info-jobs/{}'.format(feature_group_name)
    url = create_url_host_port('http', host, port, job_path)
    logger.debug(url)
    return requests.delete(url)