From c1dbd842142685b1594f95660c94f06592f42610 Mon Sep 17 00:00:00 2001
From: lb584 <lb584@cam.ac.uk>
Date: Wed, 12 Apr 2023 11:28:15 +0100
Subject: [PATCH] removing redundant function calls from EnvSuitPipeline and
 ews_met_data_extraction; some additional comments and code-tidying

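The calls to generate_all() and run_merger() in run_pipeline() are
redundant and are commented out rather than deleted. The deposition
integration test now builds its job directory under the configured
output path instead of using a bare relative name. A minimal sketch of
the new construction (the values below are hypothetical; the real
attributes live on BaseDepoTestSuite.DepoTestSuite, as in the hunk
further down):

    import os

    TEST_OUT_PATH = "/tmp/depo_tests"    # hypothetical output root
    TEST_START_DATE = "20230412"         # hypothetical run date

    # join the output root and the per-run directory name portably
    TEST_JOB_DIR = os.path.join(TEST_OUT_PATH,
                                "DEPOSITION_" + TEST_START_DATE)
    # -> /tmp/depo_tests/DEPOSITION_20230412
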
---
 coordinator/EnvSuitPipeline.py                | 58 +++++++++----------
 .../integration/full/full_test_deposition.py  |  3 +-
 2 files changed, 31 insertions(+), 30 deletions(-)

diff --git a/coordinator/EnvSuitPipeline.py b/coordinator/EnvSuitPipeline.py
index 4e915bb..636e08e 100644
--- a/coordinator/EnvSuitPipeline.py
+++ b/coordinator/EnvSuitPipeline.py
@@ -76,25 +76,25 @@ def clean(workPath): # Clean temporary files and folders from the working direct
     return
 
 
-def generate_all(sys_config, run_config):
-    # Write run_config.json
-    workPath = getParameter(run_config,'OUTPUT_DIR')
-    run_configName = 'run_config.json'
-    run_configFile = workPath + run_configName
-
-    with open(run_configFile, 'w') as run_configJson:
-        json.dump(run_config, run_configJson, indent=4)
-
-    run_configJson.close()
-
-    # Run all generate
-    try:
-        job_runner.generate_all_jobs(run_config, sys_config)
-    except Exception:
-        logger.exception(f"Some failure when running one of the generate job", exc_info=True)
-        raise
-
-    return
+# def generate_all(sys_config, run_config):
+#     # Write run_config.json
+#     workPath = getParameter(run_config,'OUTPUT_DIR')
+#     run_configName = 'run_config.json'
+#     run_configFile = workPath + run_configName
+#
+#     with open(run_configFile, 'w') as run_configJson:
+#         json.dump(run_config, run_configJson, indent=4)
+#
+#     run_configJson.close()
+#
+#     # Run all generate
+#     try:
+#         job_runner.generate_all_jobs(run_config, sys_config)
+#     except Exception:
+#         logger.exception("Some failure when running one of the generate jobs", exc_info=True)
+#         raise
+#
+#     return
 
 
 def run_extraction(run_params: dict, sys_params: dict):
@@ -109,12 +109,12 @@ def run_post_processing(run_params: dict, sys_params: dict, processor_name: str)
     logger.info('Data extracted and chunked')
 
 
-def run_merger(run_params: dict, sys_params: dict, processor_name: str):
-    try:
-        job_runner.run_merge_post_processing(run_params, sys_params, processor_name)
-    except Exception:
-        logger.exception(f"Some failure when running merge RIE", exc_info=True)
-        raise
+# def run_merger(run_params: dict, sys_params: dict, processor_name: str):
+#     try:
+#         job_runner.run_merge_post_processing(run_params, sys_params, processor_name)
+#     except Exception:
+#         logger.exception("Some failure when running merge RIE", exc_info=True)
+#         raise
 
 
 #######################################
@@ -208,7 +208,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
             if (extracted == False):
                 clean(workPath)
 
-            generate_all(sys_config, config)
+            # generate_all(sys_config, config)
 
             # Extract
             if (extracted == False):
@@ -218,7 +218,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
             logger.info(f"Starting {processor_name} post processor ---------------------------------")
             run_post_processing(config, sys_config, processor_name)
 
-            run_merger(config, sys_config, processor_name)
+            # run_merger(config, sys_config, processor_name)
         else:
             strains = getParameter(pipeline_config, 'STRAINS')
 
@@ -240,7 +240,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
                 if (extracted == False):
                     clean(workPath)
 
-                generate_all(sys_config, config)
+                # generate_all(sys_config, config)
 
                 # Extract
                 if (extracted == False):
@@ -251,7 +251,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
                 envSuitPath = workPath + 'post_processing/RIE/'
                 run_post_processing(config, sys_config, processor_name)
 
-                run_merger(config, sys_config, processor_name)
+                # run_merger(config, sys_config, processor_name)
 
                 resultFile = envSuitPath + 'RIE.csv'
                 strain_outPath = os.path.join(region_outPath,strain)
diff --git a/tests/integration/full/full_test_deposition.py b/tests/integration/full/full_test_deposition.py
index 01f3837..b3092a9 100644
--- a/tests/integration/full/full_test_deposition.py
+++ b/tests/integration/full/full_test_deposition.py
@@ -75,7 +75,8 @@ if __name__ == '__main__':
 
     BaseDepoTestSuite.DepoTestSuite.TEST_START_DATE = IntegrationTestUtils.generate_run_date(_run_date_type, _custom_run_date)
 
-    BaseDepoTestSuite.DepoTestSuite.TEST_JOB_DIR = "DEPOSITION_" + BaseDepoTestSuite.DepoTestSuite.TEST_START_DATE
+    BaseDepoTestSuite.DepoTestSuite.TEST_JOB_DIR = os.path.join(BaseDepoTestSuite.DepoTestSuite.TEST_OUT_PATH,
+                                                                "DEPOSITION_" + BaseDepoTestSuite.DepoTestSuite.TEST_START_DATE)
 
     #  Now set the sys.argv to the unittest_args (leaving sys.argv[0] alone)
     sys.argv[1:] = _args.unittest_args
-- 
GitLab