From 5bec4755296a071b34cd7e6ee42e75debb890afc Mon Sep 17 00:00:00 2001
From: lb584 <lb584@cam.ac.uk>
Date: Fri, 19 Aug 2022 12:16:56 +0100
Subject: [PATCH] More work on wiring the new extractor into the coordinator

---
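Notes for reviewers (below the --- line, so not part of the commit message):

This patch temporarily disables the initial-status check in run_Process()
and the scp/untar/NAME-conversion steps in process_in_job_env2_0() so the
new extractor can be exercised against a pre-populated job directory; the
TODO comments below mark everything that must be reverted before merge.
As a minimal sketch of an alternative to commenting code in and out,
assuming a hypothetical SKIP_ENV2_FETCH environment variable (not
currently in the codebase), the same toggle could be made greppable and
revert-free:

    import os

    # hypothetical debug switch; unset means normal behaviour
    SKIP_ENV2_FETCH = os.environ.get('SKIP_ENV2_FETCH', '0') == '1'

    if not SKIP_ENV2_FETCH:
        subprocess_and_log(cmd_scp, description_short, description_long)
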
 EnvSuitPipeline.py     |  4 ++--
 Processor.py           |  9 +++++----
 ProcessorComponents.py | 19 +++++++++++--------
 3 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/EnvSuitPipeline.py b/EnvSuitPipeline.py
index 20a9019..7d0729b 100644
--- a/EnvSuitPipeline.py
+++ b/EnvSuitPipeline.py
@@ -118,8 +118,8 @@ def run_merger(run_params: dict, sys_params: dict, processor_name: str):
 
 
 #######################################
-
-def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = True):
+# TODO (lawrence): revert to the original default (prevent_overwrite=True) before merge
+def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = False):
     '''
     The prevent_overwrite parameter can be set to False if you want to re-run
     a job in-place.
diff --git a/Processor.py b/Processor.py
index 90ea12c..90e8f40 100755
--- a/Processor.py
+++ b/Processor.py
@@ -415,11 +415,12 @@ def run_Process():
     # lock job directory
     with jobStatus(jobPath) as status:
 
+        # TODO (lawrence): re-enable this initial-status check before merge
         # check for a status file in job directory
-        if status.had_initial_status:
-            logger.info(f"Job path already exists and has status {status.status}")
-
-            endScript(premature = status.status not in ['SUCCESS','INPROGRESS'])
+        # if status.had_initial_status:
+        #     logger.info(f"Job path already exists and has status {status.status}")
+        #
+        #     endScript(premature = status.status not in ['SUCCESS','INPROGRESS'])
 
         logger.info(f"Current status of job directory is {status.status}")
 
diff --git a/ProcessorComponents.py b/ProcessorComponents.py
index ba30157..9831c93 100644
--- a/ProcessorComponents.py
+++ b/ProcessorComponents.py
@@ -1294,7 +1294,8 @@ def process_in_job_env2_0(jobPath,status,config,component):
     description_short = 'env2 scp'
     description_long = 'Copying file from remote server to job directory'
 
-    subprocess_and_log(cmd_scp,description_short, description_long)
+    # TODO (lawrence): re-enable this scp before merge (skipped while testing the new extractor)
+    # subprocess_and_log(cmd_scp,description_short, description_long)
 
     logger.info('untarring the input file')
 
@@ -1303,7 +1304,8 @@ def process_in_job_env2_0(jobPath,status,config,component):
     description_short = 'env2 tar'
     description_long = 'Untarring the input file'
 
-    subprocess_and_log(cmd_tar,description_short, description_long)
+    # TODO (lawrence): re-enable this untar before merge (skipped while testing the new extractor)
+    # subprocess_and_log(cmd_tar,description_short, description_long)
 
     # basic check that contents are as expected for 7-day forecast
     # 57 files of NAME .txt timesteps and some number of log files
@@ -1319,10 +1321,11 @@ def process_in_job_env2_0(jobPath,status,config,component):
 
     output_directory = f"{jobPath}/NAME_Met_as_netcdf"
 
-    try:
-        npp.process_met_office_NAME(input_files_glob,output_directory)
-    except:
-        logger.exception(f"Some failure when converting NAME data from .txt to nc.tar.gz")
+    # TODO (lawrence): re-enable this NAME .txt to netCDF conversion before merge (skipped while testing the new extractor)
+    # try:
+    #     npp.process_met_office_NAME(input_files_glob,output_directory)
+    # except:
+    #     logger.exception(f"Some failure when converting NAME data from .txt to nc.tar.gz")
 
     # TODO: check that process_met_office_NAME() produced output as expected
 
@@ -1332,8 +1335,8 @@ def process_in_job_env2_0(jobPath,status,config,component):
 
     pipeline_config = config["Environment"]
     try:
-        #todo lawrence comment this back to original
-        esp.run_pipeline(pipeline_config, region, config["StartString"], extracted=False)
+        # TODO (lawrence): revert to the original call (extracted=False) before merge
+        esp.run_pipeline(pipeline_config, region, config["StartString"], extracted=True)
     except:
         logger.exception(f"Some failure when running EnvSuitPipeline.py")
         raise
-- 
GitLab