From 7540daa86e80e57eb9cc30be353a53db7a348942 Mon Sep 17 00:00:00 2001
From: lb584 <lb584@cam.ac.uk>
Date: Thu, 18 Aug 2022 10:39:19 +0100
Subject: [PATCH] Revert code to run in the standard way (prevent_overwrite
 defaults to True again, etc.). Remove redundant code from EnvSuitPipeline.py

---
 EnvSuitPipeline.py     | 27 ++-------------------------
 ProcessorComponents.py |  3 ---
 2 files changed, 2 insertions(+), 28 deletions(-)
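
Note: with this change, run_pipeline defaults to prevent_overwrite=True, so
re-running a job in-place now requires passing the flag explicitly, as the
docstring below describes. A minimal sketch of such a call; the config path,
region, and date string here are illustrative only:

    import json
    from EnvSuitPipeline import run_pipeline

    # Load the pipeline configuration (file name is a placeholder).
    with open("run_config.json") as config_file:
        pipeline_config = json.load(config_file)

    # Opt back into in-place re-runs by overriding the new default.
    run_pipeline(pipeline_config, "Ethiopia", "20220818",
                 extracted=False, prevent_overwrite=False)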

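Note: the ProcessorComponents.py hunks below re-enable the scp, tar, and
NAME-to-netCDF steps of process_in_job_env2_0. A rough sketch of that flow
as a fragment from inside the function, where subprocess_and_log,
npp.process_met_office_NAME, logger, jobPath, and the output directory come
from the surrounding code, and the command contents, remote_file, and the
input glob are placeholders:

    # Copy the input tarball from the remote server (command is a placeholder).
    cmd_scp = ["scp", remote_file, jobPath]
    subprocess_and_log(cmd_scp, 'env2 scp',
                       'Copying file from remote server to job directory')

    # Untar the input into the job directory (command is a placeholder).
    cmd_tar = ["tar", "-xzf", f"{jobPath}/input.tar.gz", "-C", jobPath]
    subprocess_and_log(cmd_tar, 'env2 tar', 'Untarring the input file')

    # Convert the Met Office NAME output to netCDF files.
    output_directory = f"{jobPath}/NAME_Met_as_netcdf"
    input_files_glob = f"{jobPath}/NAME_Met_as_text/*.txt"  # placeholder glob
    try:
        npp.process_met_office_NAME(input_files_glob, output_directory)
    except:
        logger.exception(f"Some failure when processing NAME output")
        raise
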
diff --git a/EnvSuitPipeline.py b/EnvSuitPipeline.py
index 8499667..6981a4b 100644
--- a/EnvSuitPipeline.py
+++ b/EnvSuitPipeline.py
@@ -3,15 +3,13 @@ import json
 import logging
 import os
 import shutil
-import subprocess
-from typing import List
 
 import pandas as pd
 
-from met_processing.common import processor_pool
 from met_processing.runner.common import job_runner
 from met_processing.runner.common.generate_all_jobs import generate_all_jobs
 
+
 MAX_WORKERS: int = 3
 
 logging.basicConfig(level=logging.DEBUG)
@@ -65,27 +63,6 @@ def generate_temporal_points(file, datestr, timeresolution, nDaysForecast):
     outfile.close()
     return outfile
 
-### Met extractor ############
-
-# def pipeline_subprocess(workPath, command, multi=True):
-#     logger.info(f"Change work directory to {workPath}")
-#     os.chdir(workPath)
-#
-#     try:
-#         if (multi == True):
-#             commands: List[str] = command.split(",")
-#
-#             logger.info(f"Run {command} in multi process mode.")
-#             processor_pool.main(workPath, commands, max_workers=10, chunk_size=1)
-#         else:
-#             logger.info(f"Run {command} in single process mode.")
-#             pass # TODO add not multi proc mode
-#     except:
-#         logger.exception(f"Some failure when running {command}", exc_info=True)
-#         raise
-#
-#     return
-
 
 def clean(workPath): # Clean temporary files and folders from the working directory
     try:
@@ -146,7 +123,7 @@ def run_merger(work_path):
 
 #######################################
 
-def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = False):
+def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = True):
     '''
     The prevent_overwrite parameter can be set to False if you want to re-run
     a job in-place.
diff --git a/ProcessorComponents.py b/ProcessorComponents.py
index c3440a6..4e010f4 100644
--- a/ProcessorComponents.py
+++ b/ProcessorComponents.py
@@ -1285,7 +1285,6 @@ def process_in_job_env2_0(jobPath,status,config,component):
     description_short = 'env2 scp'
     description_long = 'Copying file from remote server to job directory'
 
-    #todo lawrence comment this back in
     subprocess_and_log(cmd_scp,description_short, description_long)
 
     logger.info('untarring the input file')
@@ -1295,7 +1294,6 @@ def process_in_job_env2_0(jobPath,status,config,component):
     description_short = 'env2 tar'
     description_long = 'Untarring the input file'
 
-    #todo lawrence comment this back in
     subprocess_and_log(cmd_tar,description_short, description_long)
 
     # basic check that contents are as expected for 7-day forecast
@@ -1312,7 +1310,6 @@ def process_in_job_env2_0(jobPath,status,config,component):
 
     output_directory = f"{jobPath}/NAME_Met_as_netcdf"
 
-    #todo lawrence comment this back in
     try:
         npp.process_met_office_NAME(input_files_glob,output_directory)
     except:
-- 
GitLab