diff --git a/EnvSuitPipeline.py b/EnvSuitPipeline.py
index c71786824421546ad582ccfbd1473c38c987fc12..fcc6ce5de70dfc1715cebe488848aedce46038d0 100644
--- a/EnvSuitPipeline.py
+++ b/EnvSuitPipeline.py
@@ -119,7 +119,7 @@ def run_merger(run_params: dict, sys_params: dict, processor_name: str):
 
 #######################################
 #lawrence coment back to original (prevent_overwrite=True)
-def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = False):
+def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = True):
     '''
     The prevent_overwrite parameter can be set to False if you want to re-run
     a job in-place.
diff --git a/Processor.py b/Processor.py
index 17b72ef5c1094a972cdd69b9c91e27eccaf67cd2..99268c1823b04dd3a4c6c5c72cd9daaa2fa748f0 100755
--- a/Processor.py
+++ b/Processor.py
@@ -420,12 +420,12 @@ def run_Process():
 
         #lawrence comment in/out
         # check for a status file in job directory
-        # if status.had_initial_status:
-        #     logger.info(f"Job path already exists and has status {status.status}")
-        #
-        #     endScript(premature = status.status not in ['SUCCESS','INPROGRESS'])
-        #
-        # logger.info(f"Current status of job directory is {status.status}")
+        if status.had_initial_status:
+            logger.info(f"Job path already exists and has status {status.status}")
+
+            endScript(premature = status.status not in ['SUCCESS','INPROGRESS'])
+
+        logger.info(f"Current status of job directory is {status.status}")
 
         # now that we have a useable job directory, move the log file there
         logPathJob = f"{jobPath}/log.txt"
diff --git a/ProcessorEnvironment.py b/ProcessorEnvironment.py
index be0b5f001459f01c12e7ac9da4b8e1712ec0afa4..e57c40a1fcc4761eaf0b6def7adf39a5fc32a576 100644
--- a/ProcessorEnvironment.py
+++ b/ProcessorEnvironment.py
@@ -44,7 +44,7 @@ def process_in_job_env2_0(jobPath,status,config,component):
     description_long = 'Copying file from remote server to job directory'
 
     # lawrence comment in/out
-    # subprocess_and_log(cmd_scp,description_short, description_long)
+    subprocess_and_log(cmd_scp,description_short, description_long)
 
     logger.info('untarring the input file')
 
@@ -72,12 +72,12 @@ def process_in_job_env2_0(jobPath,status,config,component):
     logger.info(f"Calling environmental suitability 2.0 for {region} so wait for output to appear")
 
     pipeline_config = config["Environment"]
-    # try:
+    try:
         #todo lawrence comment this back to original (extracted=False)
-        # esp.run_pipeline(pipeline_config, region, config["StartString"], extracted=False)
-    # except:
-    #     logger.exception(f"Some failure when running EnvSuitPipeline.py")
-    #     raise
+        esp.run_pipeline(pipeline_config, region, config["StartString"], extracted=False)
+    except Exception:
+        logger.exception("Some failure when running EnvSuitPipeline.py")
+        raise
 
     logger.info('Finished running environmental suitability 2.0')