From d74904ddc5285b84ab7c4a29b7bbaad088371522 Mon Sep 17 00:00:00 2001
From: Tamas Mona <tm689@cam.ac.uk>
Date: Wed, 27 Jul 2022 16:50:22 +0100
Subject: [PATCH] Updated paths

---
 EnvSuitPipeline.py     |  6 +++---
 ProcessorComponents.py |  2 +-
 run_Processor.sh       | 14 +++++++-------
 3 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/EnvSuitPipeline.py b/EnvSuitPipeline.py
index 666b95e..21a30cc 100644
--- a/EnvSuitPipeline.py
+++ b/EnvSuitPipeline.py
@@ -163,7 +163,7 @@ def run_merger(workPath):
 
 def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent_overwrite = True):
     '''
-    The prevent_overwrite parameter can be set to False if you want to re-run
+    The prevent_overwrite parameter can be set to False if you want to re-run a job in-place.
     '''
 
     # Get parameters from the config
@@ -183,7 +183,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
     sys_config = loadConfig(sys_config_file)
 
     templateName = 'template_' + runType + '_config.json'
-    template_configFile = resourcesPath + templateName
+    template_configFile = resourcesPath + 'configs/' + templateName
     config = loadConfig(template_configFile)
 
     # Before writing any files, check the output path doesn't exist already
@@ -192,7 +192,7 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent
     if prevent_overwrite: assert not os.path.exists(region_outPath)
 
     # Get spatial points file for the region
-    region_spatial_points_file = resourcesPath + 'input_spatial_points_' + region + '.csv'
+    region_spatial_points_file = resourcesPath + 'assets/' + 'input_spatial_points_' + region + '.csv'
     input_spatial_points_file = workPath + 'input_spatial_points.csv'
     if prevent_overwrite: assert not os.path.exists(input_spatial_points_file)
     shutil.copy(region_spatial_points_file,input_spatial_points_file)
diff --git a/ProcessorComponents.py b/ProcessorComponents.py
index 0b975d1..75f7b81 100644
--- a/ProcessorComponents.py
+++ b/ProcessorComponents.py
@@ -1171,7 +1171,7 @@ def process_in_job_survey(jobPath,status,config,component):
 
     date = datetime.datetime.now()
 
-    cluster_calc_path = "/storage/app/EWS/General/wheat-source-generation/"
+    cluster_calc_path = "/storage/app/EWS_prod/code/wheat_source_generation/"
 
     # clear old output
     old_clustering_output_glob = f"{cluster_calc_path}/output/sources_*"
diff --git a/run_Processor.sh b/run_Processor.sh
index ef384aa..5f173fe 100755
--- a/run_Processor.sh
+++ b/run_Processor.sh
@@ -1,17 +1,17 @@
 #!/bin/bash
 
 # directory containing all environment
-envs=/storage/app/EWS/envs/
+envs=/storage/app/EWS_prod/envs/
 
 # directory containing all custom python packages
-bin=/storage/app/EWS/General
+bin=/storage/app/EWS_prod/code/
 
 # provide custom python packages so they can be imported
 flagdir=${bin}/flagdir/
-epimodel=${bin}/EWS-EpiModel/
-advisory=${bin}/EWS-advisory-builder/
-met_extractor=${bin}/EWS-met_extractor/era5_met_data_extraction/python/
-plotting=${bin}/EWS-Plotting/
+epimodel=${bin}/epimodel/
+advisory=${bin}/advisory_builder/
+met_extractor=${bin}/met_extractor/python/
+plotting=${bin}/plotting/
 
 export PYTHONPATH=$PYTHONPATH:$flagdir:$epimodel:$advisory:$met_extractor:$plotting
 
@@ -21,7 +21,7 @@ export EMAIL_CRED=${envs}/Cred_gmail.json
 
 # activate conda environment of python modules so they can be imported
 #TODO: Move conda_env from bin to envs
-conda_env=${bin}/EWS-python/py3EWS
+conda_env=${envs}/conda/py3EWS
 source /storage/app/miniconda3/bin/activate ${conda_env}
 
 # get path of this script (to point to files within the same git repo)
--
GitLab