From 5dcd70ad2a94d525e89079873beb9685777a141e Mon Sep 17 00:00:00 2001 From: lb584 <lb584@cam.ac.uk> Date: Fri, 21 Jul 2023 15:47:26 +0100 Subject: [PATCH] changes to allow the new spatial points coming from the MetOffice to work. It is possible to specify which spatial points file to use in the main config json. The default file will be used if not specified. This will allow us to perform historic runs with the old grid and specify manually. The country-level points filtering used by the postprocessing steps (before plotting) now rounds down the accuracy when filtering points. This allows the country filtering to work with both the new and the old grid, as the difference is lost in the rounding --- coordinator/EnvSuitPipeline.py | 8 ++++++-- coordinator/ProcessorEnvironment.py | 7 ++++++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/coordinator/EnvSuitPipeline.py b/coordinator/EnvSuitPipeline.py index cbf4291..03d22ed 100644 --- a/coordinator/EnvSuitPipeline.py +++ b/coordinator/EnvSuitPipeline.py @@ -164,8 +164,12 @@ def run_pipeline(pipeline_config, region, dateString, extracted = False, prevent region_outPath = os.path.join(outPath,'ENVIRONMENT_2.0_'+dateString,'processed',region) if prevent_overwrite: assert not os.path.exists(region_outPath) - # Get spatial points file for the region - region_spatial_points_file = resourcesPath + 'assets/' + 'input_spatial_points_' + region + '.csv' + # Get spatial points file for the region. The file can be calculated from the region name or specified in the config. 
+ if "SPATIAL_POINTS_FILE" in pipeline_config: + region_spatial_points_file = pipeline_config["SPATIAL_POINTS_FILE"] + else: + region_spatial_points_file = resourcesPath + 'assets/' + 'input_spatial_points_' + region + '.csv' + input_spatial_points_file = workPath + 'input_spatial_points.csv' if prevent_overwrite: assert not os.path.exists(input_spatial_points_file) shutil.copy(region_spatial_points_file,input_spatial_points_file) diff --git a/coordinator/ProcessorEnvironment.py b/coordinator/ProcessorEnvironment.py index cccd43f..933189a 100644 --- a/coordinator/ProcessorEnvironment.py +++ b/coordinator/ProcessorEnvironment.py @@ -66,10 +66,15 @@ def process_in_job_env2_0(jobPath,status,config,component): # basic check that contents are as expected for 7-day forecast (57 timepoints in all files) cube_wildcard = f"{output_directory}/*.nc" cubes: CubeList = iris.load(cube_wildcard) + + #TODO: find out from Will whether these problem cubes with unexpected time counts will be in the production output + ignore_list = ["LAND_FRACTION", "TOPOGRAPHY"] + for cube in cubes: + var_name = cube.name() coord = cube.coord("time") timepoint_count = coord.shape[0] - if timepoint_count != 57: + if timepoint_count != 57 and var_name not in ignore_list: msg = f"Unexpected number of timepoints ({timepoint_count}) in cube {cube.name()}" logger.error(msg) raise RuntimeError(msg) -- GitLab