diff --git a/coordinator/extra/ProcessorMetResample.py b/coordinator/extra/ProcessorMetResample.py
index f303914155eaff08bebfa36541592eb6cf3424eb..15d679c3b67731368a550405095a3b5a97d218d9 100644
--- a/coordinator/extra/ProcessorMetResample.py
+++ b/coordinator/extra/ProcessorMetResample.py
@@ -36,7 +36,7 @@ class ProcessorMetResample(Processor):
 
     def process_pre_job(self, args) -> bool:
         self.logger.debug('Performing process_pre_job()')
-        
+
         # If it will work on a single forecast, get a dedicated download
         #return process_pre_job_server_download(args)
         # return query_past_successes(args, 'Environment')
@@ -51,9 +51,12 @@ class ProcessorMetResample(Processor):
 
     def __init__(self) -> None:
         super().__init__()
+
+    def set_component_logger(self):
         logger = logging.getLogger('Processor.Extra.MetResample')
         add_filters_to_sublogger(logger)
-    
+
+
     def gather_data(
             self,
             config,
@@ -97,11 +100,11 @@ class ProcessorMetResample(Processor):
         #loader_kwargs['VariableNameAlternative']= config_for_lister['Deposition'].get('VariableNameAlternative')
 
         file_of_origins = prep.prep_input(config_for_lister,start_date,end_date,
-                component=component,
-                file_lister=file_lister,
-                file_loader=file_loader,
-                lister_kwargs=lister_kwargs,
-                **loader_kwargs)
+                                           component=component,
+                                           file_lister=file_lister,
+                                           file_loader=file_loader,
+                                           lister_kwargs=lister_kwargs,
+                                           **loader_kwargs)
 
         assert os.path.isfile(config[component]['FileNamePrepared'])
 
@@ -116,7 +119,7 @@ class ProcessorMetResample(Processor):
                 fn,
                 header=[0,1],
                 index_col=0)
-        
+
         df.index = to_datetime(df.index,format='%Y%m%d%H%M')
 
         return df
@@ -132,7 +135,7 @@ class ProcessorMetResample(Processor):
 
         /home/jws52/projects/SouthAsia/blast-fernandes-adaptation/code/prep_met.py
         """
-        
+
         # load dataframe from config
 
         file_name_prepared = config[component]['FileNamePrepared']
@@ -157,7 +160,7 @@ class ProcessorMetResample(Processor):
         #df_original = concat([dfm1,df_original])
 
         resampler = df_original.resample(resample_scale)
-        
+
         resample_method = config[component].get('resampling','backfill')
         if resample_method == 'backfill':
             print('Upsampling by backfilling')
@@ -193,7 +196,7 @@ class ProcessorMetResample(Processor):
 
         paths = []
        origins = []
-        
+
         for component in components:
 
             self.logger.info(f"Working on {component}")
@@ -216,8 +219,8 @@ class ProcessorMetResample(Processor):
                     start_time,#datetime.datetime(2023,6,7,3)
                     end_time,#datetime.datetime(2023,6,25,0),
                     component=component,
-                    )
-            
+            )
+
             origins += [file_of_origins]
 
             self.logger.debug('Performing resampling')
@@ -262,7 +265,7 @@ class ProcessorMetResample(Processor):
             config_i[k]=v
 
         # Get run config
-        
+
         # path to some analysis met reruns provided by Will. These mirror the usual weekly EWS analysis jobs,
         # but on the extended EastAfrica grid that includes Zambia.
         ANJOBDIR2 = '${WorkspacePath2}/WR_EnvSuit_Met_Ethiopia_${DateString}/'
@@ -284,12 +287,12 @@ class ProcessorMetResample(Processor):
             config_met[k]=v
 
         self.logger.info('Calling gather_and_resample()')
-        
+
         origins, paths_out = self.gather_and_resample(
                 config_met,
                 reference_date_str = config['StartString'],
                 calculation_span_days = config[component]['CalculationSpanDays']
-                )
+        )
 
         # zip the files that will be provided to collaborators
         files_to_zip = origins + paths_out
@@ -301,7 +304,7 @@ class ProcessorMetResample(Processor):
             for file_to_zip in files_to_zip:
 
                 filename_in_archive = os.path.basename(file_to_zip)
-                
+
                 zipf.write(
                     file_to_zip,
                     arcname=filename_in_archive,
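
Note on the resampling step touched above (illustration only, not part of the diff): resample_data() selects a pandas resampling strategy from the component config via config[component].get('resampling','backfill'). A minimal standalone sketch of that pattern is below; the input values, the 'temperature' column, and the interpolate fallback branch are made up for illustration.

    import pandas as pd

    # Made-up 3-hourly input standing in for the prepared met dataframe.
    index = pd.date_range('2023-06-07 03:00', periods=4, freq='3h')
    df_original = pd.DataFrame({'temperature': [290.1, 291.4, 293.0, 292.2]}, index=index)

    resample_scale = '1h'         # upsample from 3-hourly to hourly
    resample_method = 'backfill'  # mirrors config[component].get('resampling','backfill')

    resampler = df_original.resample(resample_scale)
    if resample_method == 'backfill':
        # 'Upsampling by backfilling': new timestamps take the next observed value.
        df_resampled = resampler.bfill()
    else:
        # Assumed alternative branch for illustration; the real module may support other methods.
        df_resampled = resampler.interpolate()

    print(df_resampled)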