diff --git a/run_configs/gcom/grab_gcom.py b/run_configs/gcom/grab_gcom.py index 0b53b9d3..730b5cca 100755 --- a/run_configs/gcom/grab_gcom.py +++ b/run_configs/gcom/grab_gcom.py @@ -20,7 +20,9 @@ if __name__ == '__main__': # note: we can add arguments to grab_config.arg_parser here - # todo: do a real example of this in one of the configs, or at least in the docs. + # todo: do a real example of this in one of the configs, or at least in + # the docs. with grab_config: - fcm_export(grab_config, src='fcm:gcom.xm_tr/build', revision=revision, dst_label="gcom") + fcm_export(grab_config, src='fcm:gcom.xm_tr/build', + revision=revision, dst_label="gcom") diff --git a/run_configs/jules/build_jules.py b/run_configs/jules/build_jules.py index f3fc983c..9cc43ddf 100755 --- a/run_configs/jules/build_jules.py +++ b/run_configs/jules/build_jules.py @@ -43,9 +43,14 @@ def __init__(self): with BuildConfig(project_label=f'jules {revision} $compiler', tool_box=tool_box) as state: - # grab the source. todo: use some checkouts instead of exports in these configs. - fcm_export(state, src='fcm:jules.xm_tr/src', revision=revision, dst_label='src') - fcm_export(state, src='fcm:jules.xm_tr/utils', revision=revision, dst_label='utils') + # grab the source. todo: use some checkouts instead of exports in + # these configs. + fcm_export(state, src='fcm:jules.xm_tr/src', + revision=revision, + dst_label='src') + fcm_export(state, src='fcm:jules.xm_tr/utils', + revision=revision, + dst_label='utils') grab_pre_build(state, path='/not/a/real/folder', allow_fail=True), @@ -61,9 +66,18 @@ def __init__(self): # move inc files to the root for easy tool use root_inc_files(state) - preprocess_fortran(state, common_flags=['-P', '-DMPI_DUMMY', '-DNCDF_DUMMY', '-I$output']) - - analyse(state, root_symbol='jules', unreferenced_deps=['imogen_update_carb']) + preprocess_fortran(state, + common_flags=[ + '-P', + '-DMPI_DUMMY', + '-DNCDF_DUMMY', + '-I$output' + ] + ) + + analyse(state, + root_symbol='jules', + unreferenced_deps=['imogen_update_carb']) compile_fortran(state) diff --git a/run_configs/lfric/atm.py b/run_configs/lfric/atm.py index c297499c..29806fbc 100755 --- a/run_configs/lfric/atm.py +++ b/run_configs/lfric/atm.py @@ -25,127 +25,164 @@ def file_filtering(config): """Based on lfric_atm/fcm-make/extract.cfg""" science_root = config.source_root / 'science' + um_root = science_root / 'um' + um_control = um_root / 'control' + + jules_root = science_root / 'jules' return [ Exclude('unit-test', '/test/'), - Exclude(science_root / 'um'), - Include(science_root / 'um/atmosphere/AC_assimilation/iau_mod.F90'), - Include(science_root / 'um/atmosphere/aerosols'), - Include(science_root / 'um/atmosphere/atmosphere_service'), - Include(science_root / 'um/atmosphere/boundary_layer'), - Include(science_root / 'um/atmosphere/carbon/carbon_options_mod.F90'), - Include(science_root / 'um/atmosphere/convection'), - Include(science_root / 'um/atmosphere/convection/comorph/control/comorph_constants_mod.F90'), - Include(science_root / 'um/atmosphere/diffusion_and_filtering/leonard_incs_mod.F90'), - Include(science_root / 'um/atmosphere/diffusion_and_filtering/turb_diff_ctl_mod.F90'), - Include(science_root / 'um/atmosphere/diffusion_and_filtering/turb_diff_mod.F90'), - Include(science_root / 'um/atmosphere/dynamics'), - Include(science_root / 'um/atmosphere/dynamics_advection'), - Include(science_root / 'um/atmosphere/electric'), - Include(science_root / 'um/atmosphere/energy_correction/eng_corr_inputs_mod.F90'), - Include(science_root / 
'um/atmosphere/energy_correction/flux_diag-fldiag1a.F90'), - Include(science_root / 'um/atmosphere/free_tracers/free_tracers_inputs_mod.F90'), - Include(science_root / 'um/atmosphere/free_tracers/water_tracers_mod.F90'), - Include(science_root / 'um/atmosphere/free_tracers/wtrac_all_phase_chg.F90'), - Include(science_root / 'um/atmosphere/free_tracers/wtrac_calc_ratio.F90'), - Include(science_root / 'um/atmosphere/free_tracers/wtrac_move_phase.F90'), - Include(science_root / 'um/atmosphere/idealised'), - Include(science_root / 'um/atmosphere/large_scale_cloud'), - Include(science_root / 'um/atmosphere/large_scale_precipitation'), - Include(science_root / 'um/atmosphere/PWS_diagnostics/pws_diags_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/def_easyaerosol.F90'), - Include(science_root / 'um/atmosphere/radiation_control/easyaerosol_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/easyaerosol_option_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/easyaerosol_read_input_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/fsd_parameters_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/max_calls.F90'), - Include(science_root / 'um/atmosphere/radiation_control/r2_calc_total_cloud_cover.F90'), - Include(science_root / 'um/atmosphere/radiation_control/rad_input_mod.F90'), - Include(science_root / 'um/atmosphere/radiation_control/solinc_data.F90'), - Include(science_root / 'um/atmosphere/radiation_control/spec_sw_lw.F90'), - Include(science_root / 'um/atmosphere/stochastic_physics/stochastic_physics_run_mod.F90'), - Include(science_root / 'um/atmosphere/tracer_advection/trsrce-trsrce2a.F90'), - Include(science_root / 'um/control/dummy_libs/drhook/parkind1.F90'), - Include(science_root / 'um/control/dummy_libs/drhook/yomhook.F90'), - Include(science_root / 'um/control/glomap_clim_interface/glomap_clim_option_mod.F90'), - Include(science_root / 'um/control/grids'), - Include(science_root / 'um/control/misc'), - Include(science_root / 'um/control/mpp/decomp_params.F90'), - Include(science_root / 'um/control/mpp/um_parcore.F90'), - Include(science_root / 'um/control/mpp/um_parparams.F90'), - Include(science_root / 'um/control/mpp/um_parvars.F90'), - Include(science_root / 'um/control/stash/copydiag_3d_mod.F90'), - Include(science_root / 'um/control/stash/copydiag_mod.F90'), - Include(science_root / 'um/control/stash/cstash_mod.F90'), - Include(science_root / 'um/control/stash/profilename_length_mod.F90'), - Include(science_root / 'um/control/stash/set_levels_list.F90'), - Include(science_root / 'um/control/stash/set_pseudo_list.F90'), - Include(science_root / 'um/control/stash/stash_array_mod.F90'), - Include(science_root / 'um/control/stash/stparam_mod.F90'), - Include(science_root / 'um/control/stash/um_stashcode_mod.F90'), - Include(science_root / 'um/control/top_level'), - Include(science_root / 'um/control/ukca_interface/atmos_ukca_callback_mod.F90'), - Include(science_root / 'um/control/ukca_interface/atmos_ukca_humidity_mod.F90'), - Include(science_root / 'um/control/ukca_interface/get_emdiag_stash_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_d1_defs.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_dissoc.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_eg_tracers_total_mass_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_nmspec_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_option_mod.F90'), - 
Include(science_root / 'um/control/ukca_interface/ukca_photo_scheme_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_radaer_lut_in.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_radaer_read_precalc.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_radaer_read_presc_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_radaer_struct_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_scavenging_diags_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_scavenging_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_tracer_stash.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_um_legacy_mod.F90'), - Include(science_root / 'um/control/ukca_interface/ukca_volcanic_so2.F90'), + Exclude(um_root), + Include(um_root / 'atmosphere' / 'AC_assimilation/iau_mod.F90'), + Include(um_root / 'atmosphere' / 'aerosols'), + Include(um_root / 'atmosphere' / 'atmosphere_service'), + Include(um_root / 'atmosphere' / 'boundary_layer'), + Include(um_root / 'atmosphere' / 'carbon/carbon_options_mod.F90'), + Include(um_root / 'atmosphere' / 'convection'), + Include(um_root / 'atmosphere' / 'convection' / 'comorph' / 'control' + / 'comorph_constants_mod.F90'), + Include(um_root / 'atmosphere' / 'diffusion_and_filtering' + / 'leonard_incs_mod.F90'), + Include(um_root / 'atmosphere' / 'diffusion_and_filtering' + / 'turb_diff_ctl_mod.F90'), + Include(um_root / 'atmosphere' / 'diffusion_and_filtering' + / 'turb_diff_mod.F90'), + Include(um_root / 'atmosphere' / 'dynamics'), + Include(um_root / 'atmosphere' / 'dynamics_advection'), + Include(um_root / 'atmosphere' / 'electric'), + Include(um_root / 'atmosphere' / 'energy_correction' + / 'eng_corr_inputs_mod.F90'), + Include(um_root / 'atmosphere' / 'energy_correction' + / 'flux_diag-fldiag1a.F90'), + Include(um_root / 'atmosphere' / 'free_tracers' + / 'free_tracers_inputs_mod.F90'), + Include(um_root / 'atmosphere' / 'free_tracers' + / 'water_tracers_mod.F90'), + Include(um_root / 'atmosphere' / 'free_tracers' + / 'wtrac_all_phase_chg.F90'), + Include(um_root / 'atmosphere' / 'free_tracers' + / 'wtrac_calc_ratio.F90'), + Include(um_root / 'atmosphere' / 'free_tracers' + / 'wtrac_move_phase.F90'), + Include(um_root / 'atmosphere' / 'idealised'), + Include(um_root / 'atmosphere' / 'large_scale_cloud'), + Include(um_root / 'atmosphere' / 'large_scale_precipitation'), + Include(um_root / 'atmosphere' / 'PWS_diagnostics/pws_diags_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'def_easyaerosol.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'easyaerosol_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'easyaerosol_option_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'easyaerosol_read_input_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'fsd_parameters_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'max_calls.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'r2_calc_total_cloud_cover.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'rad_input_mod.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'solinc_data.F90'), + Include(um_root / 'atmosphere' / 'radiation_control' + / 'spec_sw_lw.F90'), + Include(um_root / 'atmosphere' / 'stochastic_physics' + / 'stochastic_physics_run_mod.F90'), + Include(um_root / 'atmosphere' / 'tracer_advection' + / 'trsrce-trsrce2a.F90'), + 
Include(um_control / 'dummy_libs/drhook/parkind1.F90'), + Include(um_control / 'dummy_libs/drhook/yomhook.F90'), + Include(um_control / 'glomap_clim_interface' + / 'glomap_clim_option_mod.F90'), + Include(um_control / 'grids'), + Include(um_control / 'misc'), + Include(um_control / 'mpp/decomp_params.F90'), + Include(um_control / 'mpp/um_parcore.F90'), + Include(um_control / 'mpp/um_parparams.F90'), + Include(um_control / 'mpp/um_parvars.F90'), + Include(um_control / 'stash/copydiag_3d_mod.F90'), + Include(um_control / 'stash/copydiag_mod.F90'), + Include(um_control / 'stash/cstash_mod.F90'), + Include(um_control / 'stash/profilename_length_mod.F90'), + Include(um_control / 'stash/set_levels_list.F90'), + Include(um_control / 'stash/set_pseudo_list.F90'), + Include(um_control / 'stash/stash_array_mod.F90'), + Include(um_control / 'stash/stparam_mod.F90'), + Include(um_control / 'stash/um_stashcode_mod.F90'), + Include(um_control / 'top_level'), + Include(um_control / 'ukca_interface/atmos_ukca_callback_mod.F90'), + Include(um_control / 'ukca_interface/atmos_ukca_humidity_mod.F90'), + Include(um_control / 'ukca_interface/get_emdiag_stash_mod.F90'), + Include(um_control / 'ukca_interface/ukca_d1_defs.F90'), + Include(um_control / 'ukca_interface/ukca_dissoc.F90'), + Include(um_control / 'ukca_interface' + / 'ukca_eg_tracers_total_mass_mod.F90'), + Include(um_control / 'ukca_interface/ukca_nmspec_mod.F90'), + Include(um_control / 'ukca_interface/ukca_option_mod.F90'), + Include(um_control / 'ukca_interface/ukca_photo_scheme_mod.F90'), + Include(um_control / 'ukca_interface/ukca_radaer_lut_in.F90'), + Include(um_control / 'ukca_interface/ukca_radaer_read_precalc.F90'), + Include(um_control / 'ukca_interface/ukca_radaer_read_presc_mod.F90'), + Include(um_control / 'ukca_interface/ukca_radaer_struct_mod.F90'), + Include(um_control / 'ukca_interface/ukca_scavenging_diags_mod.F90'), + Include(um_control / 'ukca_interface/ukca_scavenging_mod.F90'), + Include(um_control / 'ukca_interface/ukca_tracer_stash.F90'), + Include(um_control / 'ukca_interface/ukca_um_legacy_mod.F90'), + Include(um_control / 'ukca_interface/ukca_volcanic_so2.F90'), Include(science_root / 'um/scm/modules/scmoptype_defn.F90'), Include(science_root / 'um/scm/modules/s_scmop_mod.F90'), Include(science_root / 'um/scm/modules/scm_convss_dg_mod.F90'), Include(science_root / 'um/scm/stub/dgnstcs_glue_conv.F90'), Include(science_root / 'um/scm/stub/scmoutput_stub.F90'), - Include(science_root / 'um/atmosphere/COSP/cosp_input_mod.F90'), - Include(science_root / 'um/control/coupling'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/g_wave_input_mod.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_ussp_prec_mod.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_ussp_params_mod.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_ussp_core_mod.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_ussp_mod.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_block.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_wave.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/gw_setup.F90'), - Include(science_root / 'um/atmosphere/gravity_wave_drag/c_gwave_mod.F90'), + Include(um_root / 'atmosphere' / 'COSP/cosp_input_mod.F90'), + Include(um_control / 'coupling'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag' + / 'g_wave_input_mod.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag' + / 
'gw_ussp_prec_mod.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag' + / 'gw_ussp_params_mod.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag' + / 'gw_ussp_core_mod.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag' + / 'gw_ussp_mod.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag/gw_block.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag/gw_wave.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag/gw_setup.F90'), + Include(um_root / 'atmosphere' / 'gravity_wave_drag/c_gwave_mod.F90'), Include(science_root / 'um/utility/qxreconf/calc_fit_fsat.F'), - Exclude(science_root / 'jules'), - Include(science_root / 'jules/control/shared'), - Include(science_root / 'jules/control/lfric'), - Include(science_root / 'jules/control/cable/shared'), - Include(science_root / 'jules/control/cable/cable_land'), - Include(science_root / 'jules/control/cable/interface'), - Include(science_root / 'jules/control/cable/util'), - Include(science_root / 'jules/params/cable'), - Include(science_root / 'jules/science_cable'), - Include(science_root / 'jules/util/cable'), - Include(science_root / 'jules/initialisation/cable'), - Include(science_root / 'jules/control/standalone/jules_fields_mod.F90'), - Include(science_root / 'jules/util/shared/gridbox_mean_mod.F90'), - Include(science_root / 'jules/util/shared/metstats/metstats_mod.F90'), - Include(science_root / 'jules/initialisation/shared/allocate_jules_arrays.F90'), - Include(science_root / 'jules/initialisation/shared/freeze_soil.F90'), - Include(science_root / 'jules/initialisation/shared/calc_urban_aero_fields_mod.F90'), - Include(science_root / 'jules/initialisation/shared/check_compatible_options_mod.F90'), - Include(science_root / 'jules/science/deposition'), - Include(science_root / 'jules/science/params'), - Include(science_root / 'jules/science/radiation'), - Include(science_root / 'jules/science/snow'), - Include(science_root / 'jules/science/soil'), - Include(science_root / 'jules/science/surface'), - Include(science_root / 'jules/science/vegetation'), + Exclude(jules_root), + Include(jules_root / 'control/shared'), + Include(jules_root / 'control/lfric'), + Include(jules_root / 'control/cable/shared'), + Include(jules_root / 'control/cable/cable_land'), + Include(jules_root / 'control/cable/interface'), + Include(jules_root / 'control/cable/util'), + Include(jules_root / 'params/cable'), + Include(jules_root / 'science_cable'), + Include(jules_root / 'util/cable'), + Include(jules_root / 'initialisation/cable'), + Include(jules_root / 'control/standalone/jules_fields_mod.F90'), + Include(jules_root / 'util/shared/gridbox_mean_mod.F90'), + Include(jules_root / 'util/shared/metstats/metstats_mod.F90'), + Include(jules_root / 'initialisation' / 'shared' + / 'allocate_jules_arrays.F90'), + Include(jules_root / 'initialisation/shared/freeze_soil.F90'), + Include(jules_root / 'initialisation' / 'shared' + / 'calc_urban_aero_fields_mod.F90'), + Include(jules_root / 'initialisation' / 'shared' + / 'check_compatible_options_mod.F90'), + Include(jules_root / 'science/deposition'), + Include(jules_root / 'science/params'), + Include(jules_root / 'science/radiation'), + Include(jules_root / 'science/snow'), + Include(jules_root / 'science/soil'), + Include(jules_root / 'science/surface'), + Include(jules_root / 'science/vegetation'), Exclude(science_root / 'socrates'), Include(science_root / 'socrates/radiance_core'), @@ -173,7 +210,8 @@ def get_transformation_script(fpath, config): relative_path = 
fpath.relative_to(base_path) except ValueError: pass - local_transformation_script = optimisation_path / (relative_path.with_suffix('.py')) + local_transformation_script = (optimisation_path + / relative_path.with_suffix('.py')) if local_transformation_script.exists(): return local_transformation_script global_transformation_script = optimisation_path / 'global.py' @@ -189,44 +227,88 @@ def get_transformation_script(fpath, config): with BuildConfig(project_label='atm $compiler $two_stage', tool_box=ToolBox()) as state: - # todo: use different dst_labels because they all go into the same folder, - # making it hard to see what came from where? + # todo: use different dst_labels because they all go into the same + # folder, making it hard to see what came from where? # internal dependencies - grab_folder(state, src=lfric_source / 'infrastructure/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'components/driver/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'components' / 'inventory' / 'source', dst_label='') - grab_folder(state, src=lfric_source / 'components/science/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'components/lfric-xios/source/', dst_label='lfric', ) + grab_folder(state, + src=lfric_source / 'infrastructure/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'components/driver/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'components' / 'inventory' / 'source', + dst_label='') + grab_folder(state, + src=lfric_source / 'components/science/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'components/lfric-xios/source/', + dst_label='lfric', ) # coupler - oasis component - grab_folder(state, src=lfric_source / 'components/coupler-oasis/source/', dst_label='lfric') + grab_folder(state, + src=lfric_source / 'components/coupler-oasis/source/', + dst_label='lfric') # gungho dynamical core - grab_folder(state, src=lfric_source / 'gungho/source/', dst_label='lfric') - - grab_folder(state, src=lfric_source / 'um_physics/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'socrates/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'jules/source/', dst_label='lfric') + grab_folder(state, + src=lfric_source / 'gungho/source/', + dst_label='lfric') + + grab_folder(state, + src=lfric_source / 'um_physics/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'socrates/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'jules/source/', + dst_label='lfric') # UM physics - versions as required by the LFRIC_REVISION in grab_lfric.py - fcm_export(state, src='fcm:um.xm_tr/src', dst_label='science/um', revision=116568) - fcm_export(state, src='fcm:jules.xm_tr/src', dst_label='science/jules', revision=25146) - fcm_export(state, src='fcm:socrates.xm_tr/src', dst_label='science/socrates', revision='1331') - fcm_export(state, src='fcm:shumlib.xm_tr/', dst_label='science/shumlib', revision='um13.1') - fcm_export(state, src='fcm:casim.xm_tr/src', dst_label='science/casim', revision='10024') - fcm_export(state, src='fcm:ukca.xm_tr/src', dst_label='science/ukca', revision='1179') + fcm_export(state, + src='fcm:um.xm_tr/src', + dst_label='science/um', + revision=116568) + fcm_export(state, + src='fcm:jules.xm_tr/src', + dst_label='science/jules', + revision=25146) + fcm_export(state, + src='fcm:socrates.xm_tr/src', + dst_label='science/socrates', + revision='1331') + fcm_export(state, + 
src='fcm:shumlib.xm_tr/', + dst_label='science/shumlib', + revision='um13.1') + fcm_export(state, + src='fcm:casim.xm_tr/src', + dst_label='science/casim', + revision='10024') + fcm_export(state, + src='fcm:ukca.xm_tr/src', + dst_label='science/ukca', + revision='1179') # lfric_atm - grab_folder(state, src=lfric_source / 'lfric_atm/source/', dst_label='lfric') - grab_folder(state, src=lfric_source / 'lfric_atm' / 'optimisation', + grab_folder(state, + src=lfric_source / 'lfric_atm/source/', + dst_label='lfric') + grab_folder(state, + src=lfric_source / 'lfric_atm' / 'optimisation', dst_label='optimisation') # generate more source files in source and source/configuration - configurator(state, - lfric_source=lfric_source, - gpl_utils_source=gpl_utils_source, - rose_meta_conf=lfric_source / 'lfric_atm/rose-meta/lfric-lfric_atm/HEAD/rose-meta.conf', - config_dir=state.source_root / 'lfric/configuration') + configurator( + state, + lfric_source=lfric_source, + gpl_utils_source=gpl_utils_source, + rose_meta_conf=lfric_source / 'lfric_atm' / 'rose-meta' + / 'lfric-lfric_atm' / 'HEAD' / 'rose-meta.conf', + config_dir=state.source_root / 'lfric/configuration' + ) find_source_files(state, path_filters=file_filtering(state)) @@ -236,26 +318,41 @@ def get_transformation_script(fpath, config): preprocess_c( state, path_flags=[ - AddFlags(match="$source/science/um/*", flags=['-I$relative/include']), - AddFlags(match="$source/science/shumlib/*", flags=['-I$source/science/shumlib/common/src']), - AddFlags(match='$source/science/um/controls/c_code/*', flags=[ - '-I$source/science/um/include/other', - '-I$source/science/shumlib/shum_thread_utils/src']), + AddFlags(match="$source/science/um/*", + flags=['-I$relative/include']), + AddFlags(match="$source/science/shumlib/*", + flags=['-I$source/science/shumlib/common/src']), + AddFlags(match='$source/science/um/controls/c_code/*', + flags=[ + '-I$source/science/um/include/other', + '-I$source/science/shumlib/shum_thread_utils/src' + ]), ], ) preprocess_fortran( state, - common_flags=['-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DUM_PHYSICS', '-DCOUPLED', '-DUSE_MPI=YES'], + common_flags=['-DRDEF_PRECISION=64', + '-DUSE_XIOS', + '-DUM_PHYSICS', + '-DCOUPLED', + '-DUSE_MPI=YES'], path_flags=[ - AddFlags(match="$source/science/um/*", flags=['-I$relative/include']), - AddFlags(match="$source/science/jules/*", flags=['-DUM_JULES', '-I$output']), - AddFlags(match="$source/science/*", flags=['-DLFRIC']), + AddFlags(match="$source/science/um/*", + flags=['-I$relative/include']), + AddFlags(match="$source/science/jules/*", + flags=['-DUM_JULES', '-I$output']), + AddFlags(match="$source/science/*", + flags=['-DLFRIC']), ], ) - # todo: put this inside the psyclone step, no need for it to be separate, there's nothing required between them - preprocess_x90(state, common_flags=['-DUM_PHYSICS', '-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']) + # todo: put this inside the psyclone step, no need for it to be + # separate, there's nothing required between them + preprocess_x90(state, common_flags=['-DUM_PHYSICS', + '-DRDEF_PRECISION=64', + '-DUSE_XIOS', + '-DCOUPLED']) psyclone( state, @@ -270,7 +367,12 @@ def get_transformation_script(fpath, config): analyse( state, root_symbol='lfric_atm', - ignore_mod_deps=['netcdf', 'MPI', 'yaxt', 'pfunit_mod', 'xios', 'mod_wait'], + ignore_mod_deps=['netcdf', + 'MPI', + 'yaxt', + 'pfunit_mod', + 'xios', + 'mod_wait'], ) compile_c(state, common_flags=['-c', '-std=c99']) @@ -279,16 +381,26 @@ def get_transformation_script(fpath, config): state, 
common_flags=[ '-c', - '-ffree-line-length-none', '-fopenmp', + '-ffree-line-length-none', + '-fopenmp', '-g', - '-finit-integer=31173', '-finit-real=snan', '-finit-logical=true', '-finit-character=85', - '-fcheck=all', '-ffpe-trap=invalid,zero,overflow', - '-Wall', '-Werror=character-truncation', '-Werror=unused-value', '-Werror=tabs', + '-finit-integer=31173', + '-finit-real=snan', + '-finit-logical=true', + '-finit-character=85', + '-fcheck=all', + '-ffpe-trap=invalid,zero,overflow', + + '-Wall', + '-Werror=character-truncation', + '-Werror=unused-value', + '-Werror=tabs', ], path_flags=[ - AddFlags('$output/science/*', ['-fdefault-real-8', '-fdefault-double-8']), + AddFlags('$output/science/*', + ['-fdefault-real-8', '-fdefault-double-8']), ] ) @@ -297,10 +409,12 @@ def get_transformation_script(fpath, config): link_exe( state, flags=[ - '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', # EXTERNAL_DYNAMIC_LIBRARIES - '-lxios', # EXTERNAL_STATIC_LIBRARIES - '-lstdc++', + # EXTERNAL_DYNAMIC_LIBRARIES + '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', + # EXTERNAL_STATIC_LIBRARIES + '-lxios', + '-lstdc++', '-fopenmp', ], ) diff --git a/run_configs/lfric/grab_lfric.py b/run_configs/lfric/grab_lfric.py index c649ada2..49b9d2c4 100755 --- a/run_configs/lfric/grab_lfric.py +++ b/run_configs/lfric/grab_lfric.py @@ -16,18 +16,30 @@ # these configs are interrogated by the build scripts # todo: doesn't need two separate configs, they use the same project workspace tool_box = ToolBox() -lfric_source_config = BuildConfig(project_label=f'lfric source {LFRIC_REVISION}', - tool_box=tool_box) -gpl_utils_source_config = BuildConfig(project_label=f'lfric source {LFRIC_REVISION}', - tool_box=tool_box) +lfric_source_config = BuildConfig( + project_label=f'lfric source {LFRIC_REVISION}', + tool_box=tool_box +) +gpl_utils_source_config = BuildConfig( + project_label=f'lfric source {LFRIC_REVISION}', + tool_box=tool_box +) if __name__ == '__main__': with lfric_source_config: fcm_export( - lfric_source_config, src='fcm:lfric.xm_tr', revision=LFRIC_REVISION, dst_label='lfric') + lfric_source_config, + src='fcm:lfric.xm_tr', + revision=LFRIC_REVISION, + dst_label='lfric' + ) with gpl_utils_source_config: fcm_export( - gpl_utils_source_config, src='fcm:lfric_gpl_utils.xm-tr', revision=LFRIC_REVISION, dst_label='gpl_utils') + gpl_utils_source_config, + src='fcm:lfric_gpl_utils.xm-tr', + revision=LFRIC_REVISION, + dst_label='gpl_utils' + ) diff --git a/run_configs/lfric/gungho.py b/run_configs/lfric/gungho.py index 5454d8ca..1799b899 100755 --- a/run_configs/lfric/gungho.py +++ b/run_configs/lfric/gungho.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 -# ############################################################################## +# ############################################################################ # (c) Crown copyright Met Office. All rights reserved. 
# For further details please refer to the file COPYRIGHT # which you should have received as part of this distribution -# ############################################################################## +# ############################################################################ import logging from fab.build_config import BuildConfig @@ -34,7 +34,8 @@ def get_transformation_script(fpath, config): relative_path = fpath.relative_to(base_path) except ValueError: pass - local_transformation_script = optimisation_path / (relative_path.with_suffix('.py')) + local_transformation_script = (optimisation_path + / (relative_path.with_suffix('.py'))) if local_transformation_script.exists(): return local_transformation_script global_transformation_script = optimisation_path / 'global.py' @@ -49,18 +50,40 @@ def get_transformation_script(fpath, config): with BuildConfig(project_label='gungho $compiler $two_stage', tool_box=ToolBox()) as state: - grab_folder(state, src=lfric_source / 'infrastructure/source/', dst_label='') - grab_folder(state, src=lfric_source / 'components/driver/source/', dst_label='') - grab_folder(state, src=lfric_source / 'components' / 'inventory' / 'source', dst_label='') - grab_folder(state, src=lfric_source / 'components/science/source/', dst_label='') - grab_folder(state, src=lfric_source / 'components/lfric-xios/source/', dst_label='') - grab_folder(state, src=lfric_source / 'gungho/source/', dst_label='') - grab_folder(state, src=lfric_source / 'um_physics/source/', dst_label='') - grab_folder(state, src=lfric_source / 'miniapps' / 'gungho_model' / 'source', dst_label='') - grab_folder(state, src=lfric_source / 'miniapps' / 'gungho_model' / 'optimisation', + grab_folder(state, + src=lfric_source / 'infrastructure/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'components/driver/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'components' / 'inventory' / 'source', + dst_label='') + grab_folder(state, + src=lfric_source / 'components/science/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'components/lfric-xios/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'gungho/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'um_physics/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'miniapps' / 'gungho_model' / 'source', + dst_label='') + grab_folder(state, + src=lfric_source / 'miniapps' / 'gungho_model' + / 'optimisation', dst_label='optimisation') - grab_folder(state, src=lfric_source / 'jules/source/', dst_label='') - grab_folder(state, src=lfric_source / 'socrates/source/', dst_label='') + grab_folder(state, + src=lfric_source / 'jules/source/', + dst_label='') + grab_folder(state, + src=lfric_source / 'socrates/source/', + dst_label='') # generate more source files in source and source/configuration configurator( @@ -72,15 +95,24 @@ def get_transformation_script(fpath, config): / 'HEAD' / 'rose-meta.conf', ) - find_source_files(state, path_filters=[Exclude('unit-test', '/test/')]) + find_source_files(state, + path_filters=[ + Exclude('unit-test', '/test/') + ]) preprocess_fortran( state, common_flags=[ - '-DRDEF_PRECISION=64', '-DR_SOLVER_PRECISION=64', '-DR_TRAN_PRECISION=64', '-DUSE_XIOS', - ]) + '-DRDEF_PRECISION=64', + '-DR_SOLVER_PRECISION=64', + '-DR_TRAN_PRECISION=64', + '-DUSE_XIOS', + ] + ) - preprocess_x90(state, common_flags=['-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']) + preprocess_x90(state, common_flags=['-DRDEF_PRECISION=64', + '-DUSE_XIOS', 
+ '-DCOUPLED']) psyclone( state, @@ -94,7 +126,12 @@ def get_transformation_script(fpath, config): analyse( state, root_symbol='gungho_model', - ignore_mod_deps=['netcdf', 'MPI', 'yaxt', 'pfunit_mod', 'xios', 'mod_wait'], + ignore_mod_deps=['netcdf', + 'MPI', + 'yaxt', + 'pfunit_mod', + 'xios', + 'mod_wait'], ) compile_fortran( @@ -105,11 +142,18 @@ def get_transformation_script(fpath, config): '-g', '-std=f2008', - '-Wall', '-Werror=conversion', '-Werror=unused-variable', '-Werror=character-truncation', - '-Werror=unused-value', '-Werror=tabs', - - '-DRDEF_PRECISION=64', '-DR_SOLVER_PRECISION=64', '-DR_TRAN_PRECISION=64', - '-DUSE_XIOS', '-DUSE_MPI=YES', + '-Wall', + '-Werror=conversion', + '-Werror=unused-variable', + '-Werror=character-truncation', + '-Werror=unused-value', + '-Werror=tabs', + + '-DRDEF_PRECISION=64', + '-DR_SOLVER_PRECISION=64', + '-DR_TRAN_PRECISION=64', + '-DUSE_XIOS', + '-DUSE_MPI=YES', ], ) @@ -119,9 +163,10 @@ def get_transformation_script(fpath, config): state, flags=[ '-fopenmp', - - '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', # EXTERNAL_DYNAMIC_LIBRARIES - '-lxios', # EXTERNAL_STATIC_LIBRARIES + # EXTERNAL_DYNAMIC_LIBRARIES + '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', + # EXTERNAL_STATIC_LIBRARIES + '-lxios', '-lstdc++', ], ) diff --git a/run_configs/lfric/lfric_common.py b/run_configs/lfric/lfric_common.py index fd4488c6..e0a55c73 100644 --- a/run_configs/lfric/lfric_common.py +++ b/run_configs/lfric/lfric_common.py @@ -3,6 +3,7 @@ import shutil from pathlib import Path +from fab.build_config import BuildConfig from fab.steps import step from fab.tools import Category, Tool @@ -23,12 +24,17 @@ def check_available(self): # todo: is this part of psyclone? if so, put it in the psyclone step module? 
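# (Context, added for clarity: the @step decorator from fab.steps wraps the
# decorated function as a named build step, providing the standard per-step
# logging and timing around its execution.)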
@step
-def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_conf: Path, config_dir=None):
+def configurator(config: BuildConfig,
+                 lfric_source: Path,
+                 gpl_utils_source: Path,
+                 rose_meta_conf: Path,
+                 config_dir=None):
     rose_picker_tool = gpl_utils_source / 'rose_picker/rose_picker'
-    gen_namelist_tool = lfric_source / 'infrastructure/build/tools/GenerateNamelist'
-    gen_loader_tool = lfric_source / 'infrastructure/build/tools/GenerateLoader'
-    gen_feigns_tool = lfric_source / 'infrastructure/build/tools/GenerateFeigns'
+    build_tools = lfric_source / 'infrastructure/build/tools'
+    gen_namelist_tool = build_tools / 'GenerateNamelist'
+    gen_loader_tool = build_tools / 'GenerateLoader'
+    gen_feigns_tool = build_tools / 'GenerateFeigns'
 
     config_dir = config_dir or config.source_root / 'configuration'
 
@@ -37,7 +43,8 @@ def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_c
     env['PYTHONPATH'] += f':{rose_lfric_path}'
 
     # "rose picker"
-    # creates rose-meta.json and config_namelists.txt in gungho/source/configuration
+    # creates rose-meta.json and config_namelists.txt in
+    # gungho/source/configuration
     logger.info('rose_picker')
     rose_picker = Script(rose_picker_tool)
     rose_picker.run(additional_parameters=[str(rose_meta_conf),
@@ -56,7 +63,10 @@ def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_c
     # create configuration_mod.f90 in source root
     logger.info('GenerateLoader')
     gen_loader = Script(gen_loader_tool)
-    names = [name.strip() for name in open(config_dir / 'config_namelists.txt').readlines()]
+    names = [
+        name.strip()
+        for name in open(config_dir / 'config_namelists.txt').readlines()
+    ]
     configuration_mod_fpath = config.source_root / 'configuration_mod.f90'
     gen_loader.run(additional_parameters=[configuration_mod_fpath, *names])
 
@@ -70,7 +80,8 @@ def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_c
 
     # put the generated source into an artefact
     # todo: we shouldn't need to do this, should we?
-    # it's just going to be found in the source folder with everything else.
+    # it's just going to be found in the source folder with everything
+    # else.
     config._artefact_store['configurator_output'] = [
         configuration_mod_fpath,
         feign_config_mod_fpath
@@ -80,7 +91,8 @@ def configurator(config, lfric_source: Path, gpl_utils_source: Path, rose_meta_c
 
 @step
 def fparser_workaround_stop_concatenation(config):
     """
-    fparser can't handle string concat in a stop statement. This step is a workaround.
+    fparser can't handle string concat in a stop statement. This step is a
+    workaround.
 
     https://github.com/stfc/fparser/issues/330
 
diff --git a/run_configs/lfric/mesh_tools.py b/run_configs/lfric/mesh_tools.py
index 634b7834..1698fd25 100755
--- a/run_configs/lfric/mesh_tools.py
+++ b/run_configs/lfric/mesh_tools.py
@@ -21,24 +21,30 @@
     lfric_source = lfric_source_config.source_root / 'lfric'
     gpl_utils_source = gpl_utils_source_config.source_root / 'gpl_utils'
 
-    # this folder just contains previous output, for testing the overrides mechanism.
+    # this folder just contains previous output, for testing the overrides
+    # mechanism.
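    # (Hedged context note: the psyclone step below can be pointed at this
    # overrides folder, so that files found here are used in place of the
    # corresponding processed output.)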
psyclone_overrides = Path(__file__).parent / 'mesh_tools_overrides' with BuildConfig(project_label='mesh tools $compiler $two_stage', tool_box=ToolBox()) as state: - grab_folder(state, src=lfric_source / 'infrastructure/source/', dst_label='') - grab_folder(state, src=lfric_source / 'mesh_tools/source/', dst_label='') - grab_folder(state, src=lfric_source / 'components/science/source/', dst_label='') + grab_folder(state, src=lfric_source / 'infrastructure/source/', + dst_label='') + grab_folder(state, src=lfric_source / 'mesh_tools/source/', + dst_label='') + grab_folder(state, src=lfric_source / 'components/science/source/', + dst_label='') # grab the psyclone overrides folder into the source folder - grab_folder(state, src=psyclone_overrides, dst_label='mesh_tools_overrides') + grab_folder(state, src=psyclone_overrides, + dst_label='mesh_tools_overrides') # generate more source files in source and source/configuration configurator( state, lfric_source=lfric_source, gpl_utils_source=gpl_utils_source, - rose_meta_conf=lfric_source / 'mesh_tools/rose-meta/lfric-mesh_tools/HEAD/rose-meta.conf', + rose_meta_conf=lfric_source / 'mesh_tools' / 'rose-meta' + / 'lfric-mesh_tools' / 'HEAD' / 'rose-meta.conf', ) find_source_files( @@ -50,7 +56,9 @@ preprocess_fortran(state) - preprocess_x90(state, common_flags=['-DRDEF_PRECISION=64', '-DUSE_XIOS', '-DCOUPLED']) + preprocess_x90(state, common_flags=['-DRDEF_PRECISION=64', + '-DUSE_XIOS', + '-DCOUPLED']) psyclone( state, @@ -63,8 +71,15 @@ analyse( state, - root_symbol=['cubedsphere_mesh_generator', 'planar_mesh_generator', 'summarise_ugrid'], - # ignore_mod_deps=['netcdf', 'MPI', 'yaxt', 'pfunit_mod', 'xios', 'mod_wait'], + root_symbol=['cubedsphere_mesh_generator', + 'planar_mesh_generator', + 'summarise_ugrid'], + # ignore_mod_deps=['netcdf', + # 'MPI', + # 'yaxt', + # 'pfunit_mod', + # 'xios', + # 'mod_wait'], ) compile_fortran(state, common_flags=['-c']) @@ -75,8 +90,10 @@ link_exe( state, flags=[ - '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', # EXTERNAL_DYNAMIC_LIBRARIES - '-lxios', # EXTERNAL_STATIC_LIBRARIES + # EXTERNAL_DYNAMIC_LIBRARIES + '-lyaxt', '-lyaxt_c', '-lnetcdff', '-lnetcdf', '-lhdf5', + # EXTERNAL_STATIC_LIBRARIES + '-lxios', '-lstdc++', ], ) diff --git a/run_configs/um/build_um.py b/run_configs/um/build_um.py index ce769865..5e59338a 100755 --- a/run_configs/um/build_um.py +++ b/run_configs/um/build_um.py @@ -52,18 +52,27 @@ def replace_in_file(inpath, outpath, find, replace): open(os.path.expanduser(outpath), "wt").write( case_insensitive_replace(in_str=orig, find=find, replace_with=replace)) - warnings.warn("SPECIAL MEASURE for io_configuration_mod.F90: fparser2 misunderstands 'NameListFile'") - replace_in_file( - config.project_workspace / 'source/um/io_services/common/io_configuration_mod.F90', - config.project_workspace / 'source/um/io_services/common/io_configuration_mod.F90', - r'(\W)NameListFile', r'\g<1>FabNameListFile') + um_source = config.project_workspace / 'source' / 'um' - warnings.warn("SPECIAL MEASURE for um_config.F90: fparser2 misunderstands 'NameListFile'") + warnings.warn( + "SPECIAL MEASURE for io_configuration_mod.F90: " + "fparser2 misunderstands 'NameListFile'" + ) replace_in_file( - config.project_workspace / 'source/um/control/top_level/um_config.F90', - config.project_workspace / 'source/um/control/top_level/um_config.F90', - r'(\W)NameListFile', r'\g<1>FabNameListFile') - + um_source / 'io_services/common/io_configuration_mod.F90', + um_source / 
'io_services/common/io_configuration_mod.F90', + r'(\W)NameListFile', r'\g<1>FabNameListFile' + ) + + warnings.warn( + "SPECIAL MEASURE for um_config.F90: " + "fparser2 misunderstands 'NameListFile'" + ) + replace_in_file( + um_source / 'control/top_level/um_config.F90', + um_source / 'control/top_level/um_config.F90', + r'(\W)NameListFile', r'\g<1>FabNameListFile' + ) file_filtering = [ Exclude('unit-test', 'unit_test', '/test/'), @@ -125,13 +134,17 @@ def replace_in_file(inpath, outpath, find, replace): revision = 'vn12.1' um_revision = revision.replace('vn', 'um') - state = BuildConfig(project_label=f'um atmos safe {revision} $compiler $two_stage', - tool_box=ToolBox()) + state = BuildConfig( + project_label=f'um atmos safe {revision} $compiler $two_stage', + tool_box=ToolBox() + ) # compiler-specific flags compiler = state.tool_box[Category.FORTRAN_COMPILER] if compiler.name == 'gfortran': - compiler_specific_flags = ['-fdefault-integer-8', '-fdefault-real-8', '-fdefault-double-8'] + compiler_specific_flags = ['-fdefault-integer-8', + '-fdefault-real-8', + '-fdefault-double-8'] elif compiler.name == 'ifort': # compiler_specific_flags = ['-r8'] compiler_specific_flags = [ @@ -153,19 +166,34 @@ def replace_in_file(inpath, outpath, find, replace): # todo: these repo defs could make a good set of reusable variables # UM 12.1, 16th November 2021 - fcm_export(state, src='fcm:um.xm_tr/src', dst_label='um', revision=revision) + fcm_export(state, + src='fcm:um.xm_tr/src', + dst_label='um', + revision=revision) # JULES 6.2, for UM 12.1 - fcm_export(state, src='fcm:jules.xm_tr/src', dst_label='jules', revision=um_revision) + fcm_export(state, + src='fcm:jules.xm_tr/src', + dst_label='jules', + revision=um_revision) # SOCRATES 21.11, for UM 12.1 - fcm_export(state, src='fcm:socrates.xm_tr/src', dst_label='socrates', revision=um_revision) + fcm_export(state, + src='fcm:socrates.xm_tr/src', + dst_label='socrates', + revision=um_revision) # SHUMLIB, for UM 12.1 - fcm_export(state, src='fcm:shumlib.xm_tr/', dst_label='shumlib', revision=um_revision) + fcm_export(state, + src='fcm:shumlib.xm_tr/', + dst_label='shumlib', + revision=um_revision) # CASIM, for UM 12.1 - fcm_export(state, src='fcm:casim.xm_tr/src', dst_label='casim', revision=um_revision) + fcm_export(state, + src='fcm:casim.xm_tr/src', + dst_label='casim', + revision=um_revision) my_custom_code_fixes(state) @@ -179,7 +207,8 @@ def replace_in_file(inpath, outpath, find, replace): state, source=CollectionGetter(PRAGMAD_C), path_flags=[ - # todo: this is a bit "codey" - can we safely give longer strings and split later? + # todo: this is a bit "codey" - can we safely give longer + # strings and split later? 
AddFlags(match="$source/um/*", flags=[ '-I$source/um/include/other', '-I$source/shumlib/common/src', @@ -190,7 +219,8 @@ def replace_in_file(inpath, outpath, find, replace): '-I$source/shumlib/shum_thread_utils/src']), # todo: just 3 folders use this - AddFlags("$source/um/*", ['-DC95_2A', '-I$source/shumlib/shum_byteswap/src']), + AddFlags("$source/um/*", + ['-DC95_2A', '-I$source/shumlib/shum_byteswap/src']), ], ) @@ -204,7 +234,8 @@ def replace_in_file(inpath, outpath, find, replace): # coupling defines AddFlags("$source/um/control/timer/*", ['-DC97_3A']), - AddFlags("$source/um/io_services/client/stash/*", ['-DC96_1C']), + AddFlags("$source/um/io_services/client/stash/*", + ['-DC96_1C']), ], ) @@ -216,8 +247,10 @@ def replace_in_file(inpath, outpath, find, replace): # FortranParserWorkaround( # fpath=Path(state.build_output / "casim/lookup.f90"), # symbol_defs={'lookup'}, - # symbol_deps={'mphys_die', 'variable_precision', 'mphys_switches', 'mphys_parameters', 'special', - # 'passive_fields', 'casim_moments_mod', 'yomhook', 'parkind1'}, + # symbol_deps={'mphys_die', 'variable_precision', + # 'mphys_switches', 'mphys_parameters', + # 'special', 'passive_fields', + # 'casim_moments_mod', 'yomhook', 'parkind1'}, # ) # ] ) @@ -225,8 +258,13 @@ def replace_in_file(inpath, outpath, find, replace): compile_c(state, common_flags=['-c', '-std=c99']) # Locate the gcom library. UM 12.1 intended to be used with gcom 7.6 - gcom_build = os.getenv('GCOM_BUILD') or os.path.normpath(os.path.expanduser( - state.project_workspace / f"../gcom_object_archive_{compiler.name}/build_output")) + gcom_build = os.getenv('GCOM_BUILD') or os.path.normpath( + os.path.expanduser( + state.project_workspace.parent + / f"gcom_object_archive_{compiler.name}" + / "build_output" + ) + ) if not os.path.exists(gcom_build): raise RuntimeError(f'gcom not found at {gcom_build}') diff --git a/source/fab/steps/__init__.py b/source/fab/steps/__init__.py index f08671d0..24d7c52b 100644 --- a/source/fab/steps/__init__.py +++ b/source/fab/steps/__init__.py @@ -34,16 +34,20 @@ def run_mp(config, items, func, no_multiprocessing: bool = False): """ Called from Step.run() to process multiple items in parallel. - For example, a compile step would, in its run() method, find a list of source files in the artefact store. - It could then pass those paths to this method, along with a function to compile a *single* file. - The whole set of results are returned in a list-like, with undefined order. + For example, a compile step would, in its run() method, find a list of + source files in the artefact store. + It could then pass those paths to this method, along with a function to + compile a *single* file. + The whole set of results are returned in a list-like, with undefined + order. :param items: An iterable of items to process in parallel. :param func: A function to process a single item. Must accept a single argument. :param no_multiprocessing: - Overrides the config's multiprocessing flag, disabling multiprocessing for this call. + Overrides the config's multiprocessing flag, disabling multiprocessing + for this call. """ if config.multiprocessing and not no_multiprocessing: @@ -57,10 +61,12 @@ def run_mp(config, items, func, no_multiprocessing: bool = False): def run_mp_imap(config, items, func, result_handler): """ - Like run_mp, but uses imap instead of map so that we can process each result as it happens. + Like run_mp, but uses imap instead of map so that we can process each + result as it happens. 
-    This is useful for a slow operation where we want to save our progress as we go
-    instead of waiting for everything to finish, allowing us to pick up where we left off if the program is halted.
+    This is useful for a slow operation where we want to save our progress as
+    we go instead of waiting for everything to finish, allowing us to pick up
+    where we left off if the program is halted.
 
     :param items:
         An iterable of items to process in parallel.
@@ -84,7 +90,8 @@ def check_for_errors(results, caller_label=None):
     Check an iterable of results for any exceptions and handle them gracefully.
 
     This is a helper function for steps which use multiprocessing,
-    getting multiple results back from :meth:`~fab.steps.Step.run_mp` all in one go.
+    getting multiple results back from :meth:`~fab.steps.Step.run_mp` all in
+    one go.
 
     :param results:
         An iterable of results.
diff --git a/source/fab/steps/analyse.py b/source/fab/steps/analyse.py
index 153aaa00..35b9ba7b 100644
--- a/source/fab/steps/analyse.py
+++ b/source/fab/steps/analyse.py
@@ -4,34 +4,41 @@
 # which you should have received as part of this distribution
 ##############################################################################
 """
-Fab parses each C and Fortran file into an :class:`~fab.dep_tree.AnalysedDependent` object
-which contains the symbol definitions and dependencies for that file.
-
-From this set of analysed files, Fab builds a symbol table mapping symbols to their containing files.
-
-Fab uses the symbol table to turn symbol dependencies into file dependencies (stored in the AnalysedDependent objects).
-This gives us a file dependency tree for the entire project source. The data structure is simple,
-just a dict of *<source file>: <analysed file>*, where the analysed files' dependencies are other dict keys.
-
-If we're building a library, that's the end of the analysis process as we'll compile the entire project source.
-If we're building one or more executables, which happens when we use the `root_symbol` argument,
-Fab will extract a subtree from the entire dependency tree for each root symbol we specify.
-
-Finally, the resulting artefact collection is a dict of these subtrees (*"build trees"*),
-mapping *<root symbol>: <build tree>*.
-When building a library, there will be a single tree with a root symbol of `None`.
-
-Addendum: The language parsers Fab uses are unable to detect some kinds of dependency.
-For example, fparser can't currently identify a call statement in a one-line if statement.
-We can tell Fab that certain symbols *should have been included* in the build tree
-using the `unreferenced_deps` argument.
-For every symbol we provide, its source file *and dependencies* will be added to the build trees.
-
-Sometimes a language parser will crash while parsing a *valid* source file, even though the compiler
-can compile the file perfectly well. In this case we can give Fab the analysis results it should have made
-by passing FortranParserWorkaround objects into the `special_measure_analysis_results` argument.
-You'll have to manually read the file to determine which symbol definitions and dependencies it contains.
-
+Fab parses each C and Fortran file into an
+:class:`~fab.dep_tree.AnalysedDependent` object which contains the
+symbol definitions and dependencies for that file.
+
+From this set of analysed files, Fab builds a symbol table mapping symbols to
+their containing files.
+
+Fab uses the symbol table to turn symbol dependencies into file dependencies
+(stored in the AnalysedDependent objects). This gives us a file dependency
+tree for the entire project source. The data structure is simple, just a dict
+of *<source file>: <analysed file>*, where the analysed files' dependencies
+are other dict keys.
+
+If we're building a library, that's the end of the analysis process as we'll
+compile the entire project source. If we're building one or more executables,
+which happens when we use the `root_symbol` argument, Fab will extract a
+subtree from the entire dependency tree for each root symbol we specify.
+
+Finally, the resulting artefact collection is a dict of these subtrees
+(*"build trees"*), mapping *<root symbol>: <build tree>*. When building a
+library, there will be a single tree with a root symbol of `None`.
+
+Addendum: The language parsers Fab uses are unable to detect some kinds of
+dependency. For example, fparser can't currently identify a call statement in
+a one-line if statement. We can tell Fab that certain symbols *should have
+been included* in the build tree using the `unreferenced_deps` argument. For
+every symbol we provide, its source file *and dependencies* will be added to
+the build trees.
+
+Sometimes a language parser will crash while parsing a *valid* source file,
+even though the compiler can compile the file perfectly well. In this case we
+can give Fab the analysis results it should have made by passing
+FortranParserWorkaround objects into the `special_measure_analysis_results`
+argument. You'll have to manually read the file to determine which symbol
+definitions and dependencies it contains.
 """
 from itertools import chain
 import logging
@@ -66,8 +73,9 @@
 
 # todo: split out c and fortran? this class is still a bit big
-# This has all been done as a single step, for now, because we don't have a simple mp pattern
-# (i.e we don't have a list of artefacts and a function to feed them through).
+# This has all been done as a single step, for now, because we don't have a
+# simple mp pattern (i.e we don't have a list of artefacts and a function to
+# feed them through).
 @step
 def analyse(
     config,
@@ -82,39 +90,48 @@
     """
     Produce one or more build trees by analysing source code dependencies.
 
-    The resulting artefact collection is a mapping from root symbol to build tree.
-    The name of this artefact collection is taken from :py:const:`fab.constants.BUILD_TREES`.
+    The resulting artefact collection is a mapping from root symbol to build
+    tree. The name of this artefact collection is taken from
+    :py:const:`fab.constants.BUILD_TREES`.
 
-    If no artefact getter is specified in *source*, a default is used which provides input files
-    from multiple artefact collections, including the default C and Fortran preprocessor outputs
-    and any source files with a 'little' *.f90* extension.
+    If no artefact getter is specified in *source*, a default is used which
+    provides input files from multiple artefact collections, including the
+    default C and Fortran preprocessor outputs and any source files with a
+    'little' *.f90* extension.
 
-    A build tree is produced for every root symbol specified in *root_symbol*, which can be a string or list of.
-    This is how we create executable files. If no root symbol is specified, a single tree of the entire source
-    is produced (with a root symbol of `None`). This is how we create shared and static libraries.
+    A build tree is produced for every root symbol specified in
+    *root_symbol*, which can be a string or a list of strings. This is how we
+    create executable files. If no root symbol is specified, a single tree of
+    the entire source is produced (with a root symbol of `None`). 
This is how we create shared and + static libraries. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param source: An :class:`~fab.util.ArtefactsGetter` to get the source files. :param find_programs: - Instructs the analyser to automatically identify program definitions in the source. - Alternatively, the required programs can be specified with the root_symbol argument. + Instructs the analyser to automatically identify program definitions + in the source. Alternatively, the required programs can be specified + with the root_symbol argument. :param root_symbol: - When building an executable, provide the Fortran Program name(s), or 'main' for C. - If None, build tree extraction will not be performed and the entire source will be used - as the build tree - for building a shared or static library. + When building an executable, provide the Fortran Program name(s), or + 'main' for C. If None, build tree extraction will not be performed and + the entire source will be used as the build tree - for building a + shared or static library. :param std: The fortran standard, passed through to fparser2. Defaults to 'f2008'. :param special_measure_analysis_results: - When a language parser cannot parse a valid source file, we can manually provide the expected analysis - results with this argument. + When a language parser cannot parse a valid source file, we can + manually provide the expected analysis results with this argument. :param unreferenced_deps: - A list of symbols which are needed for the build, but which cannot be automatically determined by Fab. + A list of symbols which are needed for the build, but which cannot be + automatically determined by Fab. For example, functions that are called in a one-line if statement. Assuming the files containing these symbols are present and analysed, - those files and all their dependencies will be added to the build tree(s). + those files and all their dependencies will be added to the build + tree(s). :param ignore_mod_deps: Third party Fortran module names to be ignored. :param name: @@ -122,43 +139,54 @@ def analyse( """ - # Note: a code smell?: we insist on the manual analysis results, special_measure_analysis_results, - # arriving as a list not a set because we don't want to hash them yet, - # because the files they refer to probably don't exist yet, - # because we're just creating steps at this point, so there's been no grab... + # ToDo: a code smell?: we insist on the manual analysis results, + # special_measure_analysis_results, arriving as a list not a set + # because we don't want to hash them yet, because the files they + # refer to probably don't exist yet, because we're just creating + # steps at this point, so there's been no grab... 
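    # Illustrative usage, cross-referenced from the run configs earlier in
    # this diff (not part of the change itself):
    #     analyse(state, root_symbol='jules',
    #             unreferenced_deps=['imogen_update_carb'])    # build_jules.py
    #     analyse(state, root_symbol=['cubedsphere_mesh_generator',
    #                                 'planar_mesh_generator',
    #                                 'summarise_ugrid'])      # mesh_tools.py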
if find_programs and root_symbol: raise ValueError("find_programs and root_symbol can't be used together") source_getter = source or DEFAULT_SOURCE_GETTER - root_symbols: Optional[List[str]] = [root_symbol] if isinstance(root_symbol, str) else root_symbol - special_measure_analysis_results = list(special_measure_analysis_results or []) + root_symbols: Optional[List[str]] = [root_symbol] \ + if isinstance(root_symbol, str) else root_symbol + special_measure_analysis_results = list(special_measure_analysis_results + or []) unreferenced_deps = list(unreferenced_deps or []) # todo: these seem more like functions - fortran_analyser = FortranAnalyser(std=std, ignore_mod_deps=ignore_mod_deps) + fortran_analyser = FortranAnalyser(std=std, + ignore_mod_deps=ignore_mod_deps) c_analyser = CAnalyser() """ Creates the *build_trees* artefact from the files in `self.source_getter`. Does the following, in order: - - Create a hash of every source file. Used to check if it's already been analysed. - - Parse the C and Fortran files to find external symbol definitions and dependencies in each file. - - Analysis results are stored in a csv as-we-go, so analysis can be resumed if interrupted. + - Create a hash of every source file. Used to check if it's already + been analysed. + - Parse the C and Fortran files to find external symbol definitions + and dependencies in each file. + - Analysis results are stored in a csv as-we-go, so analysis can + be resumed if interrupted. - Create a 'symbol table' recording which file each symbol is in. - Work out the file dependencies from the symbol dependencies. - At this point we have a source tree for the entire source. - - (Optionally) Extract a sub tree for every root symbol, if provided. For building executables. + - (Optionally) Extract a sub tree for every root symbol, if provided. + For building executables. - This step uses multiprocessing, unless disabled in the :class:`~fab.steps.Step` class. + This step uses multiprocessing, unless disabled in the + :class:`~fab.steps.Step` class. :param artefact_store: - Contains artefacts created by previous Steps, and where we add our new artefacts. - This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process. + Contains artefacts created by previous Steps, and where we add our new + artefacts. This is where the given + :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. """ @@ -168,28 +196,38 @@ def analyse( # parse files: List[Path] = source_getter(config.artefact_store) - analysed_files = _parse_files(config, files=files, fortran_analyser=fortran_analyser, c_analyser=c_analyser) + analysed_files = _parse_files(config, files=files, + fortran_analyser=fortran_analyser, + c_analyser=c_analyser) _add_manual_results(special_measure_analysis_results, analysed_files) - # shall we search the results for fortran programs and a c function called main? + # shall we search the results for fortran programs and a c function + # called main? 
if find_programs: # find fortran programs - sets_of_programs = [af.program_defs for af in by_type(analysed_files, AnalysedFortran)] + sets_of_programs = [af.program_defs + for af in by_type(analysed_files, + AnalysedFortran)] root_symbols = list(chain(*sets_of_programs)) # find c main() - c_with_main = list(filter(lambda c: 'main' in c.symbol_defs, by_type(analysed_files, AnalysedC))) + c_with_main = list(filter(lambda c: 'main' in c.symbol_defs, + by_type(analysed_files, AnalysedC))) if c_with_main: root_symbols.append('main') if len(c_with_main) > 1: raise FabException("multiple c main() functions found") - logger.info(f'automatically found the following programs to build: {", ".join(root_symbols)}') + logger.info( + "automatically found the following programs to build: " + + ", ".join(root_symbols) + ) # analyse project_source_tree, symbol_table = _analyse_dependencies(analysed_files) - # add the file dependencies for MO FCM's "DEPENDS ON:" commented file deps (being removed soon) + # add the file dependencies for MO FCM's "DEPENDS ON:" commented file + # deps (being removed soon) with TimerLogger("adding MO FCM 'DEPENDS ON:' file dependency comments"): add_mo_commented_file_deps(project_source_tree) @@ -197,13 +235,18 @@ def analyse( # extract "build trees" for executables. if root_symbols: - build_trees = _extract_build_trees(root_symbols, project_source_tree, symbol_table) + build_trees = _extract_build_trees(root_symbols, + project_source_tree, + symbol_table) else: build_trees = {None: project_source_tree} # throw in any extra source we need, which Fab can't automatically detect for build_tree in build_trees.values(): - _add_unreferenced_deps(unreferenced_deps, symbol_table, project_source_tree, build_tree) + _add_unreferenced_deps(unreferenced_deps, + symbol_table, + project_source_tree, + build_tree) validate_dependencies(build_tree) config.artefact_store[BUILD_TREES] = build_trees @@ -222,8 +265,10 @@ def _analyse_dependencies(analysed_files: Iterable[AnalysedDependent]): _gen_file_deps(analysed_files, symbol_table) # build the tree - # the nodes refer to other nodes via the file dependencies we just made, which are keys into this dict - source_tree: Dict[Path, AnalysedDependent] = {a.fpath: a for a in analysed_files} + # the nodes refer to other nodes via the file dependencies we just made, + # which are keys into this dict + source_tree: Dict[Path, AnalysedDependent] = {a.fpath: a + for a in analysed_files} return source_tree, symbol_table @@ -232,7 +277,8 @@ def _extract_build_trees(root_symbols, project_source_tree, symbol_table): """ Find the subset of files needed to build each root symbol (executable). - Assumes we have been given a root symbol(s) or we wouldn't have been called. + Assumes we have been given a root symbol(s) or we wouldn't have been + called. Returns a build tree for every root symbol. 
""" @@ -240,27 +286,38 @@ def _extract_build_trees(root_symbols, project_source_tree, symbol_table): assert root_symbols is not None for root in root_symbols: with TimerLogger(f"extracting build tree for root '{root}'"): - build_tree = extract_sub_tree(project_source_tree, symbol_table[root], verbose=False) - - logger.info(f"target source tree size {len(build_tree)} (target '{symbol_table[root]}')") + build_tree = extract_sub_tree(project_source_tree, + symbol_table[root], + verbose=False) + + logger.info( + f"target source tree size {len(build_tree)} " + f"(target '{symbol_table[root]}')" + ) build_trees[root] = build_tree return build_trees -def _parse_files(config, files: List[Path], fortran_analyser, c_analyser) -> Set[AnalysedDependent]: +def _parse_files(config, + files: List[Path], + fortran_analyser, c_analyser) -> Set[AnalysedDependent]: """ Determine the symbols which are defined in, and used by, each file. - Returns the analysed_fortran and analysed_c as lists of :class:`~fab.dep_tree.AnalysedDependent` - with no file dependencies, to be filled in later. - + Returns the analysed_fortran and analysed_c as lists of + :class:`~fab.dep_tree.AnalysedDependent` with no file dependencies, to be + filled in later. """ # fortran fortran_files = set(filter(lambda f: f.suffix == '.f90', files)) - with TimerLogger(f"analysing {len(fortran_files)} preprocessed fortran files"): - fortran_results = run_mp(config, items=fortran_files, func=fortran_analyser.run) - fortran_analyses, fortran_artefacts = zip(*fortran_results) if fortran_results else (tuple(), tuple()) + with TimerLogger( + f"analysing {len(fortran_files)} preprocessed fortran files" + ): + fortran_results = run_mp(config, items=fortran_files, + func=fortran_analyser.run) + fortran_analyses, fortran_artefacts = zip(*fortran_results) \ + if fortran_results else (tuple(), tuple()) # warn about naughty fortran usage if fortran_analyser.depends_on_comment_found: @@ -273,17 +330,28 @@ def _parse_files(config, files: List[Path], fortran_analyser, c_analyser) -> Set # Override the multiprocessing flag. no_multiprocessing = False if sys.version.startswith('3.7'): - warnings.warn('Python 3.7 detected. Disabling multiprocessing for C analysis.') + warnings.warn( + "Python 3.7 detected. Disabling multiprocessing " + "for C analysis." + ) no_multiprocessing = True - c_results = run_mp(config, items=c_files, func=c_analyser.run, no_multiprocessing=no_multiprocessing) - c_analyses, c_artefacts = zip(*c_results) if c_results else (tuple(), tuple()) - - # Check for parse errors but don't fail. The failed files might not be required. + c_results = run_mp(config, items=c_files, + func=c_analyser.run, + no_multiprocessing=no_multiprocessing) + c_analyses, c_artefacts = zip(*c_results) \ + if c_results else (tuple(), tuple()) + + # Check for parse errors but don't fail. The failed files might not be + # required. analyses = fortran_analyses + c_analyses exceptions = list(by_type(analyses, Exception)) if exceptions: err_str = '\n\n'.join(map(str, exceptions)) - print(f"\nThere were {len(exceptions)} analysis errors:\n\n{err_str}\n\n", file=sys.stderr) + print( + f"\nThere were {len(exceptions)} analysis errors:" + f"\n\n{err_str}\n\n", + file=sys.stderr + ) # ToDo: What the hell is this? Why no log? 
# record the artefacts as being current artefacts = by_type(fortran_artefacts + c_artefacts, Path) @@ -291,28 +359,39 @@ def _parse_files(config, files: List[Path], fortran_analyser, c_analyser) -> Set # ignore empty files analysed_files = by_type(analyses, AnalysedFile) - non_empty = {af for af in analysed_files if not isinstance(af, EmptySourceFile)} + non_empty = {af for af in analysed_files + if not isinstance(af, EmptySourceFile)} return non_empty -def _add_manual_results(special_measure_analysis_results, analysed_files: Set[AnalysedDependent]): +def _add_manual_results(special_measure_analysis_results, + analysed_files: Set[AnalysedDependent]): # add manual analysis results for files which could not be parsed if special_measure_analysis_results: - warnings.warn("SPECIAL MEASURE: injecting user-defined analysis results") + warnings.warn( + "SPECIAL MEASURE: injecting user-defined analysis results" + ) already_present = {af.fpath for af in analysed_files} for r in special_measure_analysis_results: if r.fpath in already_present: - # Note: This exception stops the user from being able to override results for files - # which don't *crash* the parser. We don't have a use case to do this, but it's worth noting. - # If we want to allow this we can raise a warning instead of an exception. + # Note: This exception stops the user from being able to + # override results for files which don't *crash* the + # parser. We don't have a use case to do this, but it's + # worth noting. + # If we want to allow this we can raise a warning + # instead of an exception. raise ValueError(f'Unnecessary ParserWorkaround for {r.fpath}') analysed_files.add(r.as_analysed_fortran()) - logger.info(f'added {len(special_measure_analysis_results)} manual analysis results') + logger.info( + f'added {len(special_measure_analysis_results)} manual analysis results' + ) -def _gen_symbol_table(analysed_files: Iterable[AnalysedDependent]) -> Dict[str, Path]: +def _gen_symbol_table( + analysed_files: Iterable[AnalysedDependent] +) -> Dict[str, Path]: """ Create a dictionary mapping symbol names to the files in which they appear. @@ -323,22 +402,30 @@ def _gen_symbol_table(analysed_files: Iterable[AnalysedDependent]) -> Dict[str, for symbol_def in analysed_file.symbol_defs: # check for duplicates if symbol_def in symbols: - duplicates.append(ValueError( - f"duplicate symbol '{symbol_def}' defined in {analysed_file.fpath} " - f"already found in {symbols[symbol_def]}")) + duplicates.append( + ValueError( + f"duplicate symbol '{symbol_def}' defined " + f"in {analysed_file.fpath} already found " + f"in {symbols[symbol_def]}" + ) + ) continue symbols[symbol_def] = analysed_file.fpath if duplicates: - # we don't break the build because these symbols might not be required to build the exe + # we don't break the build because these symbols might not be required + # to build the exe. # todo: put a big warning at the end of the build? err_msg = "\n".join(map(str, duplicates)) - warnings.warn(f"Duplicates found while generating symbol table:\n{err_msg}") + warnings.warn( + f"Duplicates found while generating symbol table:\n{err_msg}" + ) return symbols -def _gen_file_deps(analysed_files: Iterable[AnalysedDependent], symbols: Dict[str, Path]): +def _gen_file_deps(analysed_files: Iterable[AnalysedDependent], + symbols: Dict[str, Path]): """ Use the symbol table to convert symbol dependencies into file dependencies. 
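The data handed from `_gen_symbol_table` to `_gen_file_deps` is a flat name-to-file mapping; its shape, with invented paths:

    from pathlib import Path

    # what _gen_symbol_table returns (hypothetical project files)
    symbols = {'constants_mod': Path('src/constants_mod.f90'),
               'main': Path('src/main.f90')}

    # _gen_file_deps then resolves each file's symbol_deps through this
    # table, e.g. a file depending on the symbol 'constants_mod' gains
    # Path('src/constants_mod.f90') in its file_deps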
@@ -354,7 +441,9 @@ def _gen_file_deps(analysed_files: Iterable[AnalysedDependent], symbols: Dict[st
             # warn of missing file
             if not file_dep:
                 deps_not_found.add(symbol_dep)
-                logger.debug(f"not found {symbol_dep} for {analysed_file.fpath}")
+                logger.debug(
+                    f"not found {symbol_dep} for {analysed_file.fpath}"
+                )
                 continue
             analysed_file.file_deps.add(file_dep)
     if deps_not_found:
@@ -367,19 +456,24 @@ def _add_unreferenced_deps(unreferenced_deps, symbol_table: Dict[str, Path],
     """
     Add files to the build tree.

-    This is used for building Fortran code which Fab doesn't know is a dependency.
+    This is used for building Fortran code which Fab doesn't know is a
+    dependency.

     """
     if not unreferenced_deps:
         return

-    logger.info(f"Adding {len(unreferenced_deps or [])} unreferenced dependencies")
+    logger.info(
+        f"Adding {len(unreferenced_deps or [])} unreferenced dependencies"
+    )

     for symbol_dep in unreferenced_deps:

         # what file is the symbol in?
         analysed_fpath = symbol_table.get(symbol_dep)
         if not analysed_fpath:
-            warnings.warn(f"no file found for unreferenced dependency {symbol_dep}")
+            warnings.warn(
+                f"no file found for unreferenced dependency {symbol_dep}"
+            )
             continue
         analysed_file = all_analysed_files[analysed_fpath]
@@ -390,10 +484,13 @@ def _add_unreferenced_deps(unreferenced_deps, symbol_table: Dict[str, Path],

         # is it already in the build tree?
         if analysed_file.fpath in build_tree:
-            logger.info(f"file {analysed_file.fpath} for unreferenced dependency {symbol_dep} "
-                        f"is already in the build tree")
+            logger.info(
+                f"file {analysed_file.fpath} for unreferenced "
+                f"dependency {symbol_dep} is already in the build tree"
+            )
             continue

         # add the file and its file deps
-        sub_tree = extract_sub_tree(source_tree=all_analysed_files, root=analysed_fpath)
+        sub_tree = extract_sub_tree(source_tree=all_analysed_files,
+                                    root=analysed_fpath)
         build_tree.update(sub_tree)

diff --git a/source/fab/steps/archive_objects.py b/source/fab/steps/archive_objects.py
index f4d5efcf..dc697689 100644
--- a/source/fab/steps/archive_objects.py
+++ b/source/fab/steps/archive_objects.py
@@ -24,10 +24,12 @@
 DEFAULT_SOURCE_GETTER = CollectionGetter(OBJECT_FILES)

-# todo: two diagrams showing the flow of artefacts in the exe and library use cases
-# show how the library has a single build target with None as the name.
+# todo: two diagrams showing the flow of artefacts in the exe and library use
+# cases show how the library has a single build target with None as the
+# name.

-# todo: all this documentation for such a simple step - should we split it up somehow?
+# todo: all this documentation for such a simple step - should we split it up
+# somehow?

 @step
 def archive_objects(config: BuildConfig,
@@ -37,58 +39,77 @@ def archive_objects(config: BuildConfig,
     """
     Create an object archive for every build target, from their object files.

-    An object archive is a set of object (*.o*) files bundled into a single file, typically with a *.a* extension.
+    An object archive is a set of object (*.o*) files bundled into a single
+    file, typically with a *.a* extension.

-    Expects one or more build targets from its artefact getter, of the form Dict[name, object_files].
-    By default, it finds the build targets and their object files in the artefact collection named by
-    :py:const:`fab.constants.COMPILED_FILES`.
+    Expects one or more build targets from its artefact getter, of the form
+    Dict[name, object_files].
+    By default, it finds the build targets and their object files in the
+    artefact collection named by :py:const:`fab.constants.COMPILED_FILES`.

     This step has three use cases:

     * The **object archive** is the end goal of the build.
-    * The object archive is a convenience step before linking a **shared object**.
-    * One or more object archives as convenience steps before linking **executables**.
-
-    The benefit of creating an object archive before linking is simply to reduce the size
-    of the linker command, which might otherwise include thousands of .o files, making any error output
+    * The object archive is a convenience step before linking a **shared
+      object**.
+    * One or more object archives as convenience steps before linking
+      **executables**.
+
+    The benefit of creating an object archive before linking is simply to
+    reduce the size of the linker command, which might otherwise include
+    thousands of .o files, making any error output
     difficult to read.

     You don't have to use this step before linking.
-    The linker step has a default artefact getter which will work with or without this preceding step.
+    The linker step has a default artefact getter which will work with or
+    without this preceding step.

     **Creating a Static or Shared Library:**

-    When building a library there is expected to be a single build target with a `None` name.
-    This typically happens when configuring the :class:`~fab.steps.analyser.Analyser` step *without* a root symbol.
-    We can assume the list of object files is the entire project source, compiled.
+    When building a library there is expected to be a single build target with
+    a `None` name.
+    This typically happens when configuring the
+    :class:`~fab.steps.analyser.Analyser` step *without* a root symbol.
+    We can assume the list of object files is the entire project source,
+    compiled.

     In this case you must specify an *output_fpath*.

     **Creating Executables:**

-    When creating executables, there is expected to be one or more build targets, each with a name.
-    This typically happens when configuring the :class:`~fab.steps.analyser.Analyser` step *with* a root symbol(s).
-    We can assume each list of object files is sufficient to build each *.exe*.
+    When creating executables, there is expected to be one or more build
+    targets, each with a name.
+    This typically happens when configuring the
+    :class:`~fab.steps.analyser.Analyser` step *with* a root symbol(s).
+    We can assume each list of object files is sufficient to build each
+    *.exe*.

-    In this case you cannot specify an *output_fpath* path because they are automatically created from the
-    target name.
+    In this case you cannot specify an *output_fpath*, because the paths are
+    automatically created from the target names.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings
         such as the project workspace folder or the multiprocessing flag.
     :param source:
-        An :class:`~fab.artefacts.ArtefactsGetter` which give us our lists of objects to archive.
-        The artefacts are expected to be of the form `Dict[root_symbol_name, list_of_object_files]`.
+        An :class:`~fab.artefacts.ArtefactsGetter` which gives us our lists of
+        objects to archive.
+        The artefacts are expected to be of the form `Dict[root_symbol_name,
+        list_of_object_files]`.
     :param output_fpath:
         The file path of the archive file to create.
-        This string can include templating, where "$output" is replaced with the output folder.
+        This string can include templating, where "$output" is replaced with
+        the output folder.

         * Must be specified when building a library file (no build target name).
-        * Must not be specified when building linker input (one or more build target names).
+        * Must not be specified when building linker input (one or more build
+          target names).

     :param output_collection:
         The name of the artefact collection to create.
         Defaults to the name in :const:`fab.constants.OBJECT_ARCHIVES`.

     """

-    # todo: the output path should not be an abs fpath, it should be relative to the proj folder
+    # todo: the output path should not be an abs fpath, it should be relative
+    #       to the proj folder

     source_getter = source or DEFAULT_SOURCE_GETTER
     ar = config.tool_box[Category.AR]
@@ -100,9 +121,14 @@ def archive_objects(config: BuildConfig,
     target_objects = source_getter(config.artefact_store)
     assert target_objects.keys()
     if output_fpath and list(target_objects.keys()) != [None]:
-        raise ValueError("You must not specify an output path (library) when there are root symbols (exes)")
+        raise ValueError(
+            "You must not specify an output path (library) when there are "
+            "root symbols (exes)"
+        )
     if not output_fpath and list(target_objects.keys()) == [None]:
-        raise ValueError("You must specify an output path when building a library.")
+        raise ValueError(
+            "You must specify an output path when building a library."
+        )

     output_archives = config.artefact_store.setdefault(output_collection, {})
     for root, objects in target_objects.items():

@@ -112,7 +138,8 @@ def archive_objects(config: BuildConfig,
             output_fpath = str(config.build_output / f'{root}.a')
         else:
             # we're building a single object archive with a given filename
-            assert len(target_objects) == 1, "unexpected root of None with multiple build targets"
+            assert len(target_objects) == 1, \
+                "unexpected root of None with multiple build targets"
             output_fpath = Template(str(output_fpath)).substitute(
                 output=config.build_output)

@@ -121,6 +148,8 @@ def archive_objects(config: BuildConfig,
         try:
             ar.create(output_fpath, sorted(objects))
         except RuntimeError as err:
-            raise RuntimeError(f"error creating object archive:\n{err}") from err
+            raise RuntimeError(
+                f"error creating object archive:\n{err}"
+            ) from err

         output_archives[root] = [output_fpath]

diff --git a/source/fab/steps/c_pragma_injector.py b/source/fab/steps/c_pragma_injector.py
index 623172a2..60e04458 100644
--- a/source/fab/steps/c_pragma_injector.py
+++ b/source/fab/steps/c_pragma_injector.py
@@ -21,23 +21,31 @@

 # todo: test
 @step
-def c_pragma_injector(config, source: Optional[ArtefactsGetter] = None, output_name=None):
+def c_pragma_injector(config,
+                      source: Optional[ArtefactsGetter] = None,
+                      output_name=None):
     """
-    A build step to inject custom pragmas to mark blocks of user and system include statements.
+    A build step to inject custom pragmas to mark blocks of user and system
+    include statements.

-    By default, reads .c files from the *all_source* artefact and creates the *pragmad_c* artefact.
+    By default, reads .c files from the *all_source* artefact and creates the
+    *pragmad_c* artefact.

-    This step does not write to the build output folder, it creates the pragmad c in the same folder as the c file.
-    This is because a subsequent preprocessing step needs to look in the source folder for header files,
+    This step does not write to the build output folder; it creates the
+    pragma'd c file in the same folder as the original c file. This is
+    because a subsequent preprocessing step needs to look in the source
+    folder for header files,
     including in paths relative to the c file.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings
         such as the project workspace folder or the multiprocessing flag.
     :param source:
-        An :class:`~fab.artefacts.ArtefactsGetter` which give us our c files to process.
+        An :class:`~fab.artefacts.ArtefactsGetter` which gives us our c files
+        to process.
     :param output_name:
-        The name of the artefact collection to create in the artefact store, with a sensible default
+        The name of the artefact collection to create in the artefact store,
+        with a sensible default.

     """
     source_getter = source or DEFAULT_SOURCE_GETTER

diff --git a/source/fab/steps/cleanup_prebuilds.py b/source/fab/steps/cleanup_prebuilds.py
index 8d1548b2..95e5dc32 100644
--- a/source/fab/steps/cleanup_prebuilds.py
+++ b/source/fab/steps/cleanup_prebuilds.py
@@ -25,19 +25,26 @@

 @step
 def cleanup_prebuilds(
-        config, older_than: Optional[timedelta] = None, n_versions: int = 0, all_unused: Optional[bool] = None):
+    config,
+    older_than: Optional[timedelta] = None,
+    n_versions: int = 0,
+    all_unused: Optional[bool] = None
+):
     """
     A step to delete old files from the local incremental/prebuild folder.

     Assumes prebuild filenames follow the pattern: `<stem>.<hash>.<suffix>`.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
-        such as the project workspace folder or the multiprocessing flag.
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings such as the project workspace folder or the multiprocessing
+        flag.
     :param older_than:
-        Delete prebuild artefacts which are *n seconds* older than the *last prebuild access time*.
+        Delete prebuild artefacts which are *n seconds* older than the *last
+        prebuild access time*.
     :param n_versions:
-        Only keep the most recent n versions of each artefact `<stem>.*.<suffix>`
+        Only keep the most recent n versions of each artefact
+        `<stem>.*.<suffix>`.
     :param all_unused:
         Delete everything which was not part of the current build.

@@ -47,12 +54,16 @@ def cleanup_prebuilds(

     # If the user has not specified any cleanup parameters, we default to a hard cleanup.
if not n_versions and not older_than: if all_unused not in [None, True]: - raise ValueError(f"unexpected value for all_unused: '{all_unused}'") + raise ValueError( + f"unexpected value for all_unused: '{all_unused}'" + ) all_unused = True # if we're doing a hard cleanup, there's no point providing the softer options if all_unused and (n_versions or older_than): - raise ValueError("n_versions or older_than should not be specified with all_unused") + raise ValueError( + "n_versions or older_than should not be specified with all_unused" + ) num_removed = 0 @@ -63,16 +74,25 @@ def cleanup_prebuilds( elif all_unused: num_removed = remove_all_unused( - found_files=prebuild_files, current_files=config.artefact_store[CURRENT_PREBUILDS]) + found_files=prebuild_files, + current_files=config.artefact_store[CURRENT_PREBUILDS] + ) else: # get the file access time for every artefact - prebuilds_ts = \ - dict(zip(prebuild_files, run_mp(config, prebuild_files, get_access_time))) # type: ignore + prebuilds_ts = dict( + zip(prebuild_files, run_mp(config, prebuild_files, get_access_time)) + ) # type: ignore # work out what to delete - to_delete = by_age(older_than, prebuilds_ts, current_files=config.artefact_store[CURRENT_PREBUILDS]) - to_delete |= by_version_age(n_versions, prebuilds_ts, current_files=config.artefact_store[CURRENT_PREBUILDS]) + to_delete = by_age(older_than, + prebuilds_ts, + current_files=config.artefact_store[CURRENT_PREBUILDS]) + to_delete |= by_version_age( + n_versions, + prebuilds_ts, + current_files=config.artefact_store[CURRENT_PREBUILDS] + ) # delete them all run_mp(config, to_delete, os.remove) @@ -83,7 +103,8 @@ def cleanup_prebuilds( def by_age(older_than: Optional[timedelta], - prebuilds_ts: Dict[Path, datetime], current_files: Iterable[Path]) -> Set[Path]: + prebuilds_ts: Dict[Path, datetime], + current_files: Iterable[Path]) -> Set[Path]: to_delete = set() if older_than: @@ -102,7 +123,9 @@ def by_age(older_than: Optional[timedelta], return to_delete -def by_version_age(n_versions: int, prebuilds_ts: Dict[Path, datetime], current_files: Iterable[Path]) -> Set[Path]: +def by_version_age(n_versions: int, + prebuilds_ts: Dict[Path, datetime], + current_files: Iterable[Path]) -> Set[Path]: to_delete = set() if n_versions: diff --git a/source/fab/steps/compile_c.py b/source/fab/steps/compile_c.py index 81e9bef5..358fad76 100644 --- a/source/fab/steps/compile_c.py +++ b/source/fab/steps/compile_c.py @@ -40,7 +40,8 @@ class MpCommonArgs: def compile_c(config, common_flags: Optional[List[str]] = None, path_flags: Optional[List] = None, source: Optional[ArtefactsGetter] = None): """ - Compiles all C files in all build trees, creating or extending a set of compiled files for each target. + Compiles all C files in all build trees, creating or extending a set of compiled + files for each target. This step uses multiprocessing. All C files are compiled in a single pass. @@ -54,11 +55,11 @@ def compile_c(config, common_flags: Optional[List[str]] = None, :param common_flags: A list of strings to be included in the command line call, for all files. :param path_flags: - A list of :class:`~fab.build_config.AddFlags`, defining flags to be included in the command line call - for selected files. + A list of :class:`~fab.build_config.AddFlags`, defining flags to be included + in the command line call for selected files. :param source: - An :class:`~fab.artefacts.ArtefactsGetter` which give us our c files to process. 
-
+        An :class:`~fab.artefacts.ArtefactsGetter` which gives us our c
+        files to process.
     """

     # todo: tell the compiler (and other steps) which artefact name to create?
@@ -85,7 +86,8 @@ def compile_c(config, common_flags: Optional[List[str]] = None,

     compiled_c = list(by_type(compilation_results, CompiledFile))
     logger.info(f"compiled {len(compiled_c)} c files")

-    # record the prebuild files as being current, so the cleanup knows not to delete them
+    # record the prebuild files as being current, so the cleanup knows not to
+    # delete them
     prebuild_files = {r.output_fpath for r in compiled_c}
     config.add_current_prebuilds(prebuild_files)

@@ -94,10 +96,12 @@ def compile_c(config, common_flags: Optional[List[str]] = None,


 # todo: very similar code in fortran compiler
-def store_artefacts(compiled_files: List[CompiledFile], build_lists: Dict[str, List], artefact_store):
+def store_artefacts(compiled_files: List[CompiledFile],
+                    build_lists: Dict[str, List],
+                    artefact_store):
     """
-    Create our artefact collection; object files for each compiled file, per root symbol.
-
+    Create our artefact collection; object files for each compiled file, per
+    root symbol.
     """
     # add the new object files to the artefact store, by target
     lookup = {c.input_fpath: c for c in compiled_files}

@@ -115,16 +119,18 @@ def _compile_file(arg: Tuple[AnalysedC, MpCommonArgs]):
     if not isinstance(compiler, CCompiler):
         raise RuntimeError(f"Unexpected tool '{compiler.name}' of type "
                            f"'{type(compiler)}' instead of CCompiler")
     with Timer() as timer:
         flags = Flags(mp_payload.flags.flags_for_path(path=analysed_file.fpath, config=config))

         obj_combo_hash = _get_obj_combo_hash(compiler, analysed_file, flags)

-        obj_file_prebuild = config.prebuild_folder / f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o'
+        obj_file_prebuild = config.prebuild_folder \
+            / f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o'

         # prebuild available?
         if obj_file_prebuild.exists():
-            log_or_dot(logger, f'CompileC using prebuild: {analysed_file.fpath}')
+            log_or_dot(logger,
+                       f'CompileC using prebuild: {analysed_file.fpath}')
         else:
             obj_file_prebuild.parent.mkdir(parents=True, exist_ok=True)
             log_or_dot(logger, f'CompileC compiling {analysed_file.fpath}')
@@ -132,13 +138,16 @@ def _compile_file(arg: Tuple[AnalysedC, MpCommonArgs]):
                 compiler.compile_file(analysed_file.fpath, obj_file_prebuild,
                                       add_flags=flags)
             except Exception as err:
-                return FabException(f"error compiling {analysed_file.fpath}:\n{err}")
+                return FabException(
+                    f"error compiling {analysed_file.fpath}:\n{err}"
+                )

     send_metric(
         group="compile c",
         name=str(analysed_file.fpath),
         value={'time_taken': timer.taken, 'start': timer.start})
-    return CompiledFile(input_fpath=analysed_file.fpath, output_fpath=obj_file_prebuild)
+    return CompiledFile(input_fpath=analysed_file.fpath,
+                        output_fpath=obj_file_prebuild)


 def _get_obj_combo_hash(compiler, analysed_file, flags: Flags):
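The prebuild naming scheme used by `_compile_file` above, source stem plus a hex combo hash, can be sketched in isolation; the hash value and folder below are invented:

    from pathlib import Path

    stem = Path('src/util.c').stem       # 'util'
    obj_combo_hash = 0x1cf5a12b          # invented; the real hash covers
                                         # source, compiler and flags
    prebuild_folder = Path('prebuild')   # hypothetical location
    obj_file_prebuild = prebuild_folder / f'{stem}.{obj_combo_hash:x}.o'
    # -> prebuild/util.1cf5a12b.o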
+ """ config: BuildConfig flags: FlagsConfig mod_hashes: Dict[str, int] @@ -42,28 +45,33 @@ class MpCommonArgs: @step -def compile_fortran(config: BuildConfig, common_flags: Optional[List[str]] = None, - path_flags: Optional[List] = None, source: Optional[ArtefactsGetter] = None): +def compile_fortran(config: BuildConfig, + common_flags: Optional[List[str]] = None, + path_flags: Optional[List] = None, + source: Optional[ArtefactsGetter] = None): """ - Compiles all Fortran files in all build trees, creating/extending a set of compiled files for each build target. + Compiles all Fortran files in all build trees, creating/extending a set of + compiled files for each build target. - Files are compiled in multiple passes, with each pass enabling further files to be compiled in the next pass. + Files are compiled in multiple passes, with each pass enabling further + files to be compiled in the next pass. Uses multiprocessing, unless disabled in the config. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param common_flags: - A list of strings to be included in the command line call, for all files. + A list of strings to be included in the command line call, for all + files. :param path_flags: - A list of :class:`~fab.build_config.AddFlags`, defining flags to be included in the command line call - for selected files. + A list of :class:`~fab.build_config.AddFlags`, defining flags to be + included in the command line call for selected files. :param source: - An :class:`~fab.artefacts.ArtefactsGetter` which gives us our Fortran files to process. - + An :class:`~fab.artefacts.ArtefactsGetter` which gives us our Fortran + files to process. """ - compiler, flags_config = handle_compiler_args(config, common_flags, path_flags) # Set module output folder: @@ -93,8 +101,11 @@ def compile_fortran(config: BuildConfig, common_flags: Optional[List[str]] = Non f"disabling two-stage compile.") while uncompiled: - uncompiled = compile_pass(config=config, compiled=compiled, uncompiled=uncompiled, - mp_common_args=mp_common_args, mod_hashes=mod_hashes) + uncompiled = compile_pass(config=config, + compiled=compiled, + uncompiled=uncompiled, + mp_common_args=mp_common_args, + mod_hashes=mod_hashes) log_or_dot_finish(logger) if syntax_only: @@ -102,7 +113,8 @@ def compile_fortran(config: BuildConfig, common_flags: Optional[List[str]] = Non mp_common_args.syntax_only = False # a single pass should now compile all the object files in one go - uncompiled = set(sum(build_lists.values(), [])) # todo: order by last compile duration + # todo: order by last compile duration + uncompiled = set(sum(build_lists.values(), [])) mp_args = [(fpath, mp_common_args) for fpath in uncompiled] results_this_pass = run_mp(config, items=mp_args, func=process_file) log_or_dot_finish(logger) @@ -127,29 +139,38 @@ def handle_compiler_args(config: BuildConfig, common_flags=None, # Collate the flags from 1) flags env and 2) parameters. 
env_flags = os.getenv('FFLAGS', '').split() common_flags = env_flags + (common_flags or []) - flags_config = FlagsConfig(common_flags=common_flags, path_flags=path_flags) + flags_config = FlagsConfig(common_flags=common_flags, + path_flags=path_flags) return compiler, flags_config -def compile_pass(config, compiled: Dict[Path, CompiledFile], uncompiled: Set[AnalysedFortran], - mp_common_args: MpCommonArgs, mod_hashes: Dict[str, int]): +def compile_pass(config, + compiled: Dict[Path, CompiledFile], + uncompiled: Set[AnalysedFortran], + mp_common_args: MpCommonArgs, + mod_hashes: Dict[str, int]): # what can we compile next? compile_next = get_compile_next(compiled, uncompiled) # compile - logger.info(f"\ncompiling {len(compile_next)} of {len(uncompiled)} remaining files") + logger.info( + f"\ncompiling {len(compile_next)} of {len(uncompiled)} remaining files" + ) mp_args = [(fpath, mp_common_args) for fpath in compile_next] results_this_pass = run_mp(config, items=mp_args, func=process_file) - # there's a compilation result and a list of prebuild files for each compiled file - compilation_results, prebuild_files = zip(*results_this_pass) if results_this_pass else (tuple(), tuple()) + # there's a compilation result and a list of prebuild files for each + # compiled file. + compilation_results, prebuild_files = zip(*results_this_pass)\ + if results_this_pass else (tuple(), tuple()) check_for_errors(compilation_results, caller_label="compile_pass") compiled_this_pass = list(by_type(compilation_results, CompiledFile)) logger.debug(f"compiled {len(compiled_this_pass)} files") - # record the prebuild files as being current, so the cleanup knows not to delete them + # record the prebuild files as being current, so the cleanup knows not to + # delete them. config.add_current_prebuilds(chain(*prebuild_files)) # hash the modules we just created @@ -164,7 +185,8 @@ def compile_pass(config, compiled: Dict[Path, CompiledFile], uncompiled: Set[Ana return uncompiled -def get_compile_next(compiled: Dict[Path, CompiledFile], uncompiled: Set[AnalysedFortran]) \ +def get_compile_next(compiled: Dict[Path, CompiledFile], + uncompiled: Set[AnalysedFortran]) \ -> Set[AnalysedFortran]: # find what to compile next @@ -172,7 +194,8 @@ def get_compile_next(compiled: Dict[Path, CompiledFile], uncompiled: Set[Analyse not_ready: Dict[Path, List[Path]] = {} for af in uncompiled: # all deps ready? - unfulfilled = [dep for dep in af.file_deps if dep not in compiled and dep.suffix == '.f90'] + unfulfilled = [dep for dep in af.file_deps + if dep not in compiled and dep.suffix == '.f90'] if unfulfilled: not_ready[af.fpath] = unfulfilled else: @@ -195,8 +218,8 @@ def store_artefacts(compiled_files: Dict[Path, CompiledFile], build_lists: Dict[str, List], artefact_store: ArtefactStore): """ - Create our artefact collection; object files for each compiled file, per root symbol. - + Create our artefact collection; object files for each compiled file, per + root symbol. """ # add the new object files to the artefact store, by target lookup = {c.input_fpath: c for c in compiled_files.values()} @@ -209,22 +232,27 @@ def store_artefacts(compiled_files: Dict[Path, CompiledFile], def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \ -> Union[Tuple[CompiledFile, List[Path]], Tuple[Exception, None]]: """ - Prepare to compile a fortran file, and compile it if anything has changed since it was last compiled. + Prepare to compile a fortran file, and compile it if anything has changed + since it was last compiled. 
     Object files are created directly as artefacts in the prebuild folder.
-    Mod files are created in the module folder and copied as artefacts into the prebuild folder.
-    If nothing has changed, prebuilt mod files are copied *from* the prebuild folder into the module folder.
+    Mod files are created in the module folder and copied as artefacts into
+    the prebuild folder. If nothing has changed, prebuilt mod files are copied
+    *from* the prebuild folder into the module folder.

     .. note::

-        Prebuild filenames include a "combo-hash" of everything that, if changed, must trigger a recompile.
-        For mod and object files, this includes a checksum of: *source code, compiler*.
-        For object files, this also includes a checksum of: *compiler flags, modules on which we depend*.
-
-        Before compiling a file, we calculate the combo hashes and see if the output files already exists.
+        Prebuild filenames include a "combo-hash" of everything that, if
+        changed, must trigger a recompile. For mod and object files, this
+        includes a checksum of: *source code, compiler*. For object files,
+        this also includes a checksum of: *compiler flags, modules on which we
+        depend*.

-    Returns a compilation result, regardless of whether it was compiled or prebuilt.
+        Before compiling a file, we calculate the combo hashes and see if the
+        output files already exist.
+
+    Returns a compilation result, regardless of whether it was compiled or
+    prebuilt.
     """
     with Timer() as timer:
         analysed_file, mp_common_args = arg
@@ -234,7 +262,10 @@ def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \
             raise RuntimeError(f"Unexpected tool '{compiler.name}' of type "
                                f"'{type(compiler)}' instead of "
                                f"FortranCompiler")
-        flags = Flags(mp_common_args.flags.flags_for_path(path=analysed_file.fpath, config=config))
+        flags = Flags(
+            mp_common_args.flags.flags_for_path(path=analysed_file.fpath,
+                                                config=config)
+        )

         mod_combo_hash = _get_mod_combo_hash(analysed_file, compiler=compiler)
         obj_combo_hash = _get_obj_combo_hash(analysed_file,
@@ -242,7 +273,8 @@ def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \
                                              compiler=compiler, flags=flags)

         # calculate the incremental/prebuild artefact filenames
-        obj_file_prebuild = mp_common_args.config.prebuild_folder / f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o'
+        object_name = f'{analysed_file.fpath.stem}.{obj_combo_hash:x}.o'
+        obj_file_prebuild = mp_common_args.config.prebuild_folder / object_name
         mod_file_prebuilds = [
             mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod'
             for mod_def in analysed_file.module_defs
@@ -260,26 +292,31 @@ def process_file(arg: Tuple[AnalysedFortran, MpCommonArgs]) \
             except Exception as err:
                 return Exception(f"Error compiling {analysed_file.fpath}:\n{err}"), None

-            # copy the mod files to the prebuild folder as artefacts for reuse
-            # note: perhaps we could sometimes avoid these copies because mods can change less frequently than obj
+            # copy the mod files to the prebuild folder as artefacts for reuse.
+            # note: perhaps we could sometimes avoid these copies because mods
+            # can change less frequently than object files.
for mod_def in analysed_file.module_defs: + hashed_module_name = f'{mod_def}.{mod_combo_hash:x}.mod' shutil.copy2( mp_common_args.config.build_output / f'{mod_def}.mod', - mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod', + mp_common_args.config.prebuild_folder / hashed_module_name, ) else: - log_or_dot(logger, f'CompileFortran using prebuild: {analysed_file.fpath}') + log_or_dot(logger, + f'CompileFortran using prebuild: {analysed_file.fpath}') # copy the prebuilt mod files from the prebuild folder for mod_def in analysed_file.module_defs: + hashed_module_name = f'{mod_def}.{mod_combo_hash:x}.mod' shutil.copy2( - mp_common_args.config.prebuild_folder / f'{mod_def}.{mod_combo_hash:x}.mod', + mp_common_args.config.prebuild_folder / hashed_module_name, mp_common_args.config.build_output / f'{mod_def}.mod', ) # return the results - compiled_file = CompiledFile(input_fpath=analysed_file.fpath, output_fpath=obj_file_prebuild) + compiled_file = CompiledFile(input_fpath=analysed_file.fpath, + output_fpath=obj_file_prebuild) artefacts = [obj_file_prebuild] + mod_file_prebuilds metric_name = "compile fortran" @@ -299,7 +336,9 @@ def _get_obj_combo_hash(analysed_file, mp_common_args: MpCommonArgs, # get a combo hash of things which matter to the object file we define # todo: don't just silently use 0 for a missing dep hash mod_deps_hashes = { - mod_dep: mp_common_args.mod_hashes.get(mod_dep, 0) for mod_dep in analysed_file.module_deps} + mod_dep: mp_common_args.mod_hashes.get(mod_dep, 0) + for mod_dep in analysed_file.module_deps + } try: obj_combo_hash = sum([ analysed_file.file_hash, @@ -345,7 +384,8 @@ def compile_file(analysed_file, flags, output_fpath, mp_common_args): syntax_only=mp_common_args.syntax_only) -def get_mod_hashes(analysed_files: Set[AnalysedFortran], config) -> Dict[str, int]: +def get_mod_hashes(analysed_files: Set[AnalysedFortran], + config) -> Dict[str, int]: """ Get the hash of every module file defined in the list of analysed files. diff --git a/source/fab/steps/find_source_files.py b/source/fab/steps/find_source_files.py index 25191d5f..ab8ea551 100644 --- a/source/fab/steps/find_source_files.py +++ b/source/fab/steps/find_source_files.py @@ -5,7 +5,6 @@ ############################################################################## """ Gather files from a source folder. - """ import logging from typing import Optional, Iterable @@ -39,8 +38,8 @@ def check(self, path): class Include(_PathFilter): """ - A path filter which includes matching paths, this convenience class improves config readability. - + A path filter which includes matching paths, this convenience class + improves config readability. """ def __init__(self, *filter_strings): """ @@ -56,10 +55,9 @@ def __str__(self): class Exclude(_PathFilter): """ - A path filter which excludes matching paths, this convenience class improves config readability. - + A path filter which excludes matching paths, this convenience class + improves config readability. """ - def __init__(self, *filter_strings): """ :param filter_strings: @@ -73,7 +71,9 @@ def __str__(self): @step -def find_source_files(config, source_root=None, output_collection="all_source", +def find_source_files(config, + source_root=None, + output_collection="all_source", path_filters: Optional[Iterable[_PathFilter]] = None): """ Find the files in the source folder, with filtering. 
@@ -81,9 +81,10 @@ def find_source_files(config, source_root=None, output_collection="all_source", Files can be included or excluded with simple pattern matching. Every file is included by default, unless the filters say otherwise. - Path filters are expected to be provided by the user in an *ordered* collection. - The two convenience subclasses, :class:`~fab.steps.walk_source.Include` and :class:`~fab.steps.walk_source.Exclude`, - improve readability. + Path filters are expected to be provided by the user in an *ordered* + collection. The two convenience subclasses, + :class:`~fab.steps.walk_source.Include` and + :class:`~fab.steps.walk_source.Exclude`, improve readability. Order matters. For example:: @@ -92,14 +93,17 @@ def find_source_files(config, source_root=None, output_collection="all_source", Include('my_folder/my_file.F90'), ] - In the above example, swapping the order would stop the file being included in the build. + In the above example, swapping the order would stop the file being + included in the build. - A path matches a filter string simply if it *contains* it, - so the path *my_folder/my_file.F90* would match filters "my_folder", "my_file" and "er/my". + A path matches a filter string simply if it *contains* it, so the path + *my_folder/my_file.F90* would match filters "my_folder", "my_file" and + "er/my". :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. :param source_root: Optional path to source folder, with a sensible default. :param output_collection: @@ -116,19 +120,23 @@ def find_source_files(config, source_root=None, output_collection="all_source", Recursively get all files in the given folder, with filtering. :param artefact_store: - Contains artefacts created by previous Steps, and where we add our new artefacts. - This is where the given :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process. + Contains artefacts created by previous Steps, and where we add our new + artefacts. This is where the given + :class:`~fab.artefacts.ArtefactsGetter` finds the artefacts to process. :param config: - The :class:`fab.build_config.BuildConfig` object where we can read settings - such as the project workspace folder or the multiprocessing flag. + The :class:`fab.build_config.BuildConfig` object where we can read + settings such as the project workspace folder or the multiprocessing + flag. """ source_root = source_root or config.source_root # file filtering filtered_fpaths = [] - # todo: we shouldn't need to ignore the prebuild folder here, it's not underneath the source root. - for fpath in file_walk(source_root, ignore_folders=[config.prebuild_folder]): + # todo: we shouldn't need to ignore the prebuild folder here, it's not + # underneath the source root. + for fpath in file_walk(source_root, + ignore_folders=[config.prebuild_folder]): wanted = True for path_filter in path_filters: diff --git a/source/fab/steps/grab/archive.py b/source/fab/steps/grab/archive.py index 5609434e..aba16997 100644 --- a/source/fab/steps/grab/archive.py +++ b/source/fab/steps/grab/archive.py @@ -18,8 +18,10 @@ def grab_archive(config, src: Union[Path, str], dst_label: str = ''): :param src: The source archive to grab from. :param dst_label: - The name of a sub folder, in the project workspace, in which to put the source. 
-        If not specified, the code is copied into the root of the source folder.
+        The name of a sub folder, in the project workspace, in which to put
+        the source.
+        If not specified, the code is copied into the root of the source
+        folder.
     :param name:
         Human friendly name for logger output, with sensible default.

diff --git a/source/fab/steps/grab/folder.py b/source/fab/steps/grab/folder.py
index d745a3c5..a7212b9f 100644
--- a/source/fab/steps/grab/folder.py
+++ b/source/fab/steps/grab/folder.py
@@ -16,13 +16,17 @@ def grab_folder(config, src: Union[Path, str], dst_label: str = ''):
     Copy a source folder to the project workspace.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings
         such as the project workspace folder or the multiprocessing flag.
     :param src:
-        The source location to grab. The nature of this parameter is depends on the subclass.
+        The source location to grab. The nature of this parameter depends
+        on the subclass.
     :param dst_label:
-        The name of a sub folder, in the project workspace, in which to put the source.
-        If not specified, the code is copied into the root of the source folder.
+        The name of a sub folder, in the project workspace, in which to put
+        the source.
+        If not specified, the code is copied into the root of the source
+        folder.

     """
     _dst = config.source_root / dst_label

diff --git a/source/fab/steps/grab/svn.py b/source/fab/steps/grab/svn.py
index d6b3f45b..d443b688 100644
--- a/source/fab/steps/grab/svn.py
+++ b/source/fab/steps/grab/svn.py
@@ -18,8 +18,10 @@
 from fab.tools import Category, Subversion, Tool, Versioning


-def split_repo_url(url: str,
-                   revision: Optional[str] = None) -> Tuple[str, Optional[str]]:
+def split_repo_url(
+    url: str,
+    revision: Optional[str] = None
+) -> Tuple[str, Optional[str]]:
     """
     Pull out the revision if it's part of the url.

diff --git a/source/fab/steps/preprocess.py b/source/fab/steps/preprocess.py
index 11777e96..8d294cfd 100644
--- a/source/fab/steps/preprocess.py
+++ b/source/fab/steps/preprocess.py
@@ -46,8 +46,9 @@ def pre_processor(config: BuildConfig, preprocessor: Preprocessor,
     Uses multiprocessing, unless disabled in the config.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
-        such as the project workspace folder or the multiprocessing flag.
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings such as the project workspace folder or the multiprocessing
+        flag.
     :param preprocessor:
         The preprocessor executable.
     :param files:
@@ -110,36 +111,47 @@ def process_artefact(arg: Tuple[Path, MpCommonArgs]):
     else:
         output_fpath.parent.mkdir(parents=True, exist_ok=True)

-        params = args.flags.flags_for_path(path=input_fpath, config=args.config)
+        params = args.flags.flags_for_path(path=input_fpath,
+                                           config=args.config)
         log_or_dot(logger,
                    f"PreProcessor running with parameters: "
                    f"'{' '.join(params)}'.")

         try:
             args.preprocessor.preprocess(input_fpath, output_fpath, params)
         except Exception as err:
-            raise Exception(f"error preprocessing {input_fpath}:\n{err}") from err
+            raise Exception(
+                f"error preprocessing {input_fpath}:\n{err}"
+            ) from err

-    send_metric(args.name, str(input_fpath), {'time_taken': timer.taken, 'start': timer.start})
+    send_metric(args.name,
+                str(input_fpath),
+                {'time_taken': timer.taken, 'start': timer.start})
     return output_fpath


 # todo: rename preprocess_fortran
 @step
-def preprocess_fortran(config: BuildConfig, source: Optional[ArtefactsGetter] = None, **kwargs):
+def preprocess_fortran(config: BuildConfig,
+                       source: Optional[ArtefactsGetter] = None,
+                       **kwargs):
     """
     Wrapper to pre_processor for Fortran files.

     Ensures all preprocessed files are in the build output.
-    This means *copying* already preprocessed files from source to build output.
+    This means *copying* already preprocessed files from source to build
+    output.

     Params as per :func:`~fab.steps.preprocess._pre_processor`.

-    The preprocessor is taken from the `FPP` environment, or falls back to `fpp -P`.
+    The preprocessor is taken from the `FPP` environment variable, or falls
+    back to `fpp -P`.

-    If source is not provided, it defaults to `SuffixFilter('all_source', '.F90')`.
+    If source is not provided, it defaults to
+    `SuffixFilter('all_source', '.F90')`.

     """
-    source_getter = source or SuffixFilter('all_source', ['.F90', '.f90'])
+    source_getter = source or SuffixFilter('all_source',
+                                           ['.F90', '.f90'])
     source_files = source_getter(config.artefact_store)
     F90s = suffix_filter(source_files, '.F90')
     f90s = suffix_filter(source_files, '.f90')
@@ -149,7 +161,8 @@ def preprocess_fortran(config: BuildConfig, source: Optional[ArtefactsGetter] =
         raise RuntimeError(f"Unexpected tool '{fpp.name}' of type "
                            f"'{type(fpp)}' instead of CppFortran")

-    # make sure any flags from FPP are included in any common flags specified by the config
+    # make sure any flags from FPP are included in any common flags specified
+    # by the config
     try:
         common_flags = kwargs.pop('common_flags')
     except KeyError:
@@ -168,7 +181,9 @@ def preprocess_fortran(config: BuildConfig, source: Optional[ArtefactsGetter] =

     # todo: parallel copy?
     # copy little f90s from source to output folder
-    logger.info(f'Fortran preprocessor copying {len(f90s)} files to build_output')
+    logger.info(
+        f'Fortran preprocessor copying {len(f90s)} files to build_output'
+    )
     for f90 in f90s:
         output_path = input_to_output_fpath(config, input_path=f90)
         if output_path != f90:
@@ -181,8 +196,9 @@ def preprocess_fortran(config: BuildConfig, source: Optional[ArtefactsGetter] =
 class DefaultCPreprocessorSource(ArtefactsGetter):
     """
     A source getter specifically for c preprocessing.
-    Looks for the default output from pragma injection, falls back to default source finder.
-    This allows the step to work with or without a preceding pragma step.
+    Looks for the default output from pragma injection, falls back to default
+    source finder. This allows the step to work with or without a preceding
+    pragma step.
""" def __call__(self, artefact_store): @@ -198,9 +214,11 @@ def preprocess_c(config: BuildConfig, source=None, **kwargs): Params as per :func:`~fab.steps.preprocess._pre_processor`. - The preprocessor is taken from the `CPP` environment, or falls back to `cpp`. + The preprocessor is taken from the `CPP` environment, or falls back to + `cpp`. - If source is not provided, it defaults to :class:`~fab.steps.preprocess.DefaultCPreprocessorSource`. + If source is not provided, it defaults to + :class:`~fab.steps.preprocess.DefaultCPreprocessorSource`. """ source_getter = source or DefaultCPreprocessorSource() diff --git a/source/fab/steps/psyclone.py b/source/fab/steps/psyclone.py index 0db38b3d..e04869f9 100644 --- a/source/fab/steps/psyclone.py +++ b/source/fab/steps/psyclone.py @@ -25,8 +25,15 @@ from fab.steps import run_mp, check_for_errors, step from fab.steps.preprocess import pre_processor from fab.tools import Category, Psyclone -from fab.util import log_or_dot, input_to_output_fpath, file_checksum, file_walk, TimerLogger, \ - string_checksum, suffix_filter, by_type, log_or_dot_finish +from fab.util import (log_or_dot, + input_to_output_fpath, + file_checksum, + file_walk, + TimerLogger, + string_checksum, + suffix_filter, + by_type, + log_or_dot_finish) logger = logging.getLogger(__name__) @@ -37,7 +44,8 @@ def preprocess_x90(config, common_flags: Optional[List[str]] = None): # get the tool from FPP fpp = config.tool_box[Category.FORTRAN_PREPROCESSOR] - source_files = SuffixFilter('all_source', '.X90')(config.artefact_store) + source_files = SuffixFilter('all_source', + '.X90')(config.artefact_store) pre_processor( config, @@ -67,42 +75,54 @@ class MpCommonArgs: all_kernel_hashes: Dict[str, int] overrides_folder: Optional[Path] - override_files: List[str] # filenames (not paths) of hand crafted overrides + override_files: List[str] # filenames (not paths) of handcrafted overrides DEFAULT_SOURCE_GETTER = CollectionConcat([ - 'preprocessed_x90', # any X90 we've preprocessed this run - SuffixFilter('all_source', '.x90'), # any already preprocessed x90 we pulled in + # any X90 we've preprocessed this run + 'preprocessed_x90', + # any already preprocessed x90 we pulled in + SuffixFilter('all_source', '.x90'), ]) @step -def psyclone(config, kernel_roots: Optional[List[Path]] = None, - transformation_script: Optional[Callable[[Path, BuildConfig], Path]] = None, - cli_args: Optional[List[str]] = None, - source_getter: Optional[ArtefactsGetter] = None, - overrides_folder: Optional[Path] = None): +def psyclone( + config, + kernel_roots: Optional[List[Path]] = None, + transformation_script: Optional[Callable[[Path, BuildConfig], Path]] = None, + cli_args: Optional[List[str]] = None, + source_getter: Optional[ArtefactsGetter] = None, + overrides_folder: Optional[Path] = None +): """ Psyclone runner step. .. note:: - This step produces Fortran, so it must be run before the :class:`~fab.steps.analyse.Analyse` step. + This step produces Fortran, so it must be run before the + :class:`~fab.steps.analyse.Analyse` step. This step stores prebuilt results to speed up subsequent builds. - To generate the prebuild hashes, it analyses the X90 and kernel files, storing prebuilt results for these also. + To generate the prebuild hashes, it analyses the X90 and kernel files, + storing prebuilt results for these also. 
-    Kernel files are just normal Fortran, and the standard Fortran analyser is used to analyse them
+    Kernel files are just normal Fortran, and the standard Fortran analyser is
+    used to analyse them.

     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
-        such as the project workspace folder or the multiprocessing flag.
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings such as the project workspace folder or the multiprocessing
+        flag.
     :param kernel_roots:
-        Folders containing kernel files. Must be part of the analysed source code.
+        Folders containing kernel files. Must be part of the analysed source
+        code.
     :param transformation_script:
         The function used to get the Python transformation script.
-        It takes in a file path and the config object, and returns the path of the transformation script or None.
-        If no function is given or the function returns None, no script will be applied and PSyclone still runs.
+        It takes in a file path and the config object, and returns the path of
+        the transformation script or None.
+        If no function is given or the function returns None, no script will
+        be applied and PSyclone still runs.
     :param cli_args:
         Passed through to the psyclone cli tool.
     :param source_getter:
@@ -110,7 +130,8 @@ def psyclone(config, kernel_roots: Optional[List[Path]] = None,
     :param overrides_folder:
         Optional folder containing hand-crafted override files.
         Must be part of the subsequently analysed source code.
-        Any file produced by psyclone will be deleted if there is a corresponding file in this folder.
+        Any file produced by psyclone will be deleted if there is a
+        corresponding file in this folder.

     """
     kernel_roots = kernel_roots or []

@@ -126,12 +147,19 @@ def psyclone(config, kernel_roots: Optional[List[Path]] = None,

     # analyse the kernel files,
     all_kernel_hashes = _analyse_kernels(config, kernel_roots)

-    # get the data in a payload object for child processes to calculate prebuild hashes
-    mp_payload = _generate_mp_payload(
-        config, analysed_x90, all_kernel_hashes, overrides_folder, kernel_roots, transformation_script, cli_args)
+    # get the data in a payload object for child processes to calculate
+    # prebuild hashes
+    mp_payload = _generate_mp_payload(config,
+                                      analysed_x90,
+                                      all_kernel_hashes,
+                                      overrides_folder,
+                                      kernel_roots,
+                                      transformation_script,
+                                      cli_args)

     # run psyclone.
-    # for every file, we get back a list of its output files plus a list of the prebuild copies.
+    # for every file, we get back a list of its output files plus a list of
+    # the prebuild copies.
     mp_arg = [(x90, mp_payload) for x90 in x90s]
     with TimerLogger(f"running psyclone on {len(x90s)} x90 files"):
         results = run_mp(config, mp_arg, do_one_file)
@@ -148,18 +176,25 @@ def psyclone(config, kernel_roots: Optional[List[Path]] = None,

     outputs_str = "\n".join(map(str, output_files))
     logger.debug(f'psyclone outputs:\n{outputs_str}\n')

-    # mark the prebuild files as being current so the cleanup step doesn't delete them
+    # mark the prebuild files as being current so the cleanup step doesn't
+    # delete them
     config.add_current_prebuilds(prebuild_files)

     prebuilds_str = "\n".join(map(str, prebuild_files))
     logger.debug(f'psyclone prebuilds:\n{prebuilds_str}\n')

-    # todo: delete any psy layer files which have hand-written overrides, in a given overrides folder
+    # todo: delete any psy layer files which have hand-written overrides, in a
+    # given overrides folder.

     # is this called psykal?
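The *transformation_script* hook documented above can be illustrated with a hypothetical layout, where optimisation scripts sit under an `optimisation/` folder and are named after each x90 file; everything here is an assumption for illustration:

    from pathlib import Path
    from typing import Optional

    from fab.build_config import BuildConfig

    def get_transformation_script(fpath: Path,
                                  config: BuildConfig) -> Optional[Path]:
        # returning None means "run PSyclone without a script" for this file
        candidate = (config.source_root / 'optimisation'
                     / fpath.with_suffix('.py').name)
        return candidate if candidate.exists() else None

    # then, in the build config:
    #   psyclone(state, transformation_script=get_transformation_script)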
     # assert False
 
 
-def _generate_mp_payload(config, analysed_x90, all_kernel_hashes, overrides_folder, kernel_roots,
-                         transformation_script, cli_args) -> MpCommonArgs:
+def _generate_mp_payload(config,
+                         analysed_x90,
+                         all_kernel_hashes,
+                         overrides_folder,
+                         kernel_roots,
+                         transformation_script,
+                         cli_args) -> MpCommonArgs:
     override_files: List[str] = []
     if overrides_folder:
         override_files = [f.name for f in file_walk(overrides_folder)]
@@ -187,20 +222,27 @@ def _analyse_x90s(config, x90s: Set[Path]) -> Dict[Path, AnalysedX90]:
     x90_analyser = X90Analyser()
     x90_analyser._config = config
     with TimerLogger(f"analysing {len(parsable_x90s)} parsable x90 files"):
-        x90_results = run_mp(config, items=parsable_x90s, func=x90_analyser.run)
+        x90_results = run_mp(config,
+                             items=parsable_x90s,
+                             func=x90_analyser.run)
         log_or_dot_finish(logger)
-    x90_analyses, x90_artefacts = zip(*x90_results) if x90_results else ((), ())
+    x90_analyses, x90_artefacts = (zip(*x90_results)
+                                   if x90_results else ((), ()))
     check_for_errors(results=x90_analyses)
 
-    # mark the analysis results files (i.e. prebuilds) as being current, so the cleanup knows not to delete them
+    # mark the analysis results files (i.e. prebuilds) as being current, so
+    # the cleanup knows not to delete them
     prebuild_files = list(by_type(x90_artefacts, Path))
     config.add_current_prebuilds(prebuild_files)
 
-    # record the analysis results against the original x90 filenames (not the parsable versions we analysed)
+    # record the analysis results against the original x90 filenames (not the
+    # parsable versions we analysed)
     analysed_x90 = by_type(x90_analyses, AnalysedX90)
-    analysed_x90 = {result.fpath.with_suffix('.x90'): result for result in analysed_x90}
+    analysed_x90 = {result.fpath.with_suffix('.x90'): result
+                    for result in analysed_x90}
 
-    # make the hashes from the original x90s, not the parsable versions which have invoke names removed.
+    # make the hashes from the original x90s, not the parsable versions which
+    # have invoke names removed.
     for p, r in analysed_x90.items():
         analysed_x90[p]._file_hash = file_checksum(p).file_hash
 
@@ -212,58 +254,77 @@ def _analyse_kernels(config, kernel_roots) -> Dict[str, int]:
     We want to hash the kernel metadata (type defs).
     Kernel metadata are type definitions passed to invoke().
 
-    For example, this x90 code depends on the kernel `compute_total_mass_kernel_type`.
+    For example, this x90 code depends on the kernel
+    `compute_total_mass_kernel_type`.
+
     .. code-block:: fortran
 
-        call invoke( name = "compute_dry_mass",                                  &
-                     compute_total_mass_kernel_type(dry_mass, rho, chi, panel_id, qr), &
+        call invoke( name = "compute_dry_mass",                         &
+                     compute_total_mass_kernel_type(dry_mass, rho,      &
+                                                    chi, panel_id, qr), &
                      sum_X(total_dry, dry_mass))
 
     We can see this kernel in a use statement at the top of the x90.
+
     .. code-block:: fortran
 
-        use compute_total_mass_kernel_mod,   only: compute_total_mass_kernel_type
+        use compute_total_mass_kernel_mod, only: compute_total_mass_kernel_type
 
-    Some kernels, such as `setval_c`, are
-    `PSyclone built-ins <https://psyclone.readthedocs.io/en/stable/dynamo0p3.html#built-ins>`_.
+    Some kernels, such as `setval_c`, are `PSyclone built-ins`_.
     They will not appear in use statements and can be ignored.
 
-    The Psyclone and Analyse steps both use the generic Fortran analyser, which recognises Psyclone kernel metadata.
-    The Analysis step must come after this step because it needs to analyse the fortran we create.
+    .. _PSyclone built-ins: https://psyclone.readthedocs.io/en/stable/dynamo0p3.html#built-ins
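+
+    This function returns a mapping from kernel name to a hash of that
+    kernel's metadata, along these lines (the value is illustrative):
+
+    .. code-block:: python
+
+        {'compute_total_mass_kernel_type': 4013668564}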
+
+    The Psyclone and Analyse steps both use the generic Fortran analyser,
+    which recognises Psyclone kernel metadata.
+    The Analysis step must come after this step because it needs to analyse
+    the Fortran we create.
     """
-    # Ignore the prebuild folder. Todo: test the prebuild folder is ignored, in case someone breaks this.
-    file_lists = [list(file_walk(root, ignore_folders=[config.prebuild_folder])) for root in kernel_roots]
+    # Ignore the prebuild folder.
+    # Todo: test the prebuild folder is ignored, in case someone breaks this.
+    file_lists = [
+        list(file_walk(root, ignore_folders=[config.prebuild_folder]))
+        for root in kernel_roots
+    ]
     all_kernel_files: Set[Path] = set(sum(file_lists, []))
     kernel_files: List[Path] = suffix_filter(all_kernel_files, ['.f90'])
 
-    # We use the normal Fortran analyser, which records psyclone kernel metadata.
-    # todo: We'd like to separate that from the general fortran analyser at some point, to reduce coupling.
-    # The Analyse step also uses the same fortran analyser. It stores its results so they won't be analysed twice.
+    # We use the normal Fortran analyser, which records psyclone kernel
+    # metadata.
+    # todo: We'd like to separate that from the general fortran analyser at
+    # some point, to reduce coupling.
+    # The Analyse step also uses the same fortran analyser. It stores its
+    # results so they won't be analysed twice.
    fortran_analyser = FortranAnalyser()
    fortran_analyser._config = config
-    with TimerLogger(f"analysing {len(kernel_files)} potential psyclone kernel files"):
-        fortran_results = run_mp(config, items=kernel_files, func=fortran_analyser.run)
+    with TimerLogger(
+        f"analysing {len(kernel_files)} potential psyclone kernel files"
+    ):
+        fortran_results = run_mp(config,
+                                 items=kernel_files,
+                                 func=fortran_analyser.run)
         log_or_dot_finish(logger)
-    fortran_analyses, fortran_artefacts = zip(*fortran_results) if fortran_results else (tuple(), tuple())
+    fortran_analyses, fortran_artefacts = (zip(*fortran_results)
+                                           if fortran_results
+                                           else (tuple(), tuple()))
 
     errors: List[Exception] = list(by_type(fortran_analyses, Exception))
     if errors:
         errs_str = '\n\n'.join(map(str, errors))
-        logger.error(f"There were {len(errors)} errors while parsing kernels:\n\n{errs_str}")
+        logger.error(f"There were {len(errors)} errors "
+                     f"while parsing kernels:\n\n{errs_str}")
 
-    # mark the analysis results files (i.e. prebuilds) as being current, so the cleanup knows not to delete them
+    # mark the analysis results files (i.e. prebuilds) as being current, so
+    # the cleanup knows not to delete them.
     prebuild_files = list(by_type(fortran_artefacts, Path))
     config.add_current_prebuilds(prebuild_files)
 
-    analysed_fortran: List[AnalysedFortran] = list(by_type(fortran_analyses, AnalysedFortran))
+    analysed_fortran: List[AnalysedFortran] = list(by_type(fortran_analyses,
+                                                           AnalysedFortran))
 
     # gather all kernel hashes into one big lump
     all_kernel_hashes: Dict[str, int] = {}
     for af in analysed_fortran:
         assert set(af.psyclone_kernels).isdisjoint(all_kernel_hashes), \
-            f"duplicate kernel name(s): {set(af.psyclone_kernels) & set(all_kernel_hashes)}"
+            ("duplicate kernel name(s): "
+             f"{set(af.psyclone_kernels) & set(all_kernel_hashes)}")
         all_kernel_hashes.update(af.psyclone_kernels)
 
     return all_kernel_hashes
@@ -276,15 +337,21 @@ def do_one_file(arg: Tuple[Path, MpCommonArgs]):
 
     # These are the filenames we expect to be output for this x90 input file.
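+    # (e.g. for a hypothetical algo.x90: algo.f90 for the modified algorithm
+    # and algo_psy.f90 for the generated psy layer.)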
     # There will always be one modified_alg, and 0-1 generated psy file.
     modified_alg: Path = x90_file.with_suffix('.f90')
-    modified_alg = input_to_output_fpath(config=mp_payload.config, input_path=modified_alg)
+    modified_alg = input_to_output_fpath(config=mp_payload.config,
+                                         input_path=modified_alg)
     psy_file: Path = x90_file.parent / (str(x90_file.stem) + '_psy.f90')
-    psy_file = input_to_output_fpath(config=mp_payload.config, input_path=psy_file)
+    psy_file = input_to_output_fpath(config=mp_payload.config,
+                                     input_path=psy_file)
     psy_file.parent.mkdir(parents=True, exist_ok=True)
 
     # do we already have prebuilt results for this x90 file?
     prebuilt_alg, prebuilt_gen = _get_prebuild_paths(
-        mp_payload.config.prebuild_folder, modified_alg, psy_file, prebuild_hash)
+        mp_payload.config.prebuild_folder,
+        modified_alg,
+        psy_file,
+        prebuild_hash
+    )
     if prebuilt_alg.exists():
         # todo: error handling in here
         msg = f'found prebuilds for {x90_file}:\n {prebuilt_alg}'
@@ -333,7 +400,8 @@ def do_one_file(arg: Tuple[Path, MpCommonArgs]):
         result.append(psy_file)
 
     # we also want to return the prebuild artefact files we created,
-    # which are just copies, in the prebuild folder, with hashes in the filenames.
+    # which are just copies, in the prebuild folder, with hashes in the
+    # filenames.
     prebuild_result: List[Path] = [prebuilt_alg, prebuilt_gen]
 
     return result, prebuild_result
@@ -341,7 +409,8 @@ def do_one_file(arg: Tuple[Path, MpCommonArgs]):
 
 def _gen_prebuild_hash(x90_file: Path, mp_payload: MpCommonArgs):
     """
-    Calculate the prebuild hash for this x90 file, based on all the things which should trigger reprocessing.
+    Calculate the prebuild hash for this x90 file, based on all the things
+    which should trigger reprocessing.
 
     Changes which must trigger reprocessing of an x90 file:
       - x90 source:
@@ -355,29 +424,36 @@ def _gen_prebuild_hash(x90_file: Path, mp_payload: MpCommonArgs):
 
     # include the hashes of kernels used by this x90
     kernel_deps_hashes = {
-        mp_payload.all_kernel_hashes[kernel_name] for kernel_name in analysis_result.kernel_deps}  # type: ignore
+        mp_payload.all_kernel_hashes[kernel_name]
+        for kernel_name in analysis_result.kernel_deps
+    }  # type: ignore
 
     # calculate the transformation script hash for this file
-    transformation_script_hash = 0
+    trans_script_hash = 0
     if mp_payload.transformation_script:
-        transformation_script_return_path = mp_payload.transformation_script(x90_file, mp_payload.config)
-        if transformation_script_return_path:
-            transformation_script_hash = file_checksum(transformation_script_return_path).file_hash
-    if transformation_script_hash == 0:
+        trans_script_return_path = mp_payload.transformation_script(
+            x90_file,
+            mp_payload.config
+        )
+        if trans_script_return_path:
+            trans_script_hash = file_checksum(
+                trans_script_return_path).file_hash
+    if trans_script_hash == 0:
         warnings.warn('no transformation script specified')
 
     # hash everything which should trigger re-processing
     # todo: hash the psyclone version in case the built-in kernels change?
     prebuild_hash = sum([
-        # the hash of the x90 (not of the parsable version, so includes invoke names)
+        # the hash of the x90 (not of the parsable version, so includes
+        # invoke names)
         analysis_result.file_hash,
 
         # the hashes of the kernels used by this x90
         sum(kernel_deps_hashes),
 
         # the hash of the transformation script for this x90
-        transformation_script_hash,
+        trans_script_hash,
 
         # command-line arguments
         string_checksum(str(mp_payload.cli_args)),
@@ -386,9 +462,14 @@ def _gen_prebuild_hash(x90_file: Path, mp_payload: MpCommonArgs):
 
     return prebuild_hash
 
 
-def _get_prebuild_paths(prebuild_folder, modified_alg, psy_file, prebuild_hash):
-    prebuilt_alg = Path(prebuild_folder / f'{modified_alg.stem}.{prebuild_hash}{modified_alg.suffix}')
-    prebuilt_gen = Path(prebuild_folder / f'{psy_file.stem}.{prebuild_hash}{psy_file.suffix}')
+def _get_prebuild_paths(prebuild_folder: Path,
+                        modified_alg: Path,
+                        psy_file: Path,
+                        prebuild_hash: int) -> Tuple[Path, Path]:
+    hashed_alg_name = (
+        f'{modified_alg.stem}.{prebuild_hash}{modified_alg.suffix}')
+    prebuilt_alg = prebuild_folder / hashed_alg_name
+    hashed_psy_name = f'{psy_file.stem}.{prebuild_hash}{psy_file.suffix}'
+    prebuilt_gen = prebuild_folder / hashed_psy_name
     return prebuilt_alg, prebuilt_gen
@@ -413,7 +494,8 @@ def _check_override(check_path: Path, mp_payload: MpCommonArgs):
 
     return check_path
 
 
-# regex to convert an x90 into parsable fortran, so it can be analysed using a third party tool
+# regex to convert an x90 into parsable fortran, so it can be analysed using a
+# third-party tool.
 
 WHITE = r'[\s&]+'
 OPT_WHITE = r'[\s&]*'
@@ -422,13 +504,16 @@ def _check_override(check_path: Path, mp_payload: MpCommonArgs):
 DQ_STRING = '"[^"]*"'
 STRING = f'({SQ_STRING}|{DQ_STRING})'
 
-NAME_KEYWORD = 'name' + OPT_WHITE + '=' + OPT_WHITE + STRING + OPT_WHITE + ',' + OPT_WHITE
-NAMED_INVOKE = 'call' + WHITE + 'invoke' + OPT_WHITE + r'\(' + OPT_WHITE + NAME_KEYWORD
+NAME_KEYWORD = ('name' + OPT_WHITE +
+                '=' + OPT_WHITE + STRING + OPT_WHITE + ',' + OPT_WHITE)
+NAMED_INVOKE = ('call' + WHITE + 'invoke' + OPT_WHITE +
+                r'\(' + OPT_WHITE + NAME_KEYWORD)
 
 _x90_compliance_pattern = None
 
 
-# todo: In the future, we'd like to extend fparser to handle the leading invoke keywords. (Lots of effort.)
+# todo: In the future, we'd like to extend fparser to handle the leading
+# invoke keywords. (Lots of effort.)
 def make_parsable_x90(x90_path: Path) -> Path:
     """
     Take out the leading name keyword in calls to invoke(), making temporary, parsable fortran from x90s.
@@ -448,18 +533,24 @@ def make_parsable_x90(x90_path: Path) -> Path:
 
     # src = open(x90_path, 'rt').read()
 
-    # Before we remove the name keywords to invoke, we must remove any comment lines.
-    # This is the simplest way to avoid producing bad fortran when the name keyword is followed by a comment line.
-    # I.e. The comment line doesn't have an "&", so we get "call invoke(!" with no "&", which is a syntax error.
+    # Before we remove the name keywords to invoke, we must remove any comment
+    # lines.
+    # This is the simplest way to avoid producing bad fortran when the name
+    # keyword is followed by a comment line.
+    # I.e. the comment line doesn't have an "&", so we get
+    # "call invoke(!" with no "&", which is a syntax error.
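+    # For example, given this (illustrative) x90 fragment:
+    #
+    #     call invoke( name = "compute_dry_mass", &
+    #                  ! set up the dry mass
+    #                  sum_X(total_dry, dry_mass))
+    #
+    # stripping the name keyword without first removing the comment would
+    # leave "call invoke(! set up the dry mass" with no trailing "&".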
     src_lines = open(x90_path, 'rt').readlines()
-    no_comment_lines = [line for line in src_lines if not line.lstrip().startswith('!')]
+    no_comment_lines = [line for line in src_lines
+                        if not line.lstrip().startswith('!')]
     src = ''.join(no_comment_lines)
 
     replaced = []
 
     def repl(matchobj):
-        # matchobj[0] contains the entire matching string, from "call" to the "," after the name keyword.
-        # matchobj[1] contains the single group in the search pattern, which is defined in STRING.
+        # matchobj[0] contains the entire matching string, from "call" to the
+        # "," after the name keyword.
+        # matchobj[1] contains the single group in the search pattern,
+        # which is defined in STRING.
         name = matchobj[1].replace('"', '').replace("'", "")
         replaced.append(name)
         return 'call invoke('
diff --git a/source/fab/steps/root_inc_files.py b/source/fab/steps/root_inc_files.py
index 9ed53df4..eee28fba 100644
--- a/source/fab/steps/root_inc_files.py
+++ b/source/fab/steps/root_inc_files.py
@@ -4,11 +4,11 @@
 # which you should have received as part of this distribution
 ##############################################################################
 """
-A helper step to copy .inc files to the root of the build source folder, for easy include by the preprocessor.
-
-Currently only used for building JULES, .inc files are due to be removed from dev practices,
-at which point this step should be deprecated.
+A helper step to copy .inc files to the root of the build source folder, for
+easy include by the preprocessor.
+
+Currently only used for building JULES; .inc files are due to be removed from
+dev practices, at which point this step should be deprecated.
 """
 import logging
 import shutil
@@ -24,7 +24,6 @@
 
 @step
 def root_inc_files(config: BuildConfig):
-
     """
     Copy inc files into the workspace output root.
@@ -35,23 +34,27 @@ def root_inc_files(config: BuildConfig):
         Artefacts created by previous Steps.
         This is where we find the artefacts to process.
     :param config:
-        The :class:`fab.build_config.BuildConfig` object where we can read settings
-        such as the project workspace folder or the multiprocessing flag.
-
+        The :class:`fab.build_config.BuildConfig` object where we can read
+        settings such as the project workspace folder or the multiprocessing
+        flag.
     """
-
-    # todo: make the build output path a getter calculated in the config?
     build_output: Path = config.build_output
     build_output.mkdir(parents=True, exist_ok=True)
 
-    warnings.warn("RootIncFiles is deprecated as .inc files are due to be removed.", DeprecationWarning)
+    warnings.warn(
+        "RootIncFiles is deprecated as .inc files are due to be removed.",
+        DeprecationWarning
+    )
 
-    # inc files all go in the root - they're going to be removed altogether, soon
+    # inc files all go in the root - they're going to be removed
+    # altogether soon.
     inc_copied = set()
     for fpath in suffix_filter(config.artefact_store["all_source"], [".inc"]):
 
         # don't copy from the output root to the output root!
-        # this is currently unlikely to happen but did in the past, and caused problems.
+        # this is currently unlikely to happen but did in the past, and caused
+        # problems.
         if fpath.parent == build_output:
             continue
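+        # (Copying a file onto itself would fail: shutil's copy functions
+        # raise SameFileError when source and destination are the same file.)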