From 0f1554b95505e9f5595fdf33ffe1e5f6b59b87ff Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Fri, 13 Feb 2026 17:05:42 -0600 Subject: [PATCH 01/34] update imports --- .../NextGen_Forcings_Engine/bmi_model.py | 35 ++++++------------- .../core/forcingInputMod.py | 6 ++-- .../NextGen_Forcings_Engine/core/regrid.py | 10 +++--- .../NextGen_Forcings_Engine/model.py | 12 +++---- .../NextGen_Forcings_Engine/retry_utils.py | 4 +-- 5 files changed, 27 insertions(+), 40 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 6858056a..71e23349 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -24,15 +24,14 @@ from mpi4py import MPI from NextGen_Forcings_Engine_BMI import esmf_creation, forcing_extraction - +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import GEOGRID +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ConfigOptions from .bmi_grid import Grid, GridType from .core import ( - config, err_handler, forcingInputMod, - geoMod, ioMod, - parallel, suppPrecipMod, ) from .model import NWMv3ForcingEngineModel @@ -207,7 +206,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # If _job_meta was not set by initialize_with_params(), create a default one if self._job_meta is None: - self._job_meta = config.ConfigOptions(self.cfg_bmi) + self._job_meta = ConfigOptions(self.cfg_bmi) # Parse the configuration options try: @@ -231,7 +230,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: self._job_meta.nwmConfig = self.cfg_bmi["NWM_CONFIG"] # Initialize MPI communication - self._mpi_meta = parallel.MpiConfig() + self._mpi_meta = MpiConfig() try: comm = 
MPI.Comm.f2py(self._comm) if self._comm is not None else None self._mpi_meta.initialize_comm(self._job_meta, comm=comm) @@ -252,21 +251,11 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # information about the modeling domain, local processor # grid boundaries, and ESMF grid objects/fields to be used # in regridding. - self._wrf_hydro_geo_meta = geoMod.GeoMetaWrfHydro() + self._wrf_hydro_geo_meta = GEOGRID.get(self._job_meta.grid_type)( + self._job_meta, self._mpi_meta + ) - if self._job_meta.grid_type == "gridded": - self._wrf_hydro_geo_meta.initialize_destination_geo_gridded( - self._job_meta, self._mpi_meta - ) - elif self._job_meta.grid_type == "unstructured": - self._wrf_hydro_geo_meta.initialize_destination_geo_unstructured( - self._job_meta, self._mpi_meta - ) - elif self._job_meta.grid_type == "hydrofabric": - self._wrf_hydro_geo_meta.initialize_destination_geo_hydrofabric( - self._job_meta, self._mpi_meta - ) - else: + if self._wrf_hydro_geo_meta is None: self._job_meta.errMsg = "You must specify a proper grid_type (gridded, unstructured, hydrofabric) in the config." err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) @@ -761,9 +750,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: if self._job_meta.spatial_meta is not None: try: - self._wrf_hydro_geo_meta.initialize_geospatial_metadata( - self._job_meta, self._mpi_meta - ) + self._wrf_hydro_geo_meta.initialize_geospatial_metadata() except Exception as e: err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) err_handler.check_program_status(self._job_meta, self._mpi_meta) @@ -897,7 +884,7 @@ def initialize_with_params( :raises ValueError: If an invalid grid type is specified, an exception is raised. 
""" # Set the job metadata parameters (b_date, geogrid) using config_options - self._job_meta = config.ConfigOptions( + self._job_meta = ConfigOptions( self.cfg_bmi, b_date=b_date, geogrid_arg=geogrid ) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 9931100b..5e94f65d 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -12,7 +12,7 @@ ConfigOptions, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, + GeoMeta, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME @@ -963,7 +963,7 @@ def regrid_map(self): def regrid_inputs( self, config_options: ConfigOptions, - wrf_hyro_geo_meta: GeoMetaWrfHydro, + wrf_hyro_geo_meta: GeoMeta, mpi_config: MpiConfig, ): """Regrid input forcings to the final output grids for this timestep. @@ -1012,7 +1012,7 @@ def temporal_interpolate_inputs( def init_dict( config_options: ConfigOptions, - geo_meta_wrf_hydro: GeoMetaWrfHydro, + geo_meta_wrf_hydro: GeoMeta, mpi_config: MpiConfig, ) -> dict: """Initialize the input forcing dictionary. 
diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py index 99126c62..df0b9e89 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py @@ -43,7 +43,7 @@ ConfigOptions, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, + GeoMeta, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME @@ -11979,7 +11979,7 @@ def check_supp_pcp_regrid_status( def get_weight_file_names( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, ) -> tuple[str | None, str | None]: """Get weight file names for regridding.""" if not config_options.weightsDir: @@ -12005,7 +12005,7 @@ def get_weight_file_names( def load_weight_file( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str, element_mode: bool, ) -> None: @@ -12051,7 +12051,7 @@ def load_weight_file( def make_regrid( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str | None, fill: bool, element_mode: bool, @@ -12120,7 +12120,7 @@ def make_regrid( def execute_regrid( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str, element_mode: bool, ) -> None: diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py index a71a2f8c..d6bd80dc 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py @@ -18,7 +18,7 @@ ConfigOptions, ) from 
NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, + GeoMeta, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.ioMod import OutputObj from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig @@ -80,7 +80,7 @@ def run( model: dict, future_time: float, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, input_forcing_mod: dict, supp_pcp_mod: dict, mpi_config: MpiConfig, @@ -347,7 +347,7 @@ def loop_through_forcing_products( self, future_time: float, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, input_forcing_mod: dict, supp_pcp_mod: dict, mpi_config: MpiConfig, @@ -664,7 +664,7 @@ def loop_through_forcing_products( def process_suplemental_precip( self, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, supp_pcp_mod: dict, mpi_config: MpiConfig, output_obj: OutputObj, @@ -736,7 +736,7 @@ def process_suplemental_precip( def write_output( self, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, mpi_config: MpiConfig, output_obj: OutputObj, ): @@ -764,7 +764,7 @@ def update_dict( self, model: dict, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, output_obj: OutputObj, ): """Flatten the Forcings Engine output object and update the BMI dictionary.""" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py index 5b2598a2..dc14dfb5 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py @@ -3,8 +3,8 @@ import traceback import types -from .core.config import ConfigOptions -from .core.parallel import MpiConfig +from 
NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ConfigOptions def retry_w_mpi_context( From 18933758698d165d3a182e8f31c6298ccac25ad6 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Fri, 13 Feb 2026 17:05:57 -0600 Subject: [PATCH 02/34] add consts.py --- .../NextGen_Forcings_Engine/core/consts.py | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py new file mode 100644 index 00000000..05759781 --- /dev/null +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -0,0 +1,11 @@ +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GriddedGeoMeta, + HydrofabricGeoMeta, + UnstructuredGeoMeta, +) + +GEOGRID = { + "gridded": GriddedGeoMeta, + "unstructured": UnstructuredGeoMeta, + "hydrofabric": HydrofabricGeoMeta, +} From 32c944644033df2ac33f377fb374eaf14018bc30 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Fri, 13 Feb 2026 17:07:08 -0600 Subject: [PATCH 03/34] split GeoMeta class into a base class and children classes --- .../NextGen_Forcings_Engine/core/geoMod.py | 1547 +++++++++-------- 1 file changed, 820 insertions(+), 727 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 8902ebbe..514d2dba 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -19,30 +19,30 @@ import logging +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import 
MpiConfig from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) -class GeoMetaWrfHydro: +class GeoMeta: """Abstract class for handling information about the WRF-Hydro domain we are processing forcings too.""" - def __init__(self): - """Initialize GeoMetaWrfHydro class variables.""" + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): + """Initialize GeoMeta class variables.""" + self.config_options = config_options + self.mpi_config = mpi_config self.nx_global = None self.ny_global = None self.nx_global_elem = None self.ny_global_elem = None self.dx_meters = None self.dy_meters = None - self.nx_local = None - self.ny_local = None self.nx_local_elem = None self.ny_local_elem = None - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None self.latitude_grid = None self.longitude_grid = None self.element_ids = None @@ -74,40 +74,320 @@ def __init__(self): self.y_coords = None self.spatial_global_atts = None - def get_processor_bounds(self, config_options): - """Calculate the local grid boundaries for this processor. + def initialize_geospatial_metadata(self): + """Initialize GeoMetaWrfHydro class variables. - ESMF operates under the hood and the boundary values - are calculated within the ESMF software. + Function that will read in crs/x/y geospatial metadata and coordinates + from the optional geospatial metadata file IF it was specified by the user in + the configuration file. 
+ :param config_options: :return: """ - if config_options.grid_type == "gridded": - self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] - self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] - self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] - self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.nx_local = self.x_upper_bound - self.x_lower_bound - self.ny_local = self.y_upper_bound - self.y_lower_bound - elif config_options.grid_type == "unstructured": - self.nx_local = len(self.esmf_grid.coords[0][1]) - self.ny_local = len(self.esmf_grid.coords[0][1]) - self.nx_local_elem = len(self.esmf_grid.coords[1][1]) - self.ny_local_elem = len(self.esmf_grid.coords[1][1]) - # LOG.debug("ESMF Mesh nx local node is " + str(self.nx_local)) - # LOG.debug("ESMF Mesh nx local elem is " + str(self.nx_local_elem)) - elif config_options.grid_type == "hydrofabric": - self.nx_local = len(self.esmf_grid.coords[1][1]) - self.ny_local = len(self.esmf_grid.coords[1][1]) - # self.nx_local_poly = len(self.esmf_poly_coords) - # self.ny_local_poly = len(self.esmf_poly_coords) - # LOG.debug("ESMF Mesh nx local elem is " + str(self.nx_local)) - # LOG.debug("ESMF Mesh nx local poly is " + str(self.nx_local_poly)) - # LOG.debug("WRF-HYDRO LOCAL X BOUND 1 = " + str(self.x_lower_bound)) - # LOG.debug("WRF-HYDRO LOCAL X BOUND 2 = " + str(self.x_upper_bound)) - # LOG.debug("WRF-HYDRO LOCAL Y BOUND 1 = " + str(self.y_lower_bound)) - # LOG.debug("WRF-HYDRO LOCAL Y BOUND 2 = " + str(self.y_upper_bound)) - - def initialize_destination_geo_gridded(self, config_options, mpi_config): + # We will only read information on processor 0. This data is not necessary for the + # other processors, and is only used in the output routines. + if self.mpi_config.rank == 0: + # Open the geospatial metadata file. 
+ try: + esmf_nc = netCDF4.Dataset(self.config_options.spatial_meta, "r") + except Exception as e: + self.config_options.errMsg = ( + "Unable to open spatial metadata file: " + + self.config_options.spatial_meta + ) + raise Exception + + # Make sure the expected variables are present in the file. + if "crs" not in esmf_nc.variables.keys(): + self.config_options.errMsg = ( + "Unable to locate crs variable in: " + + self.config_options.spatial_meta + ) + raise Exception + if "x" not in esmf_nc.variables.keys(): + self.config_options.errMsg = ( + "Unable to locate x variable in: " + + self.config_options.spatial_meta + ) + raise Exception + if "y" not in esmf_nc.variables.keys(): + self.config_options.errMsg = ( + "Unable to locate y variable in: " + + self.config_options.spatial_meta + ) + raise Exception + # Extract names of variable attributes from each of the input geospatial variables. These + # can change, so we are making this as flexible as possible to accomodate future changes. + try: + crs_att_names = esmf_nc.variables["crs"].ncattrs() + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract crs attribute names from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + x_coord_att_names = esmf_nc.variables["x"].ncattrs() + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract x attribute names from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + y_coord_att_names = esmf_nc.variables["y"].ncattrs() + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract y attribute names from: " + + self.config_options.spatial_meta + ) + raise Exception + # Extract attribute values + try: + self.x_coord_atts = { + item: esmf_nc.variables["x"].getncattr(item) + for item in x_coord_att_names + } + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract x coordinate attributes from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + 
self.y_coord_atts = { + item: esmf_nc.variables["y"].getncattr(item) + for item in y_coord_att_names + } + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract y coordinate attributes from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + self.crs_atts = { + item: esmf_nc.variables["crs"].getncattr(item) + for item in crs_att_names + } + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract crs coordinate attributes from: " + + self.config_options.spatial_meta + ) + raise Exception + + # Extract global attributes + try: + global_att_names = esmf_nc.ncattrs() + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract global attribute names from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + self.spatial_global_atts = { + item: esmf_nc.getncattr(item) for item in global_att_names + } + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract global attributes from: " + + self.config_options.spatial_meta + ) + raise Exception + + # Extract x/y coordinate values + if len(esmf_nc.variables["x"].shape) == 1: + try: + self.x_coords = esmf_nc.variables["x"][:].data + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract x coordinate values from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + self.y_coords = esmf_nc.variables["y"][:].data + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract y coordinate values from: " + + self.config_options.spatial_meta + ) + raise Exception + # Check to see if the Y coordinates are North-South. If so, flip them. 
+ if self.y_coords[1] < self.y_coords[0]: + self.y_coords[:] = np.flip(self.y_coords[:], axis=0) + + if len(esmf_nc.variables["x"].shape) == 2: + try: + self.x_coords = esmf_nc.variables["x"][:, :].data + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract x coordinate values from: " + + self.config_options.spatial_meta + ) + raise Exception + try: + self.y_coords = esmf_nc.variables["y"][:, :].data + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract y coordinate values from: " + + self.config_options.spatial_meta + ) + raise Exception + # Check to see if the Y coordinates are North-South. If so, flip them. + if self.y_coords[1, 0] > self.y_coords[0, 0]: + self.y_coords[:, :] = np.flipud(self.y_coords[:, :]) + + # Close the geospatial metadata file. + try: + esmf_nc.close() + except Exception as e: + self.config_options.errMsg = ( + "Unable to close spatial metadata file: " + + self.config_options.spatial_meta + ) + raise Exception + + # mpi_config.comm.barrier() + + def calc_slope(self, esmf_nc): + """Calculate slope grids needed for incoming shortwave radiation downscaling. + + Function to calculate slope grids needed for incoming shortwave radiation downscaling + later during the program. + :param esmf_nc: + :return: + """ + # First extract the sina,cosa, and elevation variables from the geogrid file. 
+ try: + sinaGrid = esmf_nc.variables[self.config_options.sinalpha_var][0, :, :] + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract SINALPHA from: " + self.config_options.geogrid + ) + raise + + try: + cosaGrid = esmf_nc.variables[self.config_options.cosalpha_var][0, :, :] + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract COSALPHA from: " + self.config_options.geogrid + ) + raise + + try: + heightDest = esmf_nc.variables[self.config_options.hgt_var][0, :, :] + except Exception as e: + self.config_options.errMsg = ( + "Unable to extract HGT_M from: " + self.config_options.geogrid + ) + raise + + # Ensure cosa/sina are correct dimensions + if sinaGrid.shape[0] != self.ny_global or sinaGrid.shape[1] != self.nx_global: + self.config_options.errMsg = ( + "SINALPHA dimensions mismatch in: " + self.config_options.geogrid + ) + raise Exception + if cosaGrid.shape[0] != self.ny_global or cosaGrid.shape[1] != self.nx_global: + self.config_options.errMsg = ( + "COSALPHA dimensions mismatch in: " + self.config_options.geogrid + ) + raise Exception + if ( + heightDest.shape[0] != self.ny_global + or heightDest.shape[1] != self.nx_global + ): + self.config_options.errMsg = ( + "HGT_M dimension mismatch in: " + self.config_options.geogrid + ) + raise Exception + + # Establish constants + rdx = 1.0 / self.dx_meters + rdy = 1.0 / self.dy_meters + msftx = 1.0 + msfty = 1.0 + + slopeOut = np.empty([self.ny_global, self.nx_global], np.float32) + toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) + toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) + slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) + ipDiff = np.empty([self.ny_global, self.nx_global], np.int32) + jpDiff = np.empty([self.ny_global, self.nx_global], np.int32) + hx = np.empty([self.ny_global, self.nx_global], np.float32) + hy = np.empty([self.ny_global, self.nx_global], np.float32) + + # Create index arrays that will be 
used to calculate slope. + xTmp = np.arange(self.nx_global) + yTmp = np.arange(self.ny_global) + xGrid = np.tile(xTmp[:], (self.ny_global, 1)) + yGrid = np.repeat(yTmp[:, np.newaxis], self.nx_global, axis=1) + indOrig = np.where(heightDest == heightDest) + indIp1 = ((indOrig[0]), (indOrig[1] + 1)) + indIm1 = ((indOrig[0]), (indOrig[1] - 1)) + indJp1 = ((indOrig[0] + 1), (indOrig[1])) + indJm1 = ((indOrig[0] - 1), (indOrig[1])) + indIp1[1][np.where(indIp1[1] >= self.nx_global)] = self.nx_global - 1 + indJp1[0][np.where(indJp1[0] >= self.ny_global)] = self.ny_global - 1 + indIm1[1][np.where(indIm1[1] < 0)] = 0 + indJm1[0][np.where(indJm1[0] < 0)] = 0 + + ipDiff[indOrig] = xGrid[indIp1] - xGrid[indIm1] + jpDiff[indOrig] = yGrid[indJp1] - yGrid[indJm1] + + toposlpx[indOrig] = ( + (heightDest[indIp1] - heightDest[indIm1]) * msftx * rdx + ) / ipDiff[indOrig] + toposlpy[indOrig] = ( + (heightDest[indJp1] - heightDest[indJm1]) * msfty * rdy + ) / jpDiff[indOrig] + hx[indOrig] = toposlpx[indOrig] + hy[indOrig] = toposlpy[indOrig] + slopeOut[indOrig] = np.arctan((hx[indOrig] ** 2 + hy[indOrig] ** 2) ** 0.5) + slopeOut[np.where(slopeOut < 1e-4)] = 0.0 + slp_azi[np.where(slopeOut < 1e-4)] = 0.0 + indValesmf_nc = np.where(slopeOut >= 1e-4) + slp_azi[indValesmf_nc] = ( + np.arctan2(hx[indValesmf_nc], hy[indValesmf_nc]) + math.pi + ) + indValesmf_nc = np.where(cosaGrid >= 0.0) + slp_azi[indValesmf_nc] = slp_azi[indValesmf_nc] - np.arcsin( + sinaGrid[indValesmf_nc] + ) + indValesmf_nc = np.where(cosaGrid < 0.0) + slp_azi[indValesmf_nc] = slp_azi[indValesmf_nc] - ( + math.pi - np.arcsin(sinaGrid[indValesmf_nc]) + ) + + # Reset temporary arrays to None to free up memory + toposlpx = None + toposlpy = None + heightDest = None + sinaGrid = None + cosaGrid = None + indValesmf_nc = None + xTmp = None + yTmp = None + xGrid = None + ipDiff = None + jpDiff = None + indOrig = None + indJm1 = None + indJp1 = None + indIm1 = None + indIp1 = None + hx = None + hy = None + + return slopeOut, 
slp_azi + + +class GriddedGeoMeta(GeoMeta): + """Class for handling information about the gridded domain we are processing forcings too.""" + + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMetaWrfHydro class variables. Initialization function to initialize ESMF through ESMPy, @@ -116,124 +396,142 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): for this particular processor. :return: """ + super().__init__(config_options, mpi_config) + # Open the geogrid file and extract necessary information # to create ESMF fields. if mpi_config.rank == 0: try: - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to open the WRF-Hydro geogrid file: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception - if idTmp.variables[config_options.lat_var].ndim == 3: + if esmf_nc.variables[self.config_options.lat_var].ndim == 3: try: - self.nx_global = idTmp.variables[config_options.lat_var].shape[2] + self.nx_global = esmf_nc.variables[ + self.config_options.lat_var + ].shape[2] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract X dimension size from latitude variable in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[1] + self.ny_global = esmf_nc.variables[ + self.config_options.lat_var + ].shape[1] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.dx_meters = idTmp.DX + self.dx_meters = esmf_nc.DX except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract DX global 
attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.dy_meters = idTmp.DY + self.dy_meters = esmf_nc.DY except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract DY global attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception - elif idTmp.variables[config_options.lat_var].ndim == 2: + elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: try: - self.nx_global = idTmp.variables[config_options.lat_var].shape[1] + self.nx_global = esmf_nc.variables[ + self.config_options.lat_var + ].shape[1] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract X dimension size from latitude variable in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[0] + self.ny_global = esmf_nc.variables[ + self.config_options.lat_var + ].shape[0] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.dx_meters = idTmp.variables[config_options.lon_var].dx + self.dx_meters = esmf_nc.variables[self.config_options.lon_var].dx except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract DX global attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.dy_meters = idTmp.variables[config_options.lat_var].dy + self.dy_meters = esmf_nc.variables[self.config_options.lat_var].dy except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract DY global attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception else: try: - self.nx_global = idTmp.variables[config_options.lon_var].shape[0] 
+ self.nx_global = esmf_nc.variables[ + self.config_options.lon_var + ].shape[0] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract X dimension size from longitude variable in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[0] + self.ny_global = esmf_nc.variables[ + self.config_options.lat_var + ].shape[0] except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception - if config_options.input_forcings[0] != 23: + if self.config_options.input_forcings[0] != 23: try: - self.dx_meters = idTmp.variables[config_options.lon_var].dx + self.dx_meters = esmf_nc.variables[ + self.config_options.lon_var + ].dx except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract dx metadata attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.dy_meters = idTmp.variables[config_options.lat_var].dy + self.dy_meters = esmf_nc.variables[ + self.config_options.lat_var + ].dy except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to extract dy metadata attribute in: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception else: @@ -246,16 +544,16 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): # Broadcast global dimensions to the other processors. 
self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int + self.nx_global, self.config_options, param_type=int ) self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int + self.ny_global, self.config_options, param_type=int ) self.dx_meters = mpi_config.broadcast_parameter( - self.dx_meters, config_options, param_type=float + self.dx_meters, self.config_options, param_type=float ) self.dy_meters = mpi_config.broadcast_parameter( - self.dy_meters, config_options, param_type=float + self.dy_meters, self.config_options, param_type=float ) # mpi_config.comm.barrier() @@ -267,9 +565,9 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): coord_sys=ESMF.CoordSys.SPH_DEG, ) except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to create ESMF grid for WRF-Hydro geogrid: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception @@ -280,30 +578,27 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): # mpi_config.comm.barrier() - # Obtain the local boundaries for this processor. - self.get_processor_bounds(config_options) - # Scatter global XLAT_M grid to processors.. 
if mpi_config.rank == 0: - if idTmp.variables[config_options.lat_var].ndim == 3: - varTmp = idTmp.variables[config_options.lat_var][0, :, :] - elif idTmp.variables[config_options.lat_var].ndim == 2: - varTmp = idTmp.variables[config_options.lat_var][:, :] - elif idTmp.variables[config_options.lat_var].ndim == 1: - lat = idTmp.variables[config_options.lat_var][:] - lon = idTmp.variables[config_options.lon_var][:] + if esmf_nc.variables[self.config_options.lat_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.lat_var][0, :, :] + elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: + varTmp = esmf_nc.variables[self.config_options.lat_var][:, :] + elif esmf_nc.variables[self.config_options.lat_var].ndim == 1: + lat = esmf_nc.variables[self.config_options.lat_var][:] + lon = esmf_nc.variables[self.config_options.lon_var][:] varTmp = np.meshgrid(lon, lat)[1] lat = None lon = None # Flag to grab entire array for AWS slicing - if config_options.aws: + if self.config_options.aws: self.lat_bounds = varTmp else: varTmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) # mpi_config.comm.barrier() @@ -315,7 +610,7 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): varSubTmp = None varTmp = None except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to subset latitude from geogrid file into ESMF object" ) raise Exception @@ -324,25 +619,25 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): # Scatter global XLONG_M grid to processors.. 
if mpi_config.rank == 0: - if idTmp.variables[config_options.lat_var].ndim == 3: - varTmp = idTmp.variables[config_options.lon_var][0, :, :] - elif idTmp.variables[config_options.lon_var].ndim == 2: - varTmp = idTmp.variables[config_options.lon_var][:, :] - elif idTmp.variables[config_options.lon_var].ndim == 1: - lat = idTmp.variables[config_options.lat_var][:] - lon = idTmp.variables[config_options.lon_var][:] + if esmf_nc.variables[self.config_options.lat_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.lon_var][0, :, :] + elif esmf_nc.variables[self.config_options.lon_var].ndim == 2: + varTmp = esmf_nc.variables[self.config_options.lon_var][:, :] + elif esmf_nc.variables[self.config_options.lon_var].ndim == 1: + lat = esmf_nc.variables[self.config_options.lat_var][:] + lon = esmf_nc.variables[self.config_options.lon_var][:] varTmp = np.meshgrid(lon, lat)[0] lat = None lon = None # Flag to grab entire array for AWS slicing - if config_options.aws: + if self.config_options.aws: self.lon_bounds = varTmp else: varTmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) # mpi_config.comm.barrier() @@ -352,7 +647,7 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): varSubTmp = None varTmp = None except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to subset longitude from geogrid file into ESMF object" ) raise Exception @@ -360,21 +655,23 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): # mpi_config.comm.barrier() if ( - config_options.cosalpha_var is not None - and config_options.sinalpha_var is not None + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None ): # Scatter the COSALPHA,SINALPHA grids to the processors. 
if mpi_config.rank == 0: - if idTmp.variables[config_options.cosalpha_var].ndim == 3: - varTmp = idTmp.variables[config_options.cosalpha_var][0, :, :] + if esmf_nc.variables[self.config_options.cosalpha_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.cosalpha_var][ + 0, :, : + ] else: - varTmp = idTmp.variables[config_options.cosalpha_var][:, :] + varTmp = esmf_nc.variables[self.config_options.cosalpha_var][:, :] else: varTmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) # mpi_config.comm.barrier() self.cosa_grid = varSubTmp[:, :] @@ -382,47 +679,51 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): varTmp = None if mpi_config.rank == 0: - if idTmp.variables[config_options.sinalpha_var].ndim == 3: - varTmp = idTmp.variables[config_options.sinalpha_var][0, :, :] + if esmf_nc.variables[self.config_options.sinalpha_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.sinalpha_var][ + 0, :, : + ] else: - varTmp = idTmp.variables[config_options.sinalpha_var][:, :] + varTmp = esmf_nc.variables[self.config_options.sinalpha_var][:, :] else: varTmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) # mpi_config.comm.barrier() self.sina_grid = varSubTmp[:, :] varSubTmp = None varTmp = None - if config_options.hgt_var is not None: + if self.config_options.hgt_var is not None: # Read in a scatter the WRF-Hydro elevation, which is used for downscaling # purposes. 
if mpi_config.rank == 0: - if idTmp.variables[config_options.hgt_var].ndim == 3: - varTmp = idTmp.variables[config_options.hgt_var][0, :, :] + if esmf_nc.variables[self.config_options.hgt_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.hgt_var][0, :, :] else: - varTmp = idTmp.variables[config_options.hgt_var][:, :] + varTmp = esmf_nc.variables[self.config_options.hgt_var][:, :] else: varTmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) # mpi_config.comm.barrier() self.height = varSubTmp varSubTmp = None varTmp = None if ( - config_options.cosalpha_var is not None - and config_options.sinalpha_var is not None + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None ): # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will # be used for downscaling purposes. if mpi_config.rank == 0: try: - slopeTmp, slp_azi_tmp = self.calc_slope(idTmp, config_options) + slopeTmp, slp_azi_tmp = self.calc_slope( + esmf_nc, self.config_options + ) except Exception: raise Exception else: @@ -430,48 +731,54 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): slp_azi_tmp = None # mpi_config.comm.barrier() - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, config_options) + slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, self.config_options) self.slope = slopeSubTmp[:, :] slopeSubTmp = None - slp_azi_sub = mpi_config.scatter_array(self, slp_azi_tmp, config_options) + slp_azi_sub = mpi_config.scatter_array( + self, slp_azi_tmp, self.config_options + ) self.slp_azi = slp_azi_sub[:, :] slp_azi_tmp = None elif ( - config_options.slope_var is not None - and config_options.slope_azimuth_var is not None + self.config_options.slope_var is not None + and self.config_options.slope_azimuth_var is not None ): if mpi_config.rank == 0: - if 
idTmp.variables[config_options.slope_var].ndim == 3: - varTmp = idTmp.variables[config_options.slope_var][0, :, :] + if esmf_nc.variables[self.config_options.slope_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.slope_var][0, :, :] else: - varTmp = idTmp.variables[config_options.slope_var][:, :] + varTmp = esmf_nc.variables[self.config_options.slope_var][:, :] else: varTmp = None - slopeSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + slopeSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) self.slope = slopeSubTmp varTmp = None if mpi_config.rank == 0: - if idTmp.variables[config_options.slope_azimuth_var].ndim == 3: - varTmp = idTmp.variables[config_options.slope_azimuth_var][0, :, :] + if esmf_nc.variables[self.config_options.slope_azimuth_var].ndim == 3: + varTmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ + 0, :, : + ] else: - varTmp = idTmp.variables[config_options.slope_azimuth_var][:, :] + varTmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ + :, : + ] else: varTmp = None - slp_azi_sub = mpi_config.scatter_array(self, varTmp, config_options) + slp_azi_sub = mpi_config.scatter_array(self, varTmp, self.config_options) self.slp_azi = slp_azi_sub[:, :] varTmp = None - elif config_options.hgt_var is not None: + elif self.config_options.hgt_var is not None: # Calculate the slope from the domain using elevation of the gridded model and other approximations if mpi_config.rank == 0: try: slopeTmp, slp_azi_tmp = self.calc_slope_gridded( - idTmp, config_options + esmf_nc, self.config_options ) except Exception: raise Exception @@ -480,382 +787,84 @@ def initialize_destination_geo_gridded(self, config_options, mpi_config): slp_azi_tmp = None # mpi_config.comm.barrier() - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, config_options) + slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, self.config_options) self.slope = slopeSubTmp[:, :] slopeSubTmp = None - slp_azi_sub = 
mpi_config.scatter_array(self, slp_azi_tmp, config_options) - self.slp_azi = slp_azi_sub[:, :] - slp_azi_tmp = None - - if mpi_config.rank == 0: - # Close the geogrid file - try: - idTmp.close() - except Exception as e: - config_options.errMsg = ( - "Unable to close geogrid file: " + config_options.geogrid - ) - raise Exception - - # Reset temporary variables to free up memory - slopeTmp = None - slp_azi_tmp = None - varTmp = None - - def initialize_geospatial_metadata(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. - - Function that will read in crs/x/y geospatial metadata and coordinates - from the optional geospatial metadata file IF it was specified by the user in - the configuration file. - :param config_options: - :return: - """ - # We will only read information on processor 0. This data is not necessary for the - # other processors, and is only used in the output routines. - if mpi_config.rank == 0: - # Open the geospatial metadata file. - try: - idTmp = netCDF4.Dataset(config_options.spatial_meta, "r") - except Exception as e: - config_options.errMsg = ( - "Unable to open spatial metadata file: " - + config_options.spatial_meta - ) - raise Exception - - # Make sure the expected variables are present in the file. - if "crs" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate crs variable in: " + config_options.spatial_meta - ) - raise Exception - if "x" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate x variable in: " + config_options.spatial_meta - ) - raise Exception - if "y" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate y variable in: " + config_options.spatial_meta - ) - raise Exception - # Extract names of variable attributes from each of the input geospatial variables. These - # can change, so we are making this as flexible as possible to accomodate future changes. 
- try: - crs_att_names = idTmp.variables["crs"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract crs attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - x_coord_att_names = idTmp.variables["x"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract x attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - y_coord_att_names = idTmp.variables["y"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract y attribute names from: " - + config_options.spatial_meta - ) - raise Exception - # Extract attribute values - try: - self.x_coord_atts = { - item: idTmp.variables["x"].getncattr(item) - for item in x_coord_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coord_atts = { - item: idTmp.variables["y"].getncattr(item) - for item in y_coord_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.crs_atts = { - item: idTmp.variables["crs"].getncattr(item) - for item in crs_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract crs coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - - # Extract global attributes - try: - global_att_names = idTmp.ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract global attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.spatial_global_atts = { - item: idTmp.getncattr(item) for item in global_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract global attributes from: " - + config_options.spatial_meta - ) - raise Exception - - # Extract x/y 
coordinate values - if len(idTmp.variables["x"].shape) == 1: - try: - self.x_coords = idTmp.variables["x"][:].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coords = idTmp.variables["y"][:].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - # Check to see if the Y coordinates are North-South. If so, flip them. - if self.y_coords[1] < self.y_coords[0]: - self.y_coords[:] = np.flip(self.y_coords[:], axis=0) - - if len(idTmp.variables["x"].shape) == 2: - try: - self.x_coords = idTmp.variables["x"][:, :].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coords = idTmp.variables["y"][:, :].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - # Check to see if the Y coordinates are North-South. If so, flip them. - if self.y_coords[1, 0] > self.y_coords[0, 0]: - self.y_coords[:, :] = np.flipud(self.y_coords[:, :]) - - # Close the geospatial metadata file. - try: - idTmp.close() - except Exception as e: - config_options.errMsg = ( - "Unable to close spatial metadata file: " - + config_options.spatial_meta - ) - raise Exception - - # mpi_config.comm.barrier() - - def calc_slope(self, idTmp, config_options): - """Calculate slope grids needed for incoming shortwave radiation downscaling. - - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. - :param idTmp: - :param config_options: - :return: - """ - # First extract the sina,cosa, and elevation variables from the geogrid file. 
- try: - sinaGrid = idTmp.variables[config_options.sinalpha_var][0, :, :] - except Exception as e: - config_options.errMsg = ( - "Unable to extract SINALPHA from: " + config_options.geogrid - ) - raise - - try: - cosaGrid = idTmp.variables[config_options.cosalpha_var][0, :, :] - except Exception as e: - config_options.errMsg = ( - "Unable to extract COSALPHA from: " + config_options.geogrid - ) - raise - - try: - heightDest = idTmp.variables[config_options.hgt_var][0, :, :] - except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M from: " + config_options.geogrid + slp_azi_sub = mpi_config.scatter_array( + self, slp_azi_tmp, self.config_options ) - raise - - # Ensure cosa/sina are correct dimensions - if sinaGrid.shape[0] != self.ny_global or sinaGrid.shape[1] != self.nx_global: - config_options.errMsg = ( - "SINALPHA dimensions mismatch in: " + config_options.geogrid - ) - raise Exception - if cosaGrid.shape[0] != self.ny_global or cosaGrid.shape[1] != self.nx_global: - config_options.errMsg = ( - "COSALPHA dimensions mismatch in: " + config_options.geogrid - ) - raise Exception - if ( - heightDest.shape[0] != self.ny_global - or heightDest.shape[1] != self.nx_global - ): - config_options.errMsg = ( - "HGT_M dimension mismatch in: " + config_options.geogrid - ) - raise Exception - - # Establish constants - rdx = 1.0 / self.dx_meters - rdy = 1.0 / self.dy_meters - msftx = 1.0 - msfty = 1.0 - - slopeOut = np.empty([self.ny_global, self.nx_global], np.float32) - toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) - toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) - slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) - ipDiff = np.empty([self.ny_global, self.nx_global], np.int32) - jpDiff = np.empty([self.ny_global, self.nx_global], np.int32) - hx = np.empty([self.ny_global, self.nx_global], np.float32) - hy = np.empty([self.ny_global, self.nx_global], np.float32) - - # Create index arrays that will be 
used to calculate slope. - xTmp = np.arange(self.nx_global) - yTmp = np.arange(self.ny_global) - xGrid = np.tile(xTmp[:], (self.ny_global, 1)) - yGrid = np.repeat(yTmp[:, np.newaxis], self.nx_global, axis=1) - indOrig = np.where(heightDest == heightDest) - indIp1 = ((indOrig[0]), (indOrig[1] + 1)) - indIm1 = ((indOrig[0]), (indOrig[1] - 1)) - indJp1 = ((indOrig[0] + 1), (indOrig[1])) - indJm1 = ((indOrig[0] - 1), (indOrig[1])) - indIp1[1][np.where(indIp1[1] >= self.nx_global)] = self.nx_global - 1 - indJp1[0][np.where(indJp1[0] >= self.ny_global)] = self.ny_global - 1 - indIm1[1][np.where(indIm1[1] < 0)] = 0 - indJm1[0][np.where(indJm1[0] < 0)] = 0 - - ipDiff[indOrig] = xGrid[indIp1] - xGrid[indIm1] - jpDiff[indOrig] = yGrid[indJp1] - yGrid[indJm1] - - toposlpx[indOrig] = ( - (heightDest[indIp1] - heightDest[indIm1]) * msftx * rdx - ) / ipDiff[indOrig] - toposlpy[indOrig] = ( - (heightDest[indJp1] - heightDest[indJm1]) * msfty * rdy - ) / jpDiff[indOrig] - hx[indOrig] = toposlpx[indOrig] - hy[indOrig] = toposlpy[indOrig] - slopeOut[indOrig] = np.arctan((hx[indOrig] ** 2 + hy[indOrig] ** 2) ** 0.5) - slopeOut[np.where(slopeOut < 1e-4)] = 0.0 - slp_azi[np.where(slopeOut < 1e-4)] = 0.0 - indValidTmp = np.where(slopeOut >= 1e-4) - slp_azi[indValidTmp] = np.arctan2(hx[indValidTmp], hy[indValidTmp]) + math.pi - indValidTmp = np.where(cosaGrid >= 0.0) - slp_azi[indValidTmp] = slp_azi[indValidTmp] - np.arcsin(sinaGrid[indValidTmp]) - indValidTmp = np.where(cosaGrid < 0.0) - slp_azi[indValidTmp] = slp_azi[indValidTmp] - ( - math.pi - np.arcsin(sinaGrid[indValidTmp]) - ) - - # Reset temporary arrays to None to free up memory - toposlpx = None - toposlpy = None - heightDest = None - sinaGrid = None - cosaGrid = None - indValidTmp = None - xTmp = None - yTmp = None - xGrid = None - ipDiff = None - jpDiff = None - indOrig = None - indJm1 = None - indJp1 = None - indIm1 = None - indIp1 = None - hx = None - hy = None + self.slp_azi = slp_azi_sub[:, :] + slp_azi_tmp = None - 
return slopeOut, slp_azi + if mpi_config.rank == 0: + # Close the geogrid file + try: + esmf_nc.close() + except Exception as e: + self.config_options.errMsg = ( + "Unable to close geogrid file: " + self.config_options.geogrid + ) + raise Exception + + # Reset temporary variables to free up memory + slopeTmp = None + slp_azi_tmp = None + varTmp = None - def calc_slope_gridded(self, idTmp, config_options): + def calc_slope_gridded(self, esmf_nc): """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. This calculates the slopes for grid cells - :param idTmp: + :param esmf_nc: :param config_options: :return: """ - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") try: - lons = idTmp.variables[config_options.lon_var][:] - lats = idTmp.variables[config_options.lat_var][:] + lons = esmf_nc.variables[self.config_options.lon_var][:] + lats = esmf_nc.variables[self.config_options.lat_var][:] except Exception as e: - config_options.errMsg = ( - "Unable to extract gridded coordinates in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract gridded coordinates in " + + self.config_options.geogrid ) raise Exception try: dx = np.empty( ( - idTmp.variables[config_options.lat_var].shape[0], - idTmp.variables[config_options.lon_var].shape[0], + esmf_nc.variables[self.config_options.lat_var].shape[0], + esmf_nc.variables[self.config_options.lon_var].shape[0], ), dtype=float, ) dy = np.empty( ( - idTmp.variables[config_options.lat_var].shape[0], - idTmp.variables[config_options.lon_var].shape[0], + esmf_nc.variables[self.config_options.lat_var].shape[0], + esmf_nc.variables[self.config_options.lon_var].shape[0], ), dtype=float, ) - dx[:] = idTmp.variables[config_options.lon_var].dx - dy[:] = idTmp.variables[config_options.lat_var].dy + dx[:] = 
esmf_nc.variables[self.config_options.lon_var].dx + dy[:] = esmf_nc.variables[self.config_options.lat_var].dy except Exception as e: - config_options.errMsg = ( - "Unable to extract dx and dy distances in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract dx and dy distances in " + + self.config_options.geogrid ) raise Exception try: - heights = idTmp.variables[config_options.hgt_var][:] + heights = esmf_nc.variables[self.config_options.hgt_var][:] except Exception as e: - config_options.errMsg = ( - "Unable to extract heights of grid cells in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract heights of grid cells in " + + self.config_options.geogrid ) raise Exception - idTmp.close() + esmf_nc.close() # calculate grid coordinates dx distances in meters # based on general geospatial formula approximations @@ -878,7 +887,208 @@ def calc_slope_gridded(self, idTmp, config_options): return slope, slp_azi - def initialize_destination_geo_unstructured(self, config_options, mpi_config): + @property + def x_lower_bound(self): + """Get the local x lower bound for this processor.""" + return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + + @property + def x_upper_bound(self): + """Get the local x upper bound for this processor.""" + return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + + @property + def y_lower_bound(self): + """Get the local y lower bound for this processor.""" + return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + + @property + def y_upper_bound(self): + """Get the local y upper bound for this processor.""" + return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + @property + def nx_local(self): + """Get the local x dimension size for this processor.""" + return self.x_upper_bound - self.x_lower_bound + + @property + def ny_local(self): + """Get the local y dimension size for this processor.""" + return self.y_upper_bound - self.y_lower_bound + + +class 
HydrofabricGeoMeta(GeoMeta): + """Class for handling information about the unstructured hydrofabric domain we are processing forcings too.""" + + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): + """Initialize GeoMetaWrfHydro class variables. + + Initialization function to initialize ESMF through ESMPy, + calculate the global parameters of the WRF-Hydro grid + being processed to, along with the local parameters + for this particular processor. + :return: + """ + super().__init__(config_options, mpi_config) + + if self.config_options.geogrid is not None: + # Phase 1: Rank 0 extracts all needed global data + if self.mpi_config.rank == 0: + try: + esmf_nc = nc_utils.nc_Dataset_retry( + self.mpi_config, + self.config_options, + err_handler, + self.config_options.geogrid, + "r", + ) + + # Extract everything we need with retries + tmp_vars = esmf_nc.variables + + if self.config_options.aws: + nodecoords_data = nc_utils.nc_read_var_retry( + self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.nodecoords_var], + ) + self.lat_bounds = nodecoords_data[:, 1] + self.lon_bounds = nodecoords_data[:, 0] + + # Store these for later broadcast/scatter + elementcoords_global = nc_utils.nc_read_var_retry( + self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.elemcoords_var], + ) + + self.nx_global = elementcoords_global.shape[0] + self.ny_global = self.nx_global + + element_ids_global = nc_utils.nc_read_var_retry( + self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.element_id_var], + ) + + heights_global = None + if self.config_options.hgt_var is not None: + heights_global = nc_utils.nc_read_var_retry( + self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.hgt_var], + ) + slopes_global = None + slp_azi_global = None + if self.config_options.slope_var is not None: + slopes_global = nc_utils.nc_read_var_retry( + 
self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.slope_var], + ) + if self.config_options.slope_azimuth_var is not None: + slp_azi_global = nc_utils.nc_read_var_retry( + self.mpi_config, + self.config_options, + err_handler, + tmp_vars[self.config_options.slope_azimuth_var], + ) + + except Exception as e: + LOG.critical( + f"Failed to open mesh file: {self.config_options.geogrid} " + f"due to {str(e)}" + ) + raise + finally: + esmf_nc.close() + else: + elementcoords_global = None + element_ids_global = None + heights_global = None + slopes_global = None + slp_azi_global = None + + # Broadcast dimensions + self.nx_global = self.mpi_config.broadcast_parameter( + self.nx_global, self.config_options, param_type=int + ) + self.ny_global = self.mpi_config.broadcast_parameter( + self.ny_global, self.config_options, param_type=int + ) + + self.mpi_config.comm.barrier() + + # Phase 2: Create ESMF Mesh (collective operation with retry) + try: + self.esmf_grid = esmf_utils.esmf_mesh_retry( + self.mpi_config, + self.config_options, + err_handler, + filename=self.config_options.geogrid, + filetype=ESMF.FileFormat.ESMFMESH, + ) + except Exception as e: + LOG.critical( + f"Unable to create ESMF Mesh: {self.config_options.geogrid} " + f"due to {str(e)}" + ) + raise + + # Extract local coordinates from ESMF mesh + self.latitude_grid = self.esmf_grid.coords[1][1] + self.longitude_grid = self.esmf_grid.coords[1][0] + + # Phase 3: Broadcast global arrays and compute local indices + elementcoords_global = self.mpi_config.comm.bcast( + elementcoords_global, root=0 + ) + element_ids_global = self.mpi_config.comm.bcast(element_ids_global, root=0) + + # Each rank computes its own local indices + pet_elementcoords = np.column_stack( + [self.longitude_grid, self.latitude_grid] + ) + tree = spatial.KDTree(elementcoords_global) + _, pet_element_inds = tree.query(pet_elementcoords) + + self.element_ids = element_ids_global[pet_element_inds] + 
self.element_ids_global = element_ids_global + + # Broadcast and extract height/slope data + if self.config_options.hgt_var is not None: + heights_global = self.mpi_config.comm.bcast(heights_global, root=0) + self.height = heights_global[pet_element_inds] + + if self.config_options.slope_var is not None: + slopes_global = self.mpi_config.comm.bcast(slopes_global, root=0) + slp_azi_global = self.mpi_config.comm.bcast(slp_azi_global, root=0) + self.slope = slopes_global[pet_element_inds] + self.slp_azi = slp_azi_global[pet_element_inds] + + self.mesh_inds = pet_element_inds + + @property + def nx_local(self): + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + @property + def ny_local(self): + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + +class UnstructuredGeoMeta(GeoMeta): + """Class for handling information about the unstructured domain we are processing forcings too.""" + + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMetaWrfHydro class variables. Initialization function to initialize ESMF through ESMPy, @@ -887,77 +1097,87 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): for this particular processor. :return: """ + super().__init__(config_options, mpi_config) + # Open the geogrid file and extract necessary information # to create ESMF fields. 
if mpi_config.rank == 0: try: - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to open the unstructured mesh file: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception try: - self.nx_global = idTmp.variables[config_options.nodecoords_var].shape[0] + self.nx_global = esmf_nc.variables[ + self.config_options.nodecoords_var + ].shape[0] except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract X dimension size in " + + self.config_options.geogrid ) raise Exception try: - self.ny_global = idTmp.variables[config_options.nodecoords_var].shape[0] + self.ny_global = esmf_nc.variables[ + self.config_options.nodecoords_var + ].shape[0] except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract Y dimension size in " + + self.config_options.geogrid ) raise Exception try: - self.nx_global_elem = idTmp.variables[ - config_options.elemcoords_var + self.nx_global_elem = esmf_nc.variables[ + self.config_options.elemcoords_var ].shape[0] except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract X dimension size in " + + self.config_options.geogrid ) raise Exception try: - self.ny_global_elem = idTmp.variables[ - config_options.elemcoords_var + self.ny_global_elem = esmf_nc.variables[ + self.config_options.elemcoords_var ].shape[0] except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract Y dimension size in " + + self.config_options.geogrid ) raise 
Exception # Flag to grab entire array for AWS slicing - if config_options.aws: - self.lat_bounds = idTmp.variables[config_options.nodecoords_var][:][ - :, 1 - ] - self.lon_bounds = idTmp.variables[config_options.nodecoords_var][:][ - :, 0 - ] + if self.config_options.aws: + self.lat_bounds = esmf_nc.variables[self.config_options.nodecoords_var][ + : + ][:, 1] + self.lon_bounds = esmf_nc.variables[self.config_options.nodecoords_var][ + : + ][:, 0] # mpi_config.comm.barrier() # Broadcast global dimensions to the other processors. self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int + self.nx_global, self.config_options, param_type=int ) self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int + self.ny_global, self.config_options, param_type=int ) self.nx_global_elem = mpi_config.broadcast_parameter( - self.nx_global_elem, config_options, param_type=int + self.nx_global_elem, self.config_options, param_type=int ) self.ny_global_elem = mpi_config.broadcast_parameter( - self.ny_global_elem, config_options, param_type=int + self.ny_global_elem, self.config_options, param_type=int ) # mpi_config.comm.barrier() @@ -965,10 +1185,10 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): if mpi_config.rank == 0: # Close the geogrid file try: - idTmp.close() + esmf_nc.close() except Exception as e: - config_options.errMsg = ( - "Unable to close geogrid Mesh file: " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to close geogrid Mesh file: " + self.config_options.geogrid ) raise Exception @@ -977,20 +1197,17 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): # From ESMF documentation # If you create a mesh from a file (like NetCDF/ESMF-Mesh), coord_sys is ignored. The mesh’s coordinate system should be embedded in the file or inferred. 
self.esmf_grid = ESMF.Mesh( - filename=config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH + filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to create ESMF Mesh from geogrid file: " - + config_options.geogrid + + self.config_options.geogrid ) raise Exception # mpi_config.comm.barrier() - # Obtain the local boundaries for this processor. - self.get_processor_bounds(config_options) - # Place the local lat/lon grid slices from the parent geogrid file into # the ESMF lat/lon grids that have already been seperated by processors. try: @@ -999,7 +1216,7 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): varSubTmp = None varTmp = None except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to subset node latitudes from ESMF Mesh object" ) raise Exception @@ -1009,16 +1226,20 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): varSubTmp = None varTmp = None except Exception as e: - config_options.errMsg = ( + self.config_options.errMsg = ( "Unable to subset XLONG_M from geogrid file into ESMF Mesh object" ) raise Exception - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") # Get lat and lon global variables for pet extraction of indices - nodecoords_global = idTmp.variables[config_options.nodecoords_var][:].data - elementcoords_global = idTmp.variables[config_options.elemcoords_var][:].data + nodecoords_global = esmf_nc.variables[self.config_options.nodecoords_var][ + : + ].data + elementcoords_global = esmf_nc.variables[self.config_options.elemcoords_var][ + : + ].data # Find the corresponding local indices to slice global heights and slope # variables that are based on the partitioning on the unstructured mesh @@ -1047,48 +1268,52 @@ def initialize_destination_geo_unstructured(self, 
config_options, mpi_config): # accepting the pre-calculated slope and slope azmiuth variables if available, # otherwise calculate slope from height estimates # if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): - # self.cosa_grid = idTmp.variables[config_options.cosalpha_var][:].data[pet_node_inds] - # self.sina_grid = idTmp.variables[config_options.sinalpha_var][:].data[pet_node_inds] - # slopeTmp, slp_azi_tmp = self.calc_slope(idTmp,config_options) + # self.cosa_grid = esmf_nc.variables[config_options.cosalpha_var][:].data[pet_node_inds] + # self.sina_grid = esmf_nc.variables[config_options.sinalpha_var][:].data[pet_node_inds] + # slopeTmp, slp_azi_tmp = self.calc_slope(esmf_nc,config_options) # self.slope = slope_node_Tmp[pet_node_inds] # self.slp_azi = slp_azi_node_tmp[pet_node_inds] if ( - config_options.slope_var is not None - and config_options.slp_azi_var is not None + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None ): - self.slope = idTmp.variables[config_options.slope_var][:].data[ - pet_node_inds - ] - self.slp_azi = idTmp.variables[config_options.slope_azimuth_var][:].data[ + self.slope = esmf_nc.variables[self.config_options.slope_var][:].data[ pet_node_inds ] - self.slope_elem = idTmp.variables[config_options.slope_var_elem][:].data[ - pet_element_inds - ] - self.slp_azi_elem = idTmp.variables[config_options.slope_azimuth_var_elem][ + self.slp_azi = esmf_nc.variables[self.config_options.slope_azimuth_var][ + : + ].data[pet_node_inds] + self.slope_elem = esmf_nc.variables[self.config_options.slope_var_elem][ : ].data[pet_element_inds] + self.slp_azi_elem = esmf_nc.variables[ + self.config_options.slope_azimuth_var_elem + ][:].data[pet_element_inds] # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = idTmp.variables[config_options.hgt_var][:].data[pet_node_inds] - # Read in a scatter the mesh element elevation, which is used for 
downscaling purposes. - self.height_elem = idTmp.variables[config_options.hgt_elem_var][:].data[ - pet_element_inds + self.height = esmf_nc.variables[self.config_options.hgt_var][:].data[ + pet_node_inds ] + # Read in a scatter the mesh element elevation, which is used for downscaling purposes. + self.height_elem = esmf_nc.variables[self.config_options.hgt_elem_var][ + : + ].data[pet_element_inds] - elif config_options.hgt_var is not None: + elif self.config_options.hgt_var is not None: # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = idTmp.variables[config_options.hgt_var][:].data[pet_node_inds] + self.height = esmf_nc.variables[self.config_options.hgt_var][:].data[ + pet_node_inds + ] # Read in a scatter the mesh element elevation, which is used for downscaling purposes. - self.height_elem = idTmp.variables[config_options.hgt_elem_var][:].data[ - pet_element_inds - ] + self.height_elem = esmf_nc.variables[self.config_options.hgt_elem_var][ + : + ].data[pet_element_inds] # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will # be used for downscaling purposes. 
slope_node_Tmp, slp_azi_node_tmp, slope_elem_Tmp, slp_azi_elem_tmp = ( - self.calc_slope_unstructured(idTmp, config_options) + self.calc_slope_unstructured(esmf_nc, self.config_options) ) self.slope = slope_node_Tmp[pet_node_inds] @@ -1111,69 +1336,90 @@ def initialize_destination_geo_unstructured(self, config_options, mpi_config): pet_node_inds = None pet_element_inds = None - def calc_slope_unstructured(self, idTmp, config_options): + @property + def nx_local(self): + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[0][1]) + + @property + def ny_local(self): + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[0][1]) + + @property + def nx_local_elem(self): + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + @property + def ny_local_elem(self): + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + def calc_slope_unstructured(self, esmf_nc): """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. 
This calculates the slopes for both nodes and elements - :param idTmp: - :param config_options: + :param esmf_nc: :return: """ - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") try: - node_lons = idTmp.variables[config_options.nodecoords_var][:][:, 0] - node_lats = idTmp.variables[config_options.nodecoords_var][:][:, 1] + node_lons = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 0] + node_lats = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 1] except Exception as e: - config_options.errMsg = ( - "Unable to extract node coordinates in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract node coordinates in " + self.config_options.geogrid ) raise Exception try: - elem_lons = idTmp.variables[config_options.elemcoords_var][:][:, 0] - elem_lats = idTmp.variables[config_options.elemcoords_var][:][:, 1] + elem_lons = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 0] + elem_lats = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 1] except Exception as e: - config_options.errMsg = ( - "Unable to extract element coordinates in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract element coordinates in " + + self.config_options.geogrid ) raise Exception try: - elem_conn = idTmp.variables[config_options.elemconn_var][:][:, 0] + elem_conn = esmf_nc.variables[self.config_options.elemconn_var][:][:, 0] except Exception as e: - config_options.errMsg = ( - "Unable to extract element connectivity in " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract element connectivity in " + + self.config_options.geogrid ) raise Exception try: - node_heights = idTmp.variables[config_options.hgt_var][:] + node_heights = esmf_nc.variables[self.config_options.hgt_var][:] except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M from: " + config_options.geogrid + 
self.config_options.errMsg = ( + "Unable to extract HGT_M from: " + self.config_options.geogrid ) raise if node_heights.shape[0] != self.ny_global: - config_options.errMsg = ( - "HGT_M dimension mismatch in: " + config_options.geogrid + self.config_options.errMsg = ( + "HGT_M dimension mismatch in: " + self.config_options.geogrid ) raise Exception try: - elem_heights = idTmp.variables[config_options.hgt_elem_var][:] + elem_heights = esmf_nc.variables[self.config_options.hgt_elem_var][:] except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M_ELEM from: " + config_options.geogrid + self.config_options.errMsg = ( + "Unable to extract HGT_M_ELEM from: " + self.config_options.geogrid ) raise if elem_heights.shape[0] != len(elem_lons): - config_options.errMsg = ( - "HGT_M_ELEM dimension mismatch in: " + config_options.geogrid + self.config_options.errMsg = ( + "HGT_M_ELEM dimension mismatch in: " + self.config_options.geogrid ) raise Exception - idTmp.close() + esmf_nc.close() # calculate node coordinate distances in meters # based on general geospatial formula approximations @@ -1213,156 +1459,3 @@ def calc_slope_unstructured(self, idTmp, config_options): dz = None return slope_nodes, slp_azi_nodes, slope_elem, slp_azi_elem - - def initialize_destination_geo_hydrofabric(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. - - Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid - being processed to, along with the local parameters - for this particular processor. 
- :return: - """ - - if config_options.geogrid is not None: - # Phase 1: Rank 0 extracts all needed global data - if mpi_config.rank == 0: - try: - idTmp = nc_utils.nc_Dataset_retry( - mpi_config, - config_options, - err_handler, - config_options.geogrid, - "r", - ) - - # Extract everything we need with retries - tmp_vars = idTmp.variables - - if config_options.aws: - nodecoords_data = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.nodecoords_var], - ) - self.lat_bounds = nodecoords_data[:, 1] - self.lon_bounds = nodecoords_data[:, 0] - - # Store these for later broadcast/scatter - elementcoords_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.elemcoords_var], - ) - - self.nx_global = elementcoords_global.shape[0] - self.ny_global = self.nx_global - - element_ids_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.element_id_var], - ) - - heights_global = None - if config_options.hgt_var is not None: - heights_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.hgt_var], - ) - slopes_global = None - slp_azi_global = None - if config_options.slope_var is not None: - slopes_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.slope_var], - ) - if config_options.slope_azimuth_var is not None: - slp_azi_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.slope_azimuth_var], - ) - - except Exception as e: - LOG.critical( - f"Failed to open mesh file: {config_options.geogrid} " - f"due to {str(e)}" - ) - raise - finally: - idTmp.close() - else: - elementcoords_global = None - element_ids_global = None - heights_global = None - slopes_global = None - slp_azi_global = None - - # Broadcast dimensions - self.nx_global = 
mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int - ) - - mpi_config.comm.barrier() - - # Phase 2: Create ESMF Mesh (collective operation with retry) - try: - self.esmf_grid = esmf_utils.esmf_mesh_retry( - mpi_config, - config_options, - err_handler, - filename=config_options.geogrid, - filetype=ESMF.FileFormat.ESMFMESH, - ) - except Exception as e: - LOG.critical( - f"Unable to create ESMF Mesh: {config_options.geogrid} " - f"due to {str(e)}" - ) - raise - - # Get processor bounds - self.get_processor_bounds(config_options) - - # Extract local coordinates from ESMF mesh - self.latitude_grid = self.esmf_grid.coords[1][1] - self.longitude_grid = self.esmf_grid.coords[1][0] - - # Phase 3: Broadcast global arrays and compute local indices - elementcoords_global = mpi_config.comm.bcast(elementcoords_global, root=0) - element_ids_global = mpi_config.comm.bcast(element_ids_global, root=0) - - # Each rank computes its own local indices - pet_elementcoords = np.column_stack( - [self.longitude_grid, self.latitude_grid] - ) - tree = spatial.KDTree(elementcoords_global) - _, pet_element_inds = tree.query(pet_elementcoords) - - self.element_ids = element_ids_global[pet_element_inds] - self.element_ids_global = element_ids_global - - # Broadcast and extract height/slope data - if config_options.hgt_var is not None: - heights_global = mpi_config.comm.bcast(heights_global, root=0) - self.height = heights_global[pet_element_inds] - - if config_options.slope_var is not None: - slopes_global = mpi_config.comm.bcast(slopes_global, root=0) - slp_azi_global = mpi_config.comm.bcast(slp_azi_global, root=0) - self.slope = slopes_global[pet_element_inds] - self.slp_azi = slp_azi_global[pet_element_inds] - - self.mesh_inds = pet_element_inds From a3c94872dc353741d5926547a15074a2f582034e Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Tue, 17 Feb 2026 
10:30:58 -0600 Subject: [PATCH 04/34] perform check before initializing GeoMeta --- .../NextGen_Forcings_Engine/bmi_model.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 71e23349..2d287521 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -24,9 +24,12 @@ from mpi4py import MPI from NextGen_Forcings_Engine_BMI import esmf_creation, forcing_extraction +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, +) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import GEOGRID from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ConfigOptions + from .bmi_grid import Grid, GridType from .core import ( err_handler, @@ -251,14 +254,14 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # information about the modeling domain, local processor # grid boundaries, and ESMF grid objects/fields to be used # in regridding. + if self._job_meta.grid_type not in GEOGRID: + self._job_meta.errMsg = f"Invalid grid type specified: {self._job_meta.grid_type}. Valid options are: {list(GEOGRID.keys())}" + err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) + self._wrf_hydro_geo_meta = GEOGRID.get(self._job_meta.grid_type)( self._job_meta, self._mpi_meta ) - if self._wrf_hydro_geo_meta is None: - self._job_meta.errMsg = "You must specify a proper grid_type (gridded, unstructured, hydrofabric) in the config." 
- err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - # Assign grid type to BMI class for grid information self._grid_type = self._job_meta.grid_type.lower() @@ -884,9 +887,7 @@ def initialize_with_params( :raises ValueError: If an invalid grid type is specified, an exception is raised. """ # Set the job metadata parameters (b_date, geogrid) using config_options - self._job_meta = ConfigOptions( - self.cfg_bmi, b_date=b_date, geogrid_arg=geogrid - ) + self._job_meta = ConfigOptions(self.cfg_bmi, b_date=b_date, geogrid_arg=geogrid) # Now that _job_meta is set, call initialize() to set up the core model self.initialize(config_file, output_path=output_path) From ab19b1b0b1930638f3e223136037151cf0dc2228 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Tue, 17 Feb 2026 10:31:47 -0600 Subject: [PATCH 05/34] add type hints and correctly handle new properties/attributes --- .../NextGen_Forcings_Engine/core/geoMod.py | 51 +++++++++++-------- 1 file changed, 29 insertions(+), 22 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 514d2dba..e592e08e 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -29,7 +29,7 @@ class GeoMeta: - """Abstract class for handling information about the WRF-Hydro domain we are processing forcings too.""" + """Abstract class for handling information about the WRF-Hydro domain we are processing forcings to.""" def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMeta class variables.""" @@ -41,8 +41,6 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): self.ny_global_elem = None self.dx_meters = None self.dy_meters = None - self.nx_local_elem = None - self.ny_local_elem = None self.latitude_grid = None self.longitude_grid = None 
self.element_ids = None @@ -385,7 +383,7 @@ def calc_slope(self, esmf_nc): class GriddedGeoMeta(GeoMeta): - """Class for handling information about the gridded domain we are processing forcings too.""" + """Class for handling information about the gridded domain we are processing forcings to.""" def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMetaWrfHydro class variables. @@ -397,7 +395,8 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): :return: """ super().__init__(config_options, mpi_config) - + self.nx_local_elem = None + self.ny_local_elem = None # Open the geogrid file and extract necessary information # to create ESMF fields. if mpi_config.rank == 0: @@ -888,38 +887,38 @@ def calc_slope_gridded(self, esmf_nc): return slope, slp_azi @property - def x_lower_bound(self): + def x_lower_bound(self) -> int: """Get the local x lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] @property - def x_upper_bound(self): + def x_upper_bound(self) -> int: """Get the local x upper bound for this processor.""" return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] @property - def y_lower_bound(self): + def y_lower_bound(self) -> int: """Get the local y lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] @property - def y_upper_bound(self): + def y_upper_bound(self) -> int: """Get the local y upper bound for this processor.""" return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] @property - def nx_local(self): + def nx_local(self) -> int: """Get the local x dimension size for this processor.""" return self.x_upper_bound - self.x_lower_bound @property - def ny_local(self): + def ny_local(self) -> int: """Get the local y dimension size for this processor.""" return self.y_upper_bound - self.y_lower_bound class HydrofabricGeoMeta(GeoMeta): - """Class for handling information about the unstructured 
hydrofabric domain we are processing forcings too.""" + """Class for handling information about the unstructured hydrofabric domain we are processing forcings to.""" def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMetaWrfHydro class variables. @@ -931,7 +930,12 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): :return: """ super().__init__(config_options, mpi_config) - + self.nx_local_elem = None + self.ny_local_elem = None + self.x_lower_bound = None + self.x_upper_bound = None + self.y_lower_bound = None + self.y_upper_bound = None if self.config_options.geogrid is not None: # Phase 1: Rank 0 extracts all needed global data if self.mpi_config.rank == 0: @@ -1075,18 +1079,18 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): self.mesh_inds = pet_element_inds @property - def nx_local(self): + def nx_local(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) @property - def ny_local(self): + def ny_local(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) class UnstructuredGeoMeta(GeoMeta): - """Class for handling information about the unstructured domain we are processing forcings too.""" + """Class for handling information about the unstructured domain we are processing forcings to.""" def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """Initialize GeoMetaWrfHydro class variables. @@ -1099,6 +1103,10 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): """ super().__init__(config_options, mpi_config) + self.x_lower_bound = None + self.x_upper_bound = None + self.y_lower_bound = None + self.y_upper_bound = None # Open the geogrid file and extract necessary information # to create ESMF fields. 
if mpi_config.rank == 0: @@ -1337,31 +1345,30 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): pet_element_inds = None @property - def nx_local(self): + def nx_local(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[0][1]) @property - def ny_local(self): + def ny_local(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[0][1]) @property - def nx_local_elem(self): + def nx_local_elem(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) @property - def ny_local_elem(self): + def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) - def calc_slope_unstructured(self, esmf_nc): + def calc_slope_unstructured(self): """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. 
This calculates the slopes for both nodes and elements - :param esmf_nc: :return: """ esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") From 5b9dac45d93d0f50c2778b0f4ca39a817e1b2d66 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Tue, 17 Feb 2026 11:41:26 -0600 Subject: [PATCH 06/34] assume bounds are float instead of int --- .../NextGen_Forcings_Engine/core/geoMod.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index e592e08e..cf7f9903 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -887,22 +887,22 @@ def calc_slope_gridded(self, esmf_nc): return slope, slp_azi @property - def x_lower_bound(self) -> int: + def x_lower_bound(self) -> float: """Get the local x lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] @property - def x_upper_bound(self) -> int: + def x_upper_bound(self) -> float: """Get the local x upper bound for this processor.""" return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] @property - def y_lower_bound(self) -> int: + def y_lower_bound(self) -> float: """Get the local y lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] @property - def y_upper_bound(self) -> int: + def y_upper_bound(self) -> float: """Get the local y upper bound for this processor.""" return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] From 472936b21ba921cb4af8e1f750227698905da52b Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 18 Feb 2026 04:55:06 -0600 Subject: [PATCH 07/34] fix malformed function calls for slopes --- .../NextGen_Forcings_Engine/core/geoMod.py | 33 +++++++------------ 1 file changed, 12 insertions(+), 21 deletions(-) diff --git 
a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index cf7f9903..28e8065e 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -78,7 +78,6 @@ def initialize_geospatial_metadata(self): Function that will read in crs/x/y geospatial metadata and coordinates from the optional geospatial metadata file IF it was specified by the user in the configuration file. - :param config_options: :return: """ # We will only read information on processor 0. This data is not necessary for the @@ -249,13 +248,13 @@ def initialize_geospatial_metadata(self): # mpi_config.comm.barrier() - def calc_slope(self, esmf_nc): + def calc_slope(self, esmf_nc: netCDF4.Dataset) -> tuple: """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. - :param esmf_nc: - :return: + :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times + :return: A tuple containing slope and slope azimuth for nodes and elements """ # First extract the sina,cosa, and elevation variables from the geogrid file. try: @@ -720,9 +719,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # be used for downscaling purposes. 
if mpi_config.rank == 0: try: - slopeTmp, slp_azi_tmp = self.calc_slope( - esmf_nc, self.config_options - ) + slopeTmp, slp_azi_tmp = self.calc_slope(esmf_nc) except Exception: raise Exception else: @@ -776,9 +773,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # Calculate the slope from the domain using elevation of the gridded model and other approximations if mpi_config.rank == 0: try: - slopeTmp, slp_azi_tmp = self.calc_slope_gridded( - esmf_nc, self.config_options - ) + slopeTmp, slp_azi_tmp = self.calc_slope_gridded(esmf_nc) except Exception: raise Exception else: @@ -811,17 +806,14 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): slp_azi_tmp = None varTmp = None - def calc_slope_gridded(self, esmf_nc): + def calc_slope_gridded(self, esmf_nc: netCDF4.Dataset) -> tuple: """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. This calculates the slopes for grid cells - :param esmf_nc: - :param config_options: - :return: + :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times + :return: A tuple containing slope and slope azimuth for grid cells """ - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") - try: lons = esmf_nc.variables[self.config_options.lon_var][:] lats = esmf_nc.variables[self.config_options.lat_var][:] @@ -1321,7 +1313,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will # be used for downscaling purposes. 
slope_node_Tmp, slp_azi_node_tmp, slope_elem_Tmp, slp_azi_elem_tmp = ( - self.calc_slope_unstructured(esmf_nc, self.config_options) + self.calc_slope_unstructured(esmf_nc) ) self.slope = slope_node_Tmp[pet_node_inds] @@ -1364,15 +1356,14 @@ def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) - def calc_slope_unstructured(self): + def calc_slope_unstructured(self, esmf_nc: netCDF4.Dataset) -> tuple: """Calculate slope grids needed for incoming shortwave radiation downscaling. Function to calculate slope grids needed for incoming shortwave radiation downscaling later during the program. This calculates the slopes for both nodes and elements - :return: + :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times + :return: A tuple containing slope and slope azimuth for nodes and elements """ - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") - try: node_lons = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 0] node_lats = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 1] From 1246c6e630efdd4536d569157cdeee33457cdb2c Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 18 Feb 2026 04:59:41 -0600 Subject: [PATCH 08/34] linting --- .../NextGen_Forcings_Engine/core/geoMod.py | 625 +++++++----------- 1 file changed, 253 insertions(+), 372 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 28e8065e..e5b0995c 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -1,5 +1,4 @@ import math -from time import time import netCDF4 import numpy as np @@ -87,57 +86,36 @@ def initialize_geospatial_metadata(self): try: esmf_nc = netCDF4.Dataset(self.config_options.spatial_meta, "r") except 
Exception as e: - self.config_options.errMsg = ( - "Unable to open spatial metadata file: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to open spatial metadata file: {self.config_options.spatial_meta}" + raise e # Make sure the expected variables are present in the file. if "crs" not in esmf_nc.variables.keys(): - self.config_options.errMsg = ( - "Unable to locate crs variable in: " - + self.config_options.spatial_meta - ) + self.config_options.errMsg = f"Unable to locate crs variable in: {self.config_options.spatial_meta}" raise Exception if "x" not in esmf_nc.variables.keys(): - self.config_options.errMsg = ( - "Unable to locate x variable in: " - + self.config_options.spatial_meta - ) + self.config_options.errMsg = f"Unable to locate x variable in: {self.config_options.spatial_meta}" raise Exception if "y" not in esmf_nc.variables.keys(): - self.config_options.errMsg = ( - "Unable to locate y variable in: " - + self.config_options.spatial_meta - ) + self.config_options.errMsg = f"Unable to locate y variable in: {self.config_options.spatial_meta}" raise Exception # Extract names of variable attributes from each of the input geospatial variables. These # can change, so we are making this as flexible as possible to accomodate future changes. 
try: crs_att_names = esmf_nc.variables["crs"].ncattrs() except Exception as e: - self.config_options.errMsg = ( - "Unable to extract crs attribute names from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract crs attribute names from: {self.config_options.spatial_meta}" + raise e try: x_coord_att_names = esmf_nc.variables["x"].ncattrs() except Exception as e: - self.config_options.errMsg = ( - "Unable to extract x attribute names from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract x attribute names from: {self.config_options.spatial_meta}" + raise e try: y_coord_att_names = esmf_nc.variables["y"].ncattrs() except Exception as e: - self.config_options.errMsg = ( - "Unable to extract y attribute names from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract y attribute names from: {self.config_options.spatial_meta}" + raise e # Extract attribute values try: self.x_coord_atts = { @@ -145,72 +123,51 @@ def initialize_geospatial_metadata(self): for item in x_coord_att_names } except Exception as e: - self.config_options.errMsg = ( - "Unable to extract x coordinate attributes from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract x coordinate attributes from: {self.config_options.spatial_meta}" + raise e try: self.y_coord_atts = { item: esmf_nc.variables["y"].getncattr(item) for item in y_coord_att_names } except Exception as e: - self.config_options.errMsg = ( - "Unable to extract y coordinate attributes from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract y coordinate attributes from: {self.config_options.spatial_meta}" + raise e try: self.crs_atts = { item: esmf_nc.variables["crs"].getncattr(item) for item in crs_att_names } except Exception as e: - 
self.config_options.errMsg = ( - "Unable to extract crs coordinate attributes from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract crs coordinate attributes from: {self.config_options.spatial_meta}" + raise e # Extract global attributes try: global_att_names = esmf_nc.ncattrs() except Exception as e: - self.config_options.errMsg = ( - "Unable to extract global attribute names from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract global attribute names from: {self.config_options.spatial_meta}" + raise e try: self.spatial_global_atts = { item: esmf_nc.getncattr(item) for item in global_att_names } except Exception as e: - self.config_options.errMsg = ( - "Unable to extract global attributes from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract global attributes from: {self.config_options.spatial_meta}" + raise e # Extract x/y coordinate values if len(esmf_nc.variables["x"].shape) == 1: try: self.x_coords = esmf_nc.variables["x"][:].data except Exception as e: - self.config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract x coordinate values from: {self.config_options.spatial_meta}" + raise e try: self.y_coords = esmf_nc.variables["y"][:].data except Exception as e: - self.config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract y coordinate values from: {self.config_options.spatial_meta}" + raise e # Check to see if the Y coordinates are North-South. If so, flip them. 
if self.y_coords[1] < self.y_coords[0]: self.y_coords[:] = np.flip(self.y_coords[:], axis=0) @@ -219,19 +176,13 @@ def initialize_geospatial_metadata(self): try: self.x_coords = esmf_nc.variables["x"][:, :].data except Exception as e: - self.config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract x coordinate values from: {self.config_options.spatial_meta}" + raise e try: self.y_coords = esmf_nc.variables["y"][:, :].data except Exception as e: - self.config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to extract y coordinate values from: {self.config_options.spatial_meta}" + raise e # Check to see if the Y coordinates are North-South. If so, flip them. if self.y_coords[1, 0] > self.y_coords[0, 0]: self.y_coords[:, :] = np.flipud(self.y_coords[:, :]) @@ -240,11 +191,8 @@ def initialize_geospatial_metadata(self): try: esmf_nc.close() except Exception as e: - self.config_options.errMsg = ( - "Unable to close spatial metadata file: " - + self.config_options.spatial_meta - ) - raise Exception + self.config_options.errMsg = f"Unable to close spatial metadata file: {self.config_options.spatial_meta}" + raise e # mpi_config.comm.barrier() @@ -258,46 +206,46 @@ def calc_slope(self, esmf_nc: netCDF4.Dataset) -> tuple: """ # First extract the sina,cosa, and elevation variables from the geogrid file. 
try: - sinaGrid = esmf_nc.variables[self.config_options.sinalpha_var][0, :, :] + sina_grid = esmf_nc.variables[self.config_options.sinalpha_var][0, :, :] except Exception as e: self.config_options.errMsg = ( - "Unable to extract SINALPHA from: " + self.config_options.geogrid + f"Unable to extract SINALPHA from: {self.config_options.geogrid}" ) - raise + raise e try: - cosaGrid = esmf_nc.variables[self.config_options.cosalpha_var][0, :, :] + cosa_grid = esmf_nc.variables[self.config_options.cosalpha_var][0, :, :] except Exception as e: self.config_options.errMsg = ( - "Unable to extract COSALPHA from: " + self.config_options.geogrid + f"Unable to extract COSALPHA from: {self.config_options.geogrid}" ) - raise + raise e try: - heightDest = esmf_nc.variables[self.config_options.hgt_var][0, :, :] + height_dest = esmf_nc.variables[self.config_options.hgt_var][0, :, :] except Exception as e: self.config_options.errMsg = ( - "Unable to extract HGT_M from: " + self.config_options.geogrid + f"Unable to extract HGT_M from: {self.config_options.geogrid}" ) - raise + raise e # Ensure cosa/sina are correct dimensions - if sinaGrid.shape[0] != self.ny_global or sinaGrid.shape[1] != self.nx_global: + if sina_grid.shape[0] != self.ny_global or sina_grid.shape[1] != self.nx_global: self.config_options.errMsg = ( - "SINALPHA dimensions mismatch in: " + self.config_options.geogrid + f"SINALPHA dimensions mismatch in: {self.config_options.geogrid}" ) raise Exception - if cosaGrid.shape[0] != self.ny_global or cosaGrid.shape[1] != self.nx_global: + if cosa_grid.shape[0] != self.ny_global or cosa_grid.shape[1] != self.nx_global: self.config_options.errMsg = ( - "COSALPHA dimensions mismatch in: " + self.config_options.geogrid + f"COSALPHA dimensions mismatch in: {self.config_options.geogrid}" ) raise Exception if ( - heightDest.shape[0] != self.ny_global - or heightDest.shape[1] != self.nx_global + height_dest.shape[0] != self.ny_global + or height_dest.shape[1] != self.nx_global ): 
self.config_options.errMsg = ( - "HGT_M dimension mismatch in: " + self.config_options.geogrid + f"HGT_M dimension mismatch in: {self.config_options.geogrid}" ) raise Exception @@ -307,78 +255,78 @@ def calc_slope(self, esmf_nc: netCDF4.Dataset) -> tuple: msftx = 1.0 msfty = 1.0 - slopeOut = np.empty([self.ny_global, self.nx_global], np.float32) + slope_out = np.empty([self.ny_global, self.nx_global], np.float32) toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) - ipDiff = np.empty([self.ny_global, self.nx_global], np.int32) - jpDiff = np.empty([self.ny_global, self.nx_global], np.int32) + ip_diff = np.empty([self.ny_global, self.nx_global], np.int32) + jp_diff = np.empty([self.ny_global, self.nx_global], np.int32) hx = np.empty([self.ny_global, self.nx_global], np.float32) hy = np.empty([self.ny_global, self.nx_global], np.float32) # Create index arrays that will be used to calculate slope. 
- xTmp = np.arange(self.nx_global) - yTmp = np.arange(self.ny_global) - xGrid = np.tile(xTmp[:], (self.ny_global, 1)) - yGrid = np.repeat(yTmp[:, np.newaxis], self.nx_global, axis=1) - indOrig = np.where(heightDest == heightDest) - indIp1 = ((indOrig[0]), (indOrig[1] + 1)) - indIm1 = ((indOrig[0]), (indOrig[1] - 1)) - indJp1 = ((indOrig[0] + 1), (indOrig[1])) - indJm1 = ((indOrig[0] - 1), (indOrig[1])) - indIp1[1][np.where(indIp1[1] >= self.nx_global)] = self.nx_global - 1 - indJp1[0][np.where(indJp1[0] >= self.ny_global)] = self.ny_global - 1 - indIm1[1][np.where(indIm1[1] < 0)] = 0 - indJm1[0][np.where(indJm1[0] < 0)] = 0 - - ipDiff[indOrig] = xGrid[indIp1] - xGrid[indIm1] - jpDiff[indOrig] = yGrid[indJp1] - yGrid[indJm1] - - toposlpx[indOrig] = ( - (heightDest[indIp1] - heightDest[indIm1]) * msftx * rdx - ) / ipDiff[indOrig] - toposlpy[indOrig] = ( - (heightDest[indJp1] - heightDest[indJm1]) * msfty * rdy - ) / jpDiff[indOrig] - hx[indOrig] = toposlpx[indOrig] - hy[indOrig] = toposlpy[indOrig] - slopeOut[indOrig] = np.arctan((hx[indOrig] ** 2 + hy[indOrig] ** 2) ** 0.5) - slopeOut[np.where(slopeOut < 1e-4)] = 0.0 - slp_azi[np.where(slopeOut < 1e-4)] = 0.0 - indValesmf_nc = np.where(slopeOut >= 1e-4) - slp_azi[indValesmf_nc] = ( - np.arctan2(hx[indValesmf_nc], hy[indValesmf_nc]) + math.pi + x_tmp = np.arange(self.nx_global) + y_tmp = np.arange(self.ny_global) + x_grid = np.tile(x_tmp[:], (self.ny_global, 1)) + y_grid = np.repeat(y_tmp[:, np.newaxis], self.nx_global, axis=1) + ind_orig = np.where(height_dest == height_dest) + ind_ip1 = ((ind_orig[0]), (ind_orig[1] + 1)) + ind_im1 = ((ind_orig[0]), (ind_orig[1] - 1)) + ind_jp1 = ((ind_orig[0] + 1), (ind_orig[1])) + ind_jm1 = ((ind_orig[0] - 1), (ind_orig[1])) + ind_ip1[1][np.where(ind_ip1[1] >= self.nx_global)] = self.nx_global - 1 + ind_jp1[0][np.where(ind_jp1[0] >= self.ny_global)] = self.ny_global - 1 + ind_im1[1][np.where(ind_im1[1] < 0)] = 0 + ind_jm1[0][np.where(ind_jm1[0] < 0)] = 0 + + ip_diff[ind_orig] = 
x_grid[ind_ip1] - x_grid[ind_im1] + jp_diff[ind_orig] = y_grid[ind_jp1] - y_grid[ind_jm1] + + toposlpx[ind_orig] = ( + (height_dest[ind_ip1] - height_dest[ind_im1]) * msftx * rdx + ) / ip_diff[ind_orig] + toposlpy[ind_orig] = ( + (height_dest[ind_jp1] - height_dest[ind_jm1]) * msfty * rdy + ) / jp_diff[ind_orig] + hx[ind_orig] = toposlpx[ind_orig] + hy[ind_orig] = toposlpy[ind_orig] + slope_out[ind_orig] = np.arctan((hx[ind_orig] ** 2 + hy[ind_orig] ** 2) ** 0.5) + slope_out[np.where(slope_out < 1e-4)] = 0.0 + slp_azi[np.where(slope_out < 1e-4)] = 0.0 + ind_valesmf_nc = np.where(slope_out >= 1e-4) + slp_azi[ind_valesmf_nc] = ( + np.arctan2(hx[ind_valesmf_nc], hy[ind_valesmf_nc]) + math.pi ) - indValesmf_nc = np.where(cosaGrid >= 0.0) - slp_azi[indValesmf_nc] = slp_azi[indValesmf_nc] - np.arcsin( - sinaGrid[indValesmf_nc] + ind_valesmf_nc = np.where(cosa_grid >= 0.0) + slp_azi[ind_valesmf_nc] = slp_azi[ind_valesmf_nc] - np.arcsin( + sina_grid[ind_valesmf_nc] ) - indValesmf_nc = np.where(cosaGrid < 0.0) - slp_azi[indValesmf_nc] = slp_azi[indValesmf_nc] - ( - math.pi - np.arcsin(sinaGrid[indValesmf_nc]) + ind_valesmf_nc = np.where(cosa_grid < 0.0) + slp_azi[ind_valesmf_nc] = slp_azi[ind_valesmf_nc] - ( + math.pi - np.arcsin(sina_grid[ind_valesmf_nc]) ) # Reset temporary arrays to None to free up memory toposlpx = None toposlpy = None - heightDest = None - sinaGrid = None - cosaGrid = None - indValesmf_nc = None - xTmp = None - yTmp = None - xGrid = None - ipDiff = None - jpDiff = None - indOrig = None - indJm1 = None - indJp1 = None - indIm1 = None - indIp1 = None + height_dest = None + sina_grid = None + cosa_grid = None + ind_valesmf_nc = None + x_tmp = None + y_tmp = None + x_grid = None + ip_diff = None + jp_diff = None + ind_orig = None + ind_jm1 = None + ind_jp1 = None + ind_im1 = None + ind_ip1 = None hx = None hy = None - return slopeOut, slp_azi + return slope_out, slp_azi class GriddedGeoMeta(GeoMeta): @@ -402,91 +350,64 @@ def __init__(self, config_options: 
ConfigOptions, mpi_config: MpiConfig): try: esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") except Exception as e: - self.config_options.errMsg = ( - "Unable to open the WRF-Hydro geogrid file: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to open the WRF-Hydro geogrid file: {self.config_options.geogrid}" + raise e if esmf_nc.variables[self.config_options.lat_var].ndim == 3: try: self.nx_global = esmf_nc.variables[ self.config_options.lat_var ].shape[2] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract X dimension size from latitude variable in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract X dimension size from latitude variable in: {self.config_options.geogrid}" + raise e try: self.ny_global = esmf_nc.variables[ self.config_options.lat_var ].shape[1] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract Y dimension size from latitude in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" + raise e try: self.dx_meters = esmf_nc.DX except Exception as e: - self.config_options.errMsg = ( - "Unable to extract DX global attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract DX global attribute in: {self.config_options.geogrid}" + raise e try: self.dy_meters = esmf_nc.DY except Exception as e: - self.config_options.errMsg = ( - "Unable to extract DY global attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" + raise e elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: try: self.nx_global = esmf_nc.variables[ self.config_options.lat_var ].shape[1] except Exception as e: - 
self.config_options.errMsg = ( - "Unable to extract X dimension size from latitude variable in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract X dimension size from latitude variable in: {self.config_options.geogrid}" + raise e try: self.ny_global = esmf_nc.variables[ self.config_options.lat_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract Y dimension size from latitude in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" + raise e try: self.dx_meters = esmf_nc.variables[self.config_options.lon_var].dx except Exception as e: - self.config_options.errMsg = ( - "Unable to extract DX global attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract DX global attribute in: {self.config_options.geogrid}" + raise e try: self.dy_meters = esmf_nc.variables[self.config_options.lat_var].dy except Exception as e: - self.config_options.errMsg = ( - "Unable to extract DY global attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" + raise e else: try: @@ -494,44 +415,32 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): self.config_options.lon_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract X dimension size from longitude variable in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract X dimension size from longitude variable in: {self.config_options.geogrid}" + raise e try: self.ny_global = esmf_nc.variables[ self.config_options.lat_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract Y dimension size from 
latitude in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" + raise e if self.config_options.input_forcings[0] != 23: try: self.dx_meters = esmf_nc.variables[ self.config_options.lon_var ].dx except Exception as e: - self.config_options.errMsg = ( - "Unable to extract dx metadata attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract dx metadata attribute in: {self.config_options.geogrid}" + raise e try: self.dy_meters = esmf_nc.variables[ self.config_options.lat_var ].dy except Exception as e: - self.config_options.errMsg = ( - "Unable to extract dy metadata attribute in: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract dy metadata attribute in: {self.config_options.geogrid}" + raise e else: # Manually input the grid spacing since ERA5-Interim does not # internally have this geospatial information within the netcdf file @@ -563,11 +472,8 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): coord_sys=ESMF.CoordSys.SPH_DEG, ) except Exception as e: - self.config_options.errMsg = ( - "Unable to create ESMF grid for WRF-Hydro geogrid: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to create ESMF grid for WRF-Hydro geogrid: {self.config_options.geogrid}" + raise e # mpi_config.comm.barrier() @@ -579,76 +485,76 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # Scatter global XLAT_M grid to processors.. 
if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.lat_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.lat_var][0, :, :] + var_tmp = esmf_nc.variables[self.config_options.lat_var][0, :, :] elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: - varTmp = esmf_nc.variables[self.config_options.lat_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.lat_var][:, :] elif esmf_nc.variables[self.config_options.lat_var].ndim == 1: lat = esmf_nc.variables[self.config_options.lat_var][:] lon = esmf_nc.variables[self.config_options.lon_var][:] - varTmp = np.meshgrid(lon, lat)[1] + var_tmp = np.meshgrid(lon, lat)[1] lat = None lon = None # Flag to grab entire array for AWS slicing if self.config_options.aws: - self.lat_bounds = varTmp + self.lat_bounds = var_tmp else: - varTmp = None + var_tmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) + var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) # mpi_config.comm.barrier() # Place the local lat/lon grid slices from the parent geogrid file into # the ESMF lat/lon grids. try: - self.esmf_lat[:, :] = varSubTmp - self.latitude_grid = varSubTmp - varSubTmp = None - varTmp = None + self.esmf_lat[:, :] = var_sub_tmp + self.latitude_grid = var_sub_tmp + var_sub_tmp = None + var_tmp = None except Exception as e: self.config_options.errMsg = ( "Unable to subset latitude from geogrid file into ESMF object" ) - raise Exception + raise e # mpi_config.comm.barrier() # Scatter global XLONG_M grid to processors.. 
if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.lat_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.lon_var][0, :, :] + var_tmp = esmf_nc.variables[self.config_options.lon_var][0, :, :] elif esmf_nc.variables[self.config_options.lon_var].ndim == 2: - varTmp = esmf_nc.variables[self.config_options.lon_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.lon_var][:, :] elif esmf_nc.variables[self.config_options.lon_var].ndim == 1: lat = esmf_nc.variables[self.config_options.lat_var][:] lon = esmf_nc.variables[self.config_options.lon_var][:] - varTmp = np.meshgrid(lon, lat)[0] + var_tmp = np.meshgrid(lon, lat)[0] lat = None lon = None # Flag to grab entire array for AWS slicing if self.config_options.aws: - self.lon_bounds = varTmp + self.lon_bounds = var_tmp else: - varTmp = None + var_tmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) + var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) # mpi_config.comm.barrier() try: - self.esmf_lon[:, :] = varSubTmp - self.longitude_grid = varSubTmp - varSubTmp = None - varTmp = None + self.esmf_lon[:, :] = var_sub_tmp + self.longitude_grid = var_sub_tmp + var_sub_tmp = None + var_tmp = None except Exception as e: self.config_options.errMsg = ( "Unable to subset longitude from geogrid file into ESMF object" ) - raise Exception + raise e # mpi_config.comm.barrier() @@ -659,57 +565,57 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # Scatter the COSALPHA,SINALPHA grids to the processors. 
if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.cosalpha_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.cosalpha_var][ + var_tmp = esmf_nc.variables[self.config_options.cosalpha_var][ 0, :, : ] else: - varTmp = esmf_nc.variables[self.config_options.cosalpha_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.cosalpha_var][:, :] else: - varTmp = None + var_tmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) + var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) # mpi_config.comm.barrier() - self.cosa_grid = varSubTmp[:, :] - varSubTmp = None - varTmp = None + self.cosa_grid = var_sub_tmp[:, :] + var_sub_tmp = None + var_tmp = None if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.sinalpha_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.sinalpha_var][ + var_tmp = esmf_nc.variables[self.config_options.sinalpha_var][ 0, :, : ] else: - varTmp = esmf_nc.variables[self.config_options.sinalpha_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.sinalpha_var][:, :] else: - varTmp = None + var_tmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) + var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) # mpi_config.comm.barrier() - self.sina_grid = varSubTmp[:, :] - varSubTmp = None - varTmp = None + self.sina_grid = var_sub_tmp[:, :] + var_sub_tmp = None + var_tmp = None if self.config_options.hgt_var is not None: # Read in a scatter the WRF-Hydro elevation, which is used for downscaling # purposes. 
if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.hgt_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.hgt_var][0, :, :] + var_tmp = esmf_nc.variables[self.config_options.hgt_var][0, :, :] else: - varTmp = esmf_nc.variables[self.config_options.hgt_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.hgt_var][:, :] else: - varTmp = None + var_tmp = None # mpi_config.comm.barrier() - varSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) + var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) # mpi_config.comm.barrier() - self.height = varSubTmp - varSubTmp = None - varTmp = None + self.height = var_sub_tmp + var_sub_tmp = None + var_tmp = None if ( self.config_options.cosalpha_var is not None @@ -719,17 +625,19 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # be used for downscaling purposes. if mpi_config.rank == 0: try: - slopeTmp, slp_azi_tmp = self.calc_slope(esmf_nc) + slope_tmp, slp_azi_tmp = self.calc_slope(esmf_nc) except Exception: raise Exception else: - slopeTmp = None + slope_tmp = None slp_azi_tmp = None # mpi_config.comm.barrier() - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, self.config_options) - self.slope = slopeSubTmp[:, :] - slopeSubTmp = None + slope_sub_tmp = mpi_config.scatter_array( + self, slope_tmp, self.config_options + ) + self.slope = slope_sub_tmp[:, :] + slope_sub_tmp = None slp_azi_sub = mpi_config.scatter_array( self, slp_azi_tmp, self.config_options @@ -743,47 +651,49 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): ): if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.slope_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.slope_var][0, :, :] + var_tmp = esmf_nc.variables[self.config_options.slope_var][0, :, :] else: - varTmp = esmf_nc.variables[self.config_options.slope_var][:, :] + var_tmp = esmf_nc.variables[self.config_options.slope_var][:, :] else: - 
varTmp = None + var_tmp = None - slopeSubTmp = mpi_config.scatter_array(self, varTmp, self.config_options) - self.slope = slopeSubTmp - varTmp = None + slope_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) + self.slope = slope_sub_tmp + var_tmp = None if mpi_config.rank == 0: if esmf_nc.variables[self.config_options.slope_azimuth_var].ndim == 3: - varTmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ + var_tmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ 0, :, : ] else: - varTmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ + var_tmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ :, : ] else: - varTmp = None + var_tmp = None - slp_azi_sub = mpi_config.scatter_array(self, varTmp, self.config_options) + slp_azi_sub = mpi_config.scatter_array(self, var_tmp, self.config_options) self.slp_azi = slp_azi_sub[:, :] - varTmp = None + var_tmp = None elif self.config_options.hgt_var is not None: # Calculate the slope from the domain using elevation of the gridded model and other approximations if mpi_config.rank == 0: try: - slopeTmp, slp_azi_tmp = self.calc_slope_gridded(esmf_nc) + slope_tmp, slp_azi_tmp = self.calc_slope_gridded(esmf_nc) except Exception: raise Exception else: - slopeTmp = None + slope_tmp = None slp_azi_tmp = None # mpi_config.comm.barrier() - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, self.config_options) - self.slope = slopeSubTmp[:, :] - slopeSubTmp = None + slope_sub_tmp = mpi_config.scatter_array( + self, slope_tmp, self.config_options + ) + self.slope = slope_sub_tmp[:, :] + slope_sub_tmp = None slp_azi_sub = mpi_config.scatter_array( self, slp_azi_tmp, self.config_options @@ -797,14 +707,14 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): esmf_nc.close() except Exception as e: self.config_options.errMsg = ( - "Unable to close geogrid file: " + self.config_options.geogrid + f"Unable to close geogrid file: 
{self.config_options.geogrid}" ) - raise Exception + raise e # Reset temporary variables to free up memory - slopeTmp = None + slope_tmp = None slp_azi_tmp = None - varTmp = None + var_tmp = None def calc_slope_gridded(self, esmf_nc: netCDF4.Dataset) -> tuple: """Calculate slope grids needed for incoming shortwave radiation downscaling. @@ -814,15 +724,14 @@ def calc_slope_gridded(self, esmf_nc: netCDF4.Dataset) -> tuple: :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times :return: A tuple containing slope and slope azimuth for grid cells """ + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") + try: lons = esmf_nc.variables[self.config_options.lon_var][:] lats = esmf_nc.variables[self.config_options.lat_var][:] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract gridded coordinates in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract gridded coordinates in {self.config_options.geogrid}" + raise e try: dx = np.empty( ( @@ -841,19 +750,13 @@ def calc_slope_gridded(self, esmf_nc: netCDF4.Dataset) -> tuple: dx[:] = esmf_nc.variables[self.config_options.lon_var].dx dy[:] = esmf_nc.variables[self.config_options.lat_var].dy except Exception as e: - self.config_options.errMsg = ( - "Unable to extract dx and dy distances in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract dx and dy distances in {self.config_options.geogrid}" + raise e try: heights = esmf_nc.variables[self.config_options.hgt_var][:] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract heights of grid cells in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract heights of grid cells in {self.config_options.geogrid}" + raise e esmf_nc.close() @@ -1035,7 +938,7 @@ def __init__(self, config_options: ConfigOptions, 
mpi_config: MpiConfig): f"Unable to create ESMF Mesh: {self.config_options.geogrid} " f"due to {str(e)}" ) - raise + raise e # Extract local coordinates from ESMF mesh self.latitude_grid = self.esmf_grid.coords[1][1] @@ -1105,55 +1008,40 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): try: esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") except Exception as e: - self.config_options.errMsg = ( - "Unable to open the unstructured mesh file: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to open the unstructured mesh file: {self.config_options.geogrid}" + raise e try: self.nx_global = esmf_nc.variables[ self.config_options.nodecoords_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract X dimension size in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract X dimension size in {self.config_options.geogrid}" + raise e try: self.ny_global = esmf_nc.variables[ self.config_options.nodecoords_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract Y dimension size in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract Y dimension size in {self.config_options.geogrid}" + raise e try: self.nx_global_elem = esmf_nc.variables[ self.config_options.elemcoords_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract X dimension size in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract X dimension size in {self.config_options.geogrid}" + raise e try: self.ny_global_elem = esmf_nc.variables[ self.config_options.elemcoords_var ].shape[0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract Y dimension size in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = 
f"Unable to extract Y dimension size in {self.config_options.geogrid}" + raise e # Flag to grab entire array for AWS slicing if self.config_options.aws: @@ -1188,9 +1076,9 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): esmf_nc.close() except Exception as e: self.config_options.errMsg = ( - "Unable to close geogrid Mesh file: " + self.config_options.geogrid + f"Unable to close geogrid Mesh file: {self.config_options.geogrid}" ) - raise Exception + raise e try: # Removed argument coord_sys=ESMF.CoordSys.SPH_DEG since we are always reading from a file @@ -1200,11 +1088,8 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) except Exception as e: - self.config_options.errMsg = ( - "Unable to create ESMF Mesh from geogrid file: " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to create ESMF Mesh from geogrid file: {self.config_options.geogrid}" + raise e # mpi_config.comm.barrier() @@ -1213,23 +1098,23 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): try: self.latitude_grid = self.esmf_grid.coords[0][1] self.latitude_grid_elem = self.esmf_grid.coords[1][1] - varSubTmp = None - varTmp = None + var_sub_tmp = None + var_tmp = None except Exception as e: self.config_options.errMsg = ( "Unable to subset node latitudes from ESMF Mesh object" ) - raise Exception + raise e try: self.longitude_grid = self.esmf_grid.coords[0][0] self.longitude_grid_elem = self.esmf_grid.coords[1][0] - varSubTmp = None - varTmp = None + var_sub_tmp = None + var_tmp = None except Exception as e: self.config_options.errMsg = ( "Unable to subset XLONG_M from geogrid file into ESMF Mesh object" ) - raise Exception + raise e esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") @@ -1270,8 +1155,8 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # 
if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): # self.cosa_grid = esmf_nc.variables[config_options.cosalpha_var][:].data[pet_node_inds] # self.sina_grid = esmf_nc.variables[config_options.sinalpha_var][:].data[pet_node_inds] - # slopeTmp, slp_azi_tmp = self.calc_slope(esmf_nc,config_options) - # self.slope = slope_node_Tmp[pet_node_inds] + # slope_tmp, slp_azi_tmp = self.calc_slope(esmf_nc,config_options) + # self.slope = slope_node_tmp[pet_node_inds] # self.slp_azi = slp_azi_node_tmp[pet_node_inds] if ( self.config_options.slope_var is not None @@ -1312,18 +1197,18 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will # be used for downscaling purposes. - slope_node_Tmp, slp_azi_node_tmp, slope_elem_Tmp, slp_azi_elem_tmp = ( + slope_node_tmp, slp_azi_node_tmp, slope_elem_tmp, slp_azi_elem_tmp = ( self.calc_slope_unstructured(esmf_nc) ) - self.slope = slope_node_Tmp[pet_node_inds] - slope_node_Tmp = None + self.slope = slope_node_tmp[pet_node_inds] + slope_node_tmp = None self.slp_azi = slp_azi_node_tmp[pet_node_inds] - slp_azi__node_tmp = None + slp_azi_node_tmp = None - self.slope_elem = slope_elem_Tmp[pet_element_inds] - slope_elem_Tmp = None + self.slope_elem = slope_elem_tmp[pet_element_inds] + slope_elem_tmp = None self.slp_azi_elem = slp_azi_elem_tmp[pet_element_inds] slp_azi_elem_tmp = None @@ -1364,42 +1249,38 @@ def calc_slope_unstructured(self, esmf_nc: netCDF4.Dataset) -> tuple: :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times :return: A tuple containing slope and slope azimuth for nodes and elements """ + esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") + try: node_lons = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 0] node_lats = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 1] except Exception 
as e: self.config_options.errMsg = ( - "Unable to extract node coordinates in " + self.config_options.geogrid + f"Unable to extract node coordinates in {self.config_options.geogrid}" ) - raise Exception + raise e try: elem_lons = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 0] elem_lats = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 1] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract element coordinates in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract element coordinates in {self.config_options.geogrid}" + raise e try: elem_conn = esmf_nc.variables[self.config_options.elemconn_var][:][:, 0] except Exception as e: - self.config_options.errMsg = ( - "Unable to extract element connectivity in " - + self.config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to extract element connectivity in {self.config_options.geogrid}" + raise e try: node_heights = esmf_nc.variables[self.config_options.hgt_var][:] except Exception as e: self.config_options.errMsg = ( - "Unable to extract HGT_M from: " + self.config_options.geogrid + f"Unable to extract HGT_M from: {self.config_options.geogrid}" ) - raise + raise e if node_heights.shape[0] != self.ny_global: self.config_options.errMsg = ( - "HGT_M dimension mismatch in: " + self.config_options.geogrid + f"HGT_M dimension mismatch in: {self.config_options.geogrid}" ) raise Exception @@ -1407,13 +1288,13 @@ def calc_slope_unstructured(self, esmf_nc: netCDF4.Dataset) -> tuple: elem_heights = esmf_nc.variables[self.config_options.hgt_elem_var][:] except Exception as e: self.config_options.errMsg = ( - "Unable to extract HGT_M_ELEM from: " + self.config_options.geogrid + f"Unable to extract HGT_M_ELEM from: {self.config_options.geogrid}" ) - raise + raise e if elem_heights.shape[0] != len(elem_lons): self.config_options.errMsg = ( - "HGT_M_ELEM dimension mismatch in: " + 
self.config_options.geogrid + f"HGT_M_ELEM dimension mismatch in: {self.config_options.geogrid}" ) raise Exception From ccc84a39ff314c9507744a87c80cde5846b28153 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Sun, 22 Feb 2026 15:11:47 -0600 Subject: [PATCH 09/34] modularize the classes in geoMod.py --- .../NextGen_Forcings_Engine/bmi_model.py | 9 +- .../NextGen_Forcings_Engine/core/consts.py | 97 +- .../NextGen_Forcings_Engine/core/geoMod.py | 2214 +++++++++-------- 3 files changed, 1258 insertions(+), 1062 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 2d287521..3a6f0174 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -27,7 +27,7 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import GEOGRID +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import GEOGRID from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from .bmi_grid import Grid, GridType @@ -751,13 +751,6 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: for long_name in self._var_name_units_map.keys() } - if self._job_meta.spatial_meta is not None: - try: - self._wrf_hydro_geo_meta.initialize_geospatial_metadata() - except Exception as e: - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - err_handler.check_program_status(self._job_meta, self._mpi_meta) - # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids # to have a size of at least 2. 
if ( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 05759781..cc54e8d9 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -1,11 +1,88 @@ -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GriddedGeoMeta, - HydrofabricGeoMeta, - UnstructuredGeoMeta, -) - -GEOGRID = { - "gridded": GriddedGeoMeta, - "unstructured": UnstructuredGeoMeta, - "hydrofabric": HydrofabricGeoMeta, +CONSTS = { + "geoMod": { + "GeoMeta": [ + # "nx_global", + # "ny_global", + # "nx_global_elem", + # "ny_global_elem", + # "dx_meters", + # "dy_meters", + # "latitude_grid", + # "longitude_grid", + # "element_ids", + # "element_ids_global", + # "latitude_grid_elem", + # "longitude_grid_elem", + # "lat_bounds", + # "lon_bounds", + # "mesh_inds", + # "mesh_inds_elem", + # "height", + # "height_elem", + # "sina_grid", + # "cosa_grid", + "nodeCoords", + "centerCoords", + "inds", + # "slope", + # "slp_azi", + # "slope_elem", + # "slp_azi_elem", + # "esmf_grid", + "esmf_lat", + "esmf_lon", + ], + "handle_exception": { + "esmf_nc": "Unable to open spatial metadata file: :::arg:::", + }, + "UnstructuredGeoMeta": [ + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "dx_meters", + "dy_meters", + "element_ids", + "element_ids_global", + "sina_grid", + "cosa_grid", + "esmf_lat", + "esmf_lon", + ], + "HydrofabricGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "nx_global_elem", + "ny_global_elem", + "dx_meters", + "dy_meters", + "mesh_inds_elem", + "height_elem", + "sina_grid", + "cosa_grid", + "slope_elem", + "slp_azi_elem", + "esmf_lat", + "esmf_lon", + ], + "GriddedGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "nx_global_elem", + "ny_global_elem", + 
"element_ids", + "element_ids_global", + "lat_bounds", + "lon_bounds", + "mesh_inds", + "mesh_inds_elem", + "height_elem", + "slope_elem", + "slp_azi_elem", + ], + }, } diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index e5b0995c..4afb6bc5 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -1,6 +1,6 @@ import math +from pathlib import Path -import netCDF4 import numpy as np # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: @@ -8,332 +8,282 @@ import shapely from scipy import spatial -from .. import esmf_utils, nc_utils -from . import err_handler - try: import esmpy as ESMF except ImportError: import ESMF import logging +from functools import lru_cache, wraps +from typing import Any + +import xarray as xr from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) +CONSTS = CONSTS[Path(__file__).stem] -class GeoMeta: - """Abstract class for handling information about the WRF-Hydro domain we are processing forcings to.""" +def set_none(func) -> Any: + """Set the output of a function to None if an exception is raised.""" - def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): - """Initialize GeoMeta class variables.""" - self.config_options = config_options - self.mpi_config = mpi_config - self.nx_global = None - self.ny_global = None - self.nx_global_elem = None - self.ny_global_elem = None - self.dx_meters = None - self.dy_meters = None - self.latitude_grid = None - 
self.longitude_grid = None - self.element_ids = None - self.element_ids_global = None - self.latitude_grid_elem = None - self.longitude_grid_elem = None - self.lat_bounds = None - self.lon_bounds = None - self.mesh_inds = None - self.mesh_inds_elem = None - self.height = None - self.height_elem = None - self.sina_grid = None - self.cosa_grid = None - self.nodeCoords = None - self.centerCoords = None - self.inds = None - self.slope = None - self.slp_azi = None - self.slope_elem = None - self.slp_azi_elem = None - self.esmf_grid = None - self.esmf_lat = None - self.esmf_lon = None - self.crs_atts = None - self.x_coord_atts = None - self.x_coords = None - self.y_coord_atts = None - self.y_coords = None - self.spatial_global_atts = None - - def initialize_geospatial_metadata(self): - """Initialize GeoMetaWrfHydro class variables. - - Function that will read in crs/x/y geospatial metadata and coordinates - from the optional geospatial metadata file IF it was specified by the user in - the configuration file. - :return: - """ - # We will only read information on processor 0. This data is not necessary for the - # other processors, and is only used in the output routines. - if self.mpi_config.rank == 0: - # Open the geospatial metadata file. - try: - esmf_nc = netCDF4.Dataset(self.config_options.spatial_meta, "r") - except Exception as e: - self.config_options.errMsg = f"Unable to open spatial metadata file: {self.config_options.spatial_meta}" - raise e + @wraps(func) + def wrapper(self) -> Any: + """Set the output of a function to None if an exception is raised.""" + if self.spatial_metadata_exists: + return func(self) + else: + return None - # Make sure the expected variables are present in the file. 
- if "crs" not in esmf_nc.variables.keys(): - self.config_options.errMsg = f"Unable to locate crs variable in: {self.config_options.spatial_meta}" - raise Exception - if "x" not in esmf_nc.variables.keys(): - self.config_options.errMsg = f"Unable to locate x variable in: {self.config_options.spatial_meta}" - raise Exception - if "y" not in esmf_nc.variables.keys(): - self.config_options.errMsg = f"Unable to locate y variable in: {self.config_options.spatial_meta}" - raise Exception - # Extract names of variable attributes from each of the input geospatial variables. These - # can change, so we are making this as flexible as possible to accomodate future changes. - try: - crs_att_names = esmf_nc.variables["crs"].ncattrs() - except Exception as e: - self.config_options.errMsg = f"Unable to extract crs attribute names from: {self.config_options.spatial_meta}" - raise e - try: - x_coord_att_names = esmf_nc.variables["x"].ncattrs() - except Exception as e: - self.config_options.errMsg = f"Unable to extract x attribute names from: {self.config_options.spatial_meta}" - raise e - try: - y_coord_att_names = esmf_nc.variables["y"].ncattrs() - except Exception as e: - self.config_options.errMsg = f"Unable to extract y attribute names from: {self.config_options.spatial_meta}" - raise e - # Extract attribute values - try: - self.x_coord_atts = { - item: esmf_nc.variables["x"].getncattr(item) - for item in x_coord_att_names - } - except Exception as e: - self.config_options.errMsg = f"Unable to extract x coordinate attributes from: {self.config_options.spatial_meta}" - raise e - try: - self.y_coord_atts = { - item: esmf_nc.variables["y"].getncattr(item) - for item in y_coord_att_names - } - except Exception as e: - self.config_options.errMsg = f"Unable to extract y coordinate attributes from: {self.config_options.spatial_meta}" - raise e - try: - self.crs_atts = { - item: esmf_nc.variables["crs"].getncattr(item) - for item in crs_att_names - } - except Exception as e: - 
self.config_options.errMsg = f"Unable to extract crs coordinate attributes from: {self.config_options.spatial_meta}" - raise e + return wrapper - # Extract global attributes - try: - global_att_names = esmf_nc.ncattrs() - except Exception as e: - self.config_options.errMsg = f"Unable to extract global attribute names from: {self.config_options.spatial_meta}" - raise e - try: - self.spatial_global_atts = { - item: esmf_nc.getncattr(item) for item in global_att_names - } - except Exception as e: - self.config_options.errMsg = f"Unable to extract global attributes from: {self.config_options.spatial_meta}" - raise e - # Extract x/y coordinate values - if len(esmf_nc.variables["x"].shape) == 1: - try: - self.x_coords = esmf_nc.variables["x"][:].data - except Exception as e: - self.config_options.errMsg = f"Unable to extract x coordinate values from: {self.config_options.spatial_meta}" - raise e - try: - self.y_coords = esmf_nc.variables["y"][:].data - except Exception as e: - self.config_options.errMsg = f"Unable to extract y coordinate values from: {self.config_options.spatial_meta}" - raise e - # Check to see if the Y coordinates are North-South. If so, flip them. - if self.y_coords[1] < self.y_coords[0]: - self.y_coords[:] = np.flip(self.y_coords[:], axis=0) - - if len(esmf_nc.variables["x"].shape) == 2: - try: - self.x_coords = esmf_nc.variables["x"][:, :].data - except Exception as e: - self.config_options.errMsg = f"Unable to extract x coordinate values from: {self.config_options.spatial_meta}" - raise e - try: - self.y_coords = esmf_nc.variables["y"][:, :].data - except Exception as e: - self.config_options.errMsg = f"Unable to extract y coordinate values from: {self.config_options.spatial_meta}" - raise e - # Check to see if the Y coordinates are North-South. If so, flip them. - if self.y_coords[1, 0] > self.y_coords[0, 0]: - self.y_coords[:, :] = np.flipud(self.y_coords[:, :]) - - # Close the geospatial metadata file. 
- try: - esmf_nc.close() - except Exception as e: - self.config_options.errMsg = f"Unable to close spatial metadata file: {self.config_options.spatial_meta}" - raise e +def broadcast(func) -> Any: + """Broadcast the output of a function to all processors.""" - # mpi_config.comm.barrier() + @wraps(func) + def wrapper(self) -> Any: + """Broadcast the output of a function to all processors.""" + result = func(self) + return self.mpi_config.comm.bcast(result, root=0) - def calc_slope(self, esmf_nc: netCDF4.Dataset) -> tuple: - """Calculate slope grids needed for incoming shortwave radiation downscaling. + return wrapper - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. - :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times - :return: A tuple containing slope and slope azimuth for nodes and elements - """ - # First extract the sina,cosa, and elevation variables from the geogrid file. 
+ +def barrier(func) -> Any: + """Synchronize all processors at a barrier.""" + + @wraps(func) + def wrapper(self) -> Any: + """Synchronize all processors at a barrier.""" + result = func(self) + self.mpi_config.comm.barrier() + return result + + return wrapper + + +def scatter(func) -> Any: + """Scatter the output of a function to all processors.""" + + @wraps(func) + def wrapper(self) -> Any: + """Scatter the output of a function to all processors.""" try: - sina_grid = esmf_nc.variables[self.config_options.sinalpha_var][0, :, :] + var, name, config_options, post_slice = func(self) + var = self.mpi_config.scatter_array(self, var, config_options) + if post_slice: + return var[:, :] + else: + return var except Exception as e: self.config_options.errMsg = ( - f"Unable to extract SINALPHA from: {self.config_options.geogrid}" + f"Unable to subset {name} from geogrid file into ESMF object" ) raise e + return wrapper + + +class GeoMeta: + """GeoMeta class for handling information about the geometry metadata. + + Extract names of variable attributes from each of the input geospatial variables. These + can change, so we are making this as flexible as possible to accomodate future changes. 
+ """ + + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize GeoMeta class variables.""" + self.config_options = config_options + self.mpi_config = mpi_config + for attr in CONSTS[self.__class__.__base__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def spatial_metadata_exists(self) -> bool: + """Check to make sure the geospatial metadata file exists.""" + if self.config_options.spatial_meta is None: + return False + else: + return True + + @property + @lru_cache + def geogrid_ds(self) -> xr.Dataset: + """Get the geogrid file path.""" try: - cosa_grid = esmf_nc.variables[self.config_options.cosalpha_var][0, :, :] + with xr.open_dataset(self.config_options.geogrid) as ds: + return ds except Exception as e: - self.config_options.errMsg = ( - f"Unable to extract COSALPHA from: {self.config_options.geogrid}" - ) + self.config_options.errMsg = "Unable to open geogrid file with xarray" raise e + @property + @lru_cache + @set_none + def esmf_ds(self) -> xr.Dataset: + """Open the geospatial metadata file and return the xarray dataset object.""" try: - height_dest = esmf_nc.variables[self.config_options.hgt_var][0, :, :] + with xr.open_dataset(self.config_options.spatial_meta) as ds: + esmf_ds = ds.load() except Exception as e: self.config_options.errMsg = ( - f"Unable to extract HGT_M from: {self.config_options.geogrid}" + f"Unable to open esmf file: {self.config_options.spatial_meta}" ) raise e + self._check_variables_exist(esmf_ds) + return esmf_ds - # Ensure cosa/sina are correct dimensions - if sina_grid.shape[0] != self.ny_global or sina_grid.shape[1] != self.nx_global: - self.config_options.errMsg = ( - f"SINALPHA dimensions mismatch in: {self.config_options.geogrid}" - ) - raise Exception - if cosa_grid.shape[0] != self.ny_global or cosa_grid.shape[1] != self.nx_global: - self.config_options.errMsg = ( - f"COSALPHA dimensions mismatch in: {self.config_options.geogrid}" - ) - raise Exception - if ( 
- height_dest.shape[0] != self.ny_global - or height_dest.shape[1] != self.nx_global - ): - self.config_options.errMsg = ( - f"HGT_M dimension mismatch in: {self.config_options.geogrid}" - ) - raise Exception + def _check_variables_exist(self, esmf_ds: xr.Dataset): + """Check to make sure the expected variables are present in the geospatial metadata file.""" + if self.mpi_config.rank == 0: + for var in ["crs", "x", "y"]: + if var not in esmf_ds.variables.keys(): + self.config_options.errMsg = f"Unable to locate {var} variable in: {self.config_options.spatial_meta}" + raise Exception - # Establish constants - rdx = 1.0 / self.dx_meters - rdy = 1.0 / self.dy_meters - msftx = 1.0 - msfty = 1.0 + def ncattrs(self, var: str) -> list: + """Extract variable attribute names from the geospatial metadata file.""" + return self.get_esmf_var(var).ncattrs() - slope_out = np.empty([self.ny_global, self.nx_global], np.float32) - toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) - toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) - slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) - ip_diff = np.empty([self.ny_global, self.nx_global], np.int32) - jp_diff = np.empty([self.ny_global, self.nx_global], np.int32) - hx = np.empty([self.ny_global, self.nx_global], np.float32) - hy = np.empty([self.ny_global, self.nx_global], np.float32) + def get_var(self, ds: xr.Dataset, var: str) -> xr.DataArray: + """Get a variable from a xr.Dataset.""" + if self.mpi_config.rank == 0: + try: + return ds.variables[var] + except Exception as e: + self.config_options.errMsg = f"Unable to extract {var} variable from: {self.config_options.spatial_meta} due to {str(e)}" + raise e - # Create index arrays that will be used to calculate slope. 
- x_tmp = np.arange(self.nx_global) - y_tmp = np.arange(self.ny_global) - x_grid = np.tile(x_tmp[:], (self.ny_global, 1)) - y_grid = np.repeat(y_tmp[:, np.newaxis], self.nx_global, axis=1) - ind_orig = np.where(height_dest == height_dest) - ind_ip1 = ((ind_orig[0]), (ind_orig[1] + 1)) - ind_im1 = ((ind_orig[0]), (ind_orig[1] - 1)) - ind_jp1 = ((ind_orig[0] + 1), (ind_orig[1])) - ind_jm1 = ((ind_orig[0] - 1), (ind_orig[1])) - ind_ip1[1][np.where(ind_ip1[1] >= self.nx_global)] = self.nx_global - 1 - ind_jp1[0][np.where(ind_jp1[0] >= self.ny_global)] = self.ny_global - 1 - ind_im1[1][np.where(ind_im1[1] < 0)] = 0 - ind_jm1[0][np.where(ind_jm1[0] < 0)] = 0 + def get_geogrid_var(self, var: str) -> xr.DataArray: + """Get a variable from the geogrid file.""" + return self.get_var(self.geogrid_ds, var) - ip_diff[ind_orig] = x_grid[ind_ip1] - x_grid[ind_im1] - jp_diff[ind_orig] = y_grid[ind_jp1] - y_grid[ind_jm1] - - toposlpx[ind_orig] = ( - (height_dest[ind_ip1] - height_dest[ind_im1]) * msftx * rdx - ) / ip_diff[ind_orig] - toposlpy[ind_orig] = ( - (height_dest[ind_jp1] - height_dest[ind_jm1]) * msfty * rdy - ) / jp_diff[ind_orig] - hx[ind_orig] = toposlpx[ind_orig] - hy[ind_orig] = toposlpy[ind_orig] - slope_out[ind_orig] = np.arctan((hx[ind_orig] ** 2 + hy[ind_orig] ** 2) ** 0.5) - slope_out[np.where(slope_out < 1e-4)] = 0.0 - slp_azi[np.where(slope_out < 1e-4)] = 0.0 - ind_valesmf_nc = np.where(slope_out >= 1e-4) - slp_azi[ind_valesmf_nc] = ( - np.arctan2(hx[ind_valesmf_nc], hy[ind_valesmf_nc]) + math.pi - ) - ind_valesmf_nc = np.where(cosa_grid >= 0.0) - slp_azi[ind_valesmf_nc] = slp_azi[ind_valesmf_nc] - np.arcsin( - sina_grid[ind_valesmf_nc] - ) - ind_valesmf_nc = np.where(cosa_grid < 0.0) - slp_azi[ind_valesmf_nc] = slp_azi[ind_valesmf_nc] - ( - math.pi - np.arcsin(sina_grid[ind_valesmf_nc]) - ) + def get_esmf_var(self, var: str) -> xr.DataArray: + """Get a variable from the geospatial metadata file.""" + return self.get_var(self.esmf_ds, var) + + @property + 
@lru_cache + @set_none + def _crs_att_names(self) -> list: + """Extract crs attribute names from the geospatial metadata file.""" + return self.ncattrs("crs") + + @property + @lru_cache + @set_none + def _x_coord_att_names(self) -> list: + """Extract x coordinate attribute names from the geospatial metadata file.""" + return self.ncattrs("x") + + @property + @lru_cache + @set_none + def _y_coord_att_names(self) -> list: + """Extract y coordinate attribute names from the geospatial metadata file.""" + return self.ncattrs("y") + + def getncattr(self, var: str) -> dict: + """Extract variable attribute values from the geospatial metadata file.""" + return { + item: self.get_esmf_var(var).getncattr(item) for item in self.ncattrs(var) + } + + @property + @lru_cache + @set_none + def x_coord_atts(self) -> dict: + """Extract x coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("x") + + @property + @lru_cache + @set_none + def y_coord_atts(self) -> dict: + """Extract y coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("y") - # Reset temporary arrays to None to free up memory - toposlpx = None - toposlpy = None - height_dest = None - sina_grid = None - cosa_grid = None - ind_valesmf_nc = None - x_tmp = None - y_tmp = None - x_grid = None - ip_diff = None - jp_diff = None - ind_orig = None - ind_jm1 = None - ind_jp1 = None - ind_im1 = None - ind_ip1 = None - hx = None - hy = None - - return slope_out, slp_azi + @property + @lru_cache + @set_none + def crs_atts(self) -> dict: + """Extract crs coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("crs") + + @property + @lru_cache + @set_none + def _global_att_names(self) -> list: + """Extract global attribute values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + try: + return self.esmf_ds.ncattrs() + except Exception as e: + self.config_options.errMsg = f"Unable to extract global attribute 
names from: {self.config_options.spatial_meta}" + raise e + + @property + @lru_cache + @set_none + def spatial_global_atts(self) -> dict: + """Extract global attribute values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + try: + return { + item: self.esmf_ds.getncattr(item) + for item in self._global_att_names + } + except Exception as e: + self.config_options.errMsg = f"Unable to extract global attributes from: {self.config_options.spatial_meta}" + raise e + + def extract_coords(self, dimension: str) -> np.ndarray: + """Extract coordinate values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + if len(self.get_esmf_var(dimension).shape) == 1: + return self.get_esmf_var(dimension)[:].data + elif len(self.get_esmf_var(dimension).shape) == 2: + return self.get_esmf_var(dimension)[:, :].data + + @property + @lru_cache + @set_none + def x_coords(self) -> np.ndarray: + """Extract x coordinate values from the geospatial metadata file.""" + return self.extract_coords("x") + + @property + @lru_cache + @set_none + def y_coords(self) -> np.ndarray: + """Extract y coordinate values from the geospatial metadata file. + + Check to see if the Y coordinates are North-South. If so, flip them. + """ + if self.mpi_config.rank == 0: + y_coords = self.extract_coords("y") + if len(self.get_esmf_var("y").shape) == 1: + if y_coords[1] < y_coords[0]: + y_coords[:] = np.flip(y_coords[:], axis=0) + elif len(self.get_esmf_var("y").shape) == 2: + if y_coords[1, 0] > y_coords[0, 0]: + y_coords[:, :] = np.flipud(y_coords[:, :]) + return y_coords class GriddedGeoMeta(GeoMeta): - """Class for handling information about the gridded domain we are processing forcings to.""" + """Class for handling information about the gridded domains for forcing.""" - def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): - """Initialize GeoMetaWrfHydro class variables. 
+ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize GriddedGeoMeta class variables. Initialization function to initialize ESMF through ESMPy, calculate the global parameters of the WRF-Hydro grid @@ -342,131 +292,104 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): :return: """ super().__init__(config_options, mpi_config) - self.nx_local_elem = None - self.ny_local_elem = None - # Open the geogrid file and extract necessary information - # to create ESMF fields. - if mpi_config.rank == 0: + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) + + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the gridded domain.""" + if self.mpi_config.rank == 0: try: - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") + if self.ndim_lat == 3: + return self.lat_var.shape[2] + elif self.ndim_lat == 2: + return self.lat_var.shape[1] + else: + # NOTE Is this correct? 
using lon_var + return self.lon_var.shape[0] except Exception as e: - self.config_options.errMsg = f"Unable to open the WRF-Hydro geogrid file: {self.config_options.geogrid}" + self.config_options.errMsg = f"Unable to extract X dimension size from longitude variable in: {self.config_options.geogrid}" raise e - if esmf_nc.variables[self.config_options.lat_var].ndim == 3: - try: - self.nx_global = esmf_nc.variables[ - self.config_options.lat_var - ].shape[2] - except Exception as e: - self.config_options.errMsg = f"Unable to extract X dimension size from latitude variable in: {self.config_options.geogrid}" - raise e - - try: - self.ny_global = esmf_nc.variables[ - self.config_options.lat_var - ].shape[1] - except Exception as e: - self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" - raise e - - try: - self.dx_meters = esmf_nc.DX - except Exception as e: - self.config_options.errMsg = f"Unable to extract DX global attribute in: {self.config_options.geogrid}" - raise e - - try: - self.dy_meters = esmf_nc.DY - except Exception as e: - self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" - raise e - elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: - try: - self.nx_global = esmf_nc.variables[ - self.config_options.lat_var - ].shape[1] - except Exception as e: - self.config_options.errMsg = f"Unable to extract X dimension size from latitude variable in: {self.config_options.geogrid}" - raise e - - try: - self.ny_global = esmf_nc.variables[ - self.config_options.lat_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" - raise e - - try: - self.dx_meters = esmf_nc.variables[self.config_options.lon_var].dx - except Exception as e: - self.config_options.errMsg = f"Unable to extract DX global attribute in: {self.config_options.geogrid}" - raise e 
- - try: - self.dy_meters = esmf_nc.variables[self.config_options.lat_var].dy - except Exception as e: - self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" - raise e - else: - try: - self.nx_global = esmf_nc.variables[ - self.config_options.lon_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract X dimension size from longitude variable in: {self.config_options.geogrid}" - raise e - - try: - self.ny_global = esmf_nc.variables[ - self.config_options.lat_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" - raise e - if self.config_options.input_forcings[0] != 23: - try: - self.dx_meters = esmf_nc.variables[ - self.config_options.lon_var - ].dx - except Exception as e: - self.config_options.errMsg = f"Unable to extract dx metadata attribute in: {self.config_options.geogrid}" - raise e - - try: - self.dy_meters = esmf_nc.variables[ - self.config_options.lat_var - ].dy - except Exception as e: - self.config_options.errMsg = f"Unable to extract dy metadata attribute in: {self.config_options.geogrid}" - raise e + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the gridded domain.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.lat_var.shape[1] else: - # Manually input the grid spacing since ERA5-Interim does not - # internally have this geospatial information within the netcdf file - self.dx_meters = 31000 - self.dy_meters = 31000 + return self.lat_var.shape[0] + except Exception as e: + self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" + raise e - # mpi_config.comm.barrier() + @property + @lru_cache + def ndim_lat(self) -> int: + """Get the number of dimensions for the latitude variable.""" + return self.lat_var.ndim - 
# Broadcast global dimensions to the other processors. - self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, self.config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, self.config_options, param_type=int - ) - self.dx_meters = mpi_config.broadcast_parameter( - self.dx_meters, self.config_options, param_type=float - ) - self.dy_meters = mpi_config.broadcast_parameter( - self.dy_meters, self.config_options, param_type=float - ) + @property + @lru_cache + def ndim_lon(self) -> int: + """Get the number of dimensions for the longitude variable.""" + return self.lon_var.ndim + + @broadcast + @property + @lru_cache + def dy_meters(self) -> float: + """Get the DY distance in meters for the latitude variable.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.geogrid_ds.DY + elif self.ndim_lat == 2: + return self.lat_var.dy + else: + if self.config_options.input_forcings[0] != 23: + return self.lat_var.dy + else: + # Manually input the grid spacing since ERA5-Interim does not + # internally have this geospatial information within the netcdf file + return 31000 + except Exception as e: + self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" + raise e - # mpi_config.comm.barrier() + @broadcast + @property + @lru_cache + def dx_meters(self) -> float: + """Get the DX distance in meters for the longitude variable.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.geogrid_ds.DX + elif self.ndim_lat == 2: + return self.lon_var.dx + else: + if self.config_options.input_forcings[0] != 23: + return self.lon_var.dx + else: + # Manually input the grid spacing since ERA5-Interim does not + # internally have this geospatial information within the netcdf file + return 31000 + except Exception as e: + self.config_options.errMsg = f"Unable to extract dx metadata attribute in: {self.config_options.geogrid}" + raise e + 
@property + @lru_cache + def esmf_grid(self) -> ESMF.Grid: + """Create the ESMF grid object for the gridded domain.""" try: - self.esmf_grid = ESMF.Grid( + return ESMF.Grid( np.array([self.ny_global, self.nx_global]), staggerloc=ESMF.StaggerLoc.CENTER, coord_sys=ESMF.CoordSys.SPH_DEG, @@ -475,866 +398,1069 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): self.config_options.errMsg = f"Unable to create ESMF grid for WRF-Hydro geogrid: {self.config_options.geogrid}" raise e - # mpi_config.comm.barrier() - - self.esmf_lat = self.esmf_grid.get_coords(1) - self.esmf_lon = self.esmf_grid.get_coords(0) - - # mpi_config.comm.barrier() + @property + @lru_cache + def esmf_lat(self) -> np.ndarray: + """Get the ESMF latitude grid.""" + esmf_lat = self.esmf_grid.get_coords(1) + esmf_lat[:, :] = self.latitude_grid + return esmf_lat + @property + @lru_cache + def esmf_lon(self) -> np.ndarray: + """Get the ESMF longitude grid.""" + esmf_lon = self.esmf_grid.get_coords(0) + esmf_lon[:, :] = self.longitude_grid + return esmf_lon + + @scatter + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the gridded domain.""" # Scatter global XLAT_M grid to processors.. 
- if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.lat_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.lat_var][0, :, :] - elif esmf_nc.variables[self.config_options.lat_var].ndim == 2: - var_tmp = esmf_nc.variables[self.config_options.lat_var][:, :] - elif esmf_nc.variables[self.config_options.lat_var].ndim == 1: - lat = esmf_nc.variables[self.config_options.lat_var][:] - lon = esmf_nc.variables[self.config_options.lon_var][:] + if self.mpi_config.rank == 0: + if self.ndim_lat == 3: + var_tmp = self.lat_var[0, :, :] + elif self.ndim_lat == 2: + var_tmp = self.lat_var[:, :] + elif self.ndim_lat == 1: + lat = self.lat_var[:] + lon = self.lon_var[:] var_tmp = np.meshgrid(lon, lat)[1] - lat = None - lon = None + # Flag to grab entire array for AWS slicing if self.config_options.aws: self.lat_bounds = var_tmp else: var_tmp = None + return var_tmp, "latitude_grid", self.config_options, False - # mpi_config.comm.barrier() - - var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - - # mpi_config.comm.barrier() - - # Place the local lat/lon grid slices from the parent geogrid file into - # the ESMF lat/lon grids. 
- try: - self.esmf_lat[:, :] = var_sub_tmp - self.latitude_grid = var_sub_tmp - var_sub_tmp = None - var_tmp = None - except Exception as e: - self.config_options.errMsg = ( - "Unable to subset latitude from geogrid file into ESMF object" - ) - raise e + @property + @lru_cache + def lon_var(self) -> xr.DataArray: + """Get the longitude variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.lon_var) - # mpi_config.comm.barrier() + @property + @lru_cache + def lat_var(self) -> xr.DataArray: + """Get the latitude variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.lat_var) + @scatter + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the gridded domain.""" # Scatter global XLONG_M grid to processors.. - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.lat_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.lon_var][0, :, :] - elif esmf_nc.variables[self.config_options.lon_var].ndim == 2: - var_tmp = esmf_nc.variables[self.config_options.lon_var][:, :] - elif esmf_nc.variables[self.config_options.lon_var].ndim == 1: - lat = esmf_nc.variables[self.config_options.lat_var][:] - lon = esmf_nc.variables[self.config_options.lon_var][:] + if self.mpi_config.rank == 0: + if self.ndim_lat == 3: + var_tmp = self.lon_var[0, :, :] + elif self.ndim_lat == 2: + var_tmp = self.lon_var[:, :] + elif self.ndim_lat == 1: + lat = self.lat_var[:] + lon = self.lon_var[:] var_tmp = np.meshgrid(lon, lat)[0] - lat = None - lon = None + # Flag to grab entire array for AWS slicing if self.config_options.aws: self.lon_bounds = var_tmp else: var_tmp = None - # mpi_config.comm.barrier() + return var_tmp, "longitude_grid", self.config_options, False - var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - - # mpi_config.comm.barrier() - - try: - self.esmf_lon[:, :] = var_sub_tmp - self.longitude_grid = 
var_sub_tmp - var_sub_tmp = None - var_tmp = None - except Exception as e: - self.config_options.errMsg = ( - "Unable to subset longitude from geogrid file into ESMF object" - ) - raise e - - # mpi_config.comm.barrier() + @property + @lru_cache + def cosalpha_var(self) -> xr.DataArray: + """Get the COSALPHA variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.cosalpha_var) + @scatter + @property + @lru_cache + def cosa_grid(self) -> np.ndarray: + """Get the COSALPHA grid for the gridded domain.""" if ( self.config_options.cosalpha_var is not None and self.config_options.sinalpha_var is not None ): # Scatter the COSALPHA,SINALPHA grids to the processors. - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.cosalpha_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.cosalpha_var][ - 0, :, : - ] + if self.mpi_config.rank == 0: + if self.cosalpha_var.ndim == 3: + cosa = self.cosa_grid_from_geogrid_n3 else: - var_tmp = esmf_nc.variables[self.config_options.cosalpha_var][:, :] + cosa = self.cosalpha_var[:, :] else: - var_tmp = None - # mpi_config.comm.barrier() + cosa = None - var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - # mpi_config.comm.barrier() + return cosa, "cosa", self.config_options, True - self.cosa_grid = var_sub_tmp[:, :] - var_sub_tmp = None - var_tmp = None + @property + @lru_cache + def sinalpha_var(self) -> xr.DataArray: + """Get the SINALPHA variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.sinalpha_var) - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.sinalpha_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.sinalpha_var][ - 0, :, : - ] + @property + @lru_cache + def sina_grid(self) -> np.ndarray: + """Get the SINALPHA grid for the gridded domain.""" + if ( + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None + ): + if 
self.mpi_config.rank == 0: + if self.sinalpha_var.ndim == 3: + sina = self.sina_grid_from_geogrid_n3 else: - var_tmp = esmf_nc.variables[self.config_options.sinalpha_var][:, :] + sina = self.sinalpha_var[:, :] else: - var_tmp = None - # mpi_config.comm.barrier() + sina = None - var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - # mpi_config.comm.barrier() - self.sina_grid = var_sub_tmp[:, :] - var_sub_tmp = None - var_tmp = None + return sina, "sina", self.config_options, True + + @property + @lru_cache + def hgt_var(self) -> xr.DataArray: + """Get the HGT variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.hgt_var) + + @scatter + @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the gridded domain. + Used for downscaling purposes. + """ if self.config_options.hgt_var is not None: - # Read in a scatter the WRF-Hydro elevation, which is used for downscaling - # purposes. - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.hgt_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.hgt_var][0, :, :] + if self.mpi_config.rank == 0: + if self.hgt_var.ndim == 3: + height = self.hgt_grid_from_geogrid_n3 else: - var_tmp = esmf_nc.variables[self.config_options.hgt_var][:, :] + height = self.hgt_var[:, :] else: - var_tmp = None - # mpi_config.comm.barrier() + height = None - var_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - # mpi_config.comm.barrier() - self.height = var_sub_tmp - var_sub_tmp = None - var_tmp = None + return height, "height", self.config_options, False + + @property + @lru_cache + def slope_var(self) -> xr.DataArray: + """Get the slope variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.slope_var) + + @property + @lru_cache + def slope_azimuth_var(self) -> xr.DataArray: + """Get the slope azimuth variable from the geospatial metadata file.""" + return 
self.get_geogrid_var(self.config_options.slope_azimuth_var) + + @property + @lru_cache + def dx(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable.""" + dx = np.empty( + ( + self.lat_var.shape[0], + self.lon_var.shape[0], + ), + dtype=float, + ) + dx[:] = self.lon_var.dx + return dx + + @property + @lru_cache + def dy(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable.""" + dy = np.empty( + ( + self.lat_var.shape[0], + self.lon_var.shape[0], + ), + dtype=float, + ) + dy[:] = self.lat_var.dy + return dy + + @property + @lru_cache + def dz(self) -> np.ndarray: + """Calculate the dz distance in meters for the height variable.""" + dz_init = np.diff(self.hgt_var, axis=0) + dz = np.empty(self.dx.shape, dtype=float) + dz[0 : dz_init.shape[0], 0 : dz_init.shape[1]] = dz_init + dz[dz_init.shape[0] :, :] = dz_init[-1, :] + return dz + + @scatter + @property + @lru_cache + def slope(self) -> np.ndarray: + """Calculate slope grids needed for incoming shortwave radiation downscaling. + Calculate slope from sina_grid, cosa_grid, and height variables if they are + present in the geogrid file, otherwise calculate slope from slope and slope + azimuth variables, and if those are not present, calculate slope from height variable. + + + Calculate grid coordinates dx distances in meters + based on general geospatial formula approximations + on a spherical grid. + """ if ( self.config_options.cosalpha_var is not None and self.config_options.sinalpha_var is not None ): - # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will - # be used for downscaling purposes. 
- if mpi_config.rank == 0: - try: - slope_tmp, slp_azi_tmp = self.calc_slope(esmf_nc) - except Exception: - raise Exception - else: - slope_tmp = None - slp_azi_tmp = None - # mpi_config.comm.barrier() - - slope_sub_tmp = mpi_config.scatter_array( - self, slope_tmp, self.config_options + slope = self.slope_from_cosalpha_sinalpha + elif ( + self.config_options.slope_var is not None + and self.config_options.slope_azimuth_var is not None + ): + slope = self.slope_from_slope_azimuth + elif self.config_options.hgt_var is not None: + slope = self.slope_from_height + else: + raise Exception( + "Unable to calculate slope grid for incoming shortwave radiation downscaling. No geospatial metadata variables provided to calculate slope." ) - self.slope = slope_sub_tmp[:, :] - slope_sub_tmp = None + return slope, "slope", self.config_options, True - slp_azi_sub = mpi_config.scatter_array( - self, slp_azi_tmp, self.config_options - ) - self.slp_azi = slp_azi_sub[:, :] - slp_azi_tmp = None + @scatter + @property + @lru_cache + def slp_azi(self) -> np.ndarray: + """Calculate slope azimuth grids needed for incoming shortwave radiation downscaling. + Calculate slp_azi from sina_grid, cosa_grid, and height variables if they are + present in the geogrid file, otherwise calculate slope from slope and slope + azimuth variables, and if those are not present, calculate slope from height variable. 
+ """ + if ( + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None + ): + slp_azi = self.slp_azi_from_cosalpha_sinalpha elif ( self.config_options.slope_var is not None and self.config_options.slope_azimuth_var is not None ): - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.slope_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.slope_var][0, :, :] - else: - var_tmp = esmf_nc.variables[self.config_options.slope_var][:, :] - else: - var_tmp = None + slp_azi = self.slp_azi_from_slope_azimuth - slope_sub_tmp = mpi_config.scatter_array(self, var_tmp, self.config_options) - self.slope = slope_sub_tmp - var_tmp = None + elif self.config_options.hgt_var is not None: + slp_azi = self.slp_azi_from_height + else: + raise Exception( + "Unable to calculate slope azimuth grid for incoming shortwave radiation downscaling. No geospatial metadata variables provided to calculate slope azimuth." + ) - if mpi_config.rank == 0: - if esmf_nc.variables[self.config_options.slope_azimuth_var].ndim == 3: - var_tmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ - 0, :, : - ] - else: - var_tmp = esmf_nc.variables[self.config_options.slope_azimuth_var][ - :, : - ] + return slp_azi, "slp_azi", self.config_options, True + + @property + @lru_cache + def slp_azi_from_slope_azimuth(self) -> np.ndarray: + """Calculate slope azimuth from slope and slope azimuth variables.""" + if self.mpi_config.rank == 0: + if self.slope_azimuth_var.ndim == 3: + return self.slope_azimuth_var[0, :, :] else: - var_tmp = None + return self.slope_azimuth_var[:, :] - slp_azi_sub = mpi_config.scatter_array(self, var_tmp, self.config_options) - self.slp_azi = slp_azi_sub[:, :] - var_tmp = None + @property + @lru_cache + def slp_azi_from_height(self) -> np.ndarray: + """Calculate slope azimuth from height variable.""" + if self.mpi_config.rank == 0: + return (180 / np.pi) * np.arctan(self.dx / self.dy) - elif 
self.config_options.hgt_var is not None: - # Calculate the slope from the domain using elevation of the gridded model and other approximations - if mpi_config.rank == 0: - try: - slope_tmp, slp_azi_tmp = self.calc_slope_gridded(esmf_nc) - except Exception: - raise Exception + @property + @lru_cache + def slope_from_height(self) -> np.ndarray: + """Calculate slope from height variable.""" + if self.mpi_config.rank == 0: + return self.dz / np.sqrt((self.dx**2) + (self.dy**2)) + + @property + @lru_cache + def slope_from_slope_azimuth(self) -> np.ndarray: + """Calculate slope from slope and slope azimuth variables.""" + if self.mpi_config.rank == 0: + if self.slope_var.ndim == 3: + return self.slope_var[0, :, :] else: - slope_tmp = None - slp_azi_tmp = None - # mpi_config.comm.barrier() + return self.slope_var[:, :] - slope_sub_tmp = mpi_config.scatter_array( - self, slope_tmp, self.config_options + @property + @lru_cache + def slope_from_cosalpha_sinalpha(self) -> np.ndarray: + """Calculate slope from COSALPHA and SINALPHA variables.""" + if self.mpi_config.rank == 0: + slope_tmp = np.arctan( + (self.hx[self.ind_orig] ** 2 + self.hy[self.ind_orig] ** 2) ** 0.5 ) - self.slope = slope_sub_tmp[:, :] - slope_sub_tmp = None + slope_tmp[np.where(slope_tmp < 1e-4)] = 0.0 + return slope_tmp - slp_azi_sub = mpi_config.scatter_array( - self, slp_azi_tmp, self.config_options + @property + @lru_cache + def slp_azi_from_cosalpha_sinalpha(self) -> np.ndarray: + """Calculate slope azimuth from COSALPHA and SINALPHA variables.""" + if self.mpi_config.rank == 0: + slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) + slp_azi[np.where(self.slope_from_cosalpha_sinalpha < 1e-4)] = 0.0 + ind_valesmf_ds = np.where(self.slope_from_cosalpha_sinalpha >= 1e-4) + slp_azi[ind_valesmf_ds] = ( + np.arctan2(self.hx[ind_valesmf_ds], self.hy[ind_valesmf_ds]) + math.pi ) - self.slp_azi = slp_azi_sub[:, :] - slp_azi_tmp = None - - if mpi_config.rank == 0: - # Close the geogrid file - try: 
- esmf_nc.close() - except Exception as e: - self.config_options.errMsg = ( - f"Unable to close geogrid file: {self.config_options.geogrid}" - ) - raise e + ind_valesmf_ds = np.where(self.cosa_grid_from_geogrid_n3 >= 0.0) + slp_azi[ind_valesmf_ds] = slp_azi[ind_valesmf_ds] - np.arcsin( + self.sina_grid_from_geogrid_n3[ind_valesmf_ds] + ) + ind_valesmf_ds = np.where(self.cosa_grid_from_geogrid_n3 < 0.0) + slp_azi[ind_valesmf_ds] = slp_azi[ind_valesmf_ds] - ( + math.pi - np.arcsin(self.sina_grid_from_geogrid_n3[ind_valesmf_ds]) + ) + return slp_azi - # Reset temporary variables to free up memory - slope_tmp = None - slp_azi_tmp = None - var_tmp = None + @property + @lru_cache + def ind_orig(self) -> tuple[np.ndarray, np.ndarray]: + """Calculate the indices of the original grid points for the height variable.""" + return np.where(self.hgt_grid_from_geogrid_n3 == self.hgt_grid_from_geogrid_n3) - def calc_slope_gridded(self, esmf_nc: netCDF4.Dataset) -> tuple: - """Calculate slope grids needed for incoming shortwave radiation downscaling. + @property + @lru_cache + def hx(self) -> np.ndarray: + """Calculate the slope in the x direction from the height variable.""" + rdx = 1.0 / self.dx_meters + msftx = 1.0 + toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) + ip_diff = np.empty([self.ny_global, self.nx_global], np.int32) + hx = np.empty([self.ny_global, self.nx_global], np.float32) - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. This calculates the slopes for grid cells - :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times - :return: A tuple containing slope and slope azimuth for grid cells - """ - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") + # Create index arrays that will be used to calculate slope. 
+ x_tmp = np.arange(self.nx_global) + x_grid = np.tile(x_tmp[:], (self.ny_global, 1)) + ind_ip1 = ((self.ind_orig[0]), (self.ind_orig[1] + 1)) + ind_im1 = ((self.ind_orig[0]), (self.ind_orig[1] - 1)) + ind_ip1[1][np.where(ind_ip1[1] >= self.nx_global)] = self.nx_global - 1 + ind_im1[1][np.where(ind_im1[1] < 0)] = 0 - try: - lons = esmf_nc.variables[self.config_options.lon_var][:] - lats = esmf_nc.variables[self.config_options.lat_var][:] - except Exception as e: - self.config_options.errMsg = f"Unable to extract gridded coordinates in {self.config_options.geogrid}" - raise e - try: - dx = np.empty( - ( - esmf_nc.variables[self.config_options.lat_var].shape[0], - esmf_nc.variables[self.config_options.lon_var].shape[0], - ), - dtype=float, - ) - dy = np.empty( - ( - esmf_nc.variables[self.config_options.lat_var].shape[0], - esmf_nc.variables[self.config_options.lon_var].shape[0], - ), - dtype=float, + ip_diff[self.ind_orig] = x_grid[ind_ip1] - x_grid[ind_im1] + toposlpx[self.ind_orig] = ( + ( + self.hgt_grid_from_geogrid_n3[ind_ip1] + - self.hgt_grid_from_geogrid_n3[ind_im1] ) - dx[:] = esmf_nc.variables[self.config_options.lon_var].dx - dy[:] = esmf_nc.variables[self.config_options.lat_var].dy - except Exception as e: - self.config_options.errMsg = f"Unable to extract dx and dy distances in {self.config_options.geogrid}" - raise e - try: - heights = esmf_nc.variables[self.config_options.hgt_var][:] - except Exception as e: - self.config_options.errMsg = f"Unable to extract heights of grid cells in {self.config_options.geogrid}" - raise e - - esmf_nc.close() - - # calculate grid coordinates dx distances in meters - # based on general geospatial formula approximations - # on a spherical grid - dz_init = np.diff(heights, axis=0) - dz = np.empty(dx.shape, dtype=float) - dz[0 : dz_init.shape[0], 0 : dz_init.shape[1]] = dz_init - dz[dz_init.shape[0] :, :] = dz_init[-1, :] + * msftx + * rdx + ) / ip_diff[self.ind_orig] + hx = np.empty([self.ny_global, self.nx_global], 
np.float32) + hx[self.ind_orig] = toposlpx[self.ind_orig] + return hx - slope = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi = (180 / np.pi) * np.arctan(dx / dy) + @property + @lru_cache + def hy(self) -> np.ndarray: + """Calculate the slope in the y direction from the height variable.""" + rdy = 1.0 / self.dy_meters + msfty = 1.0 + toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) + jp_diff = np.empty([self.ny_global, self.nx_global], np.int32) + hy = np.empty([self.ny_global, self.nx_global], np.float32) - # Reset temporary arrays to None to free up memory - lons = None - lats = None - heights = None - dx = None - dy = None - dz = None + # Create index arrays that will be used to calculate slope. + y_tmp = np.arange(self.ny_global) + y_grid = np.repeat(y_tmp[:, np.newaxis], self.nx_global, axis=1) + ind_jp1 = ((self.ind_orig[0] + 1), (self.ind_orig[1])) + ind_jm1 = ((self.ind_orig[0] - 1), (self.ind_orig[1])) + ind_jp1[0][np.where(ind_jp1[0] >= self.ny_global)] = self.ny_global - 1 + ind_jm1[0][np.where(ind_jm1[0] < 0)] = 0 - return slope, slp_azi + jp_diff[self.ind_orig] = y_grid[ind_jp1] - y_grid[ind_jm1] + toposlpy[self.ind_orig] = ( + ( + self.hgt_grid_from_geogrid_n3[ind_jp1] + - self.hgt_grid_from_geogrid_n3[ind_jm1] + ) + * msfty + * rdy + ) / jp_diff[self.ind_orig] + hy[self.ind_orig] = toposlpy[self.ind_orig] + return hy @property + @lru_cache def x_lower_bound(self) -> float: """Get the local x lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] @property + @lru_cache def x_upper_bound(self) -> float: """Get the local x upper bound for this processor.""" return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] @property + @lru_cache def y_lower_bound(self) -> float: """Get the local y lower bound for this processor.""" return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] @property + @lru_cache def y_upper_bound(self) -> float: """Get the local y upper bound for this processor.""" 
return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] @property - def nx_local(self) -> int: - """Get the local x dimension size for this processor.""" - return self.x_upper_bound - self.x_lower_bound + @lru_cache + def nx_local(self) -> int: + """Get the local x dimension size for this processor.""" + return self.x_upper_bound - self.x_lower_bound + + @property + @lru_cache + def ny_local(self) -> int: + """Get the local y dimension size for this processor.""" + return self.y_upper_bound - self.y_lower_bound + + @property + @lru_cache + def sina_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the SINALPHA grid for the gridded domain directly from the geogrid file.""" + try: + return self.check_grid(self.sinalpha_var[0, :, :]) + except Exception as e: + self.config_options.errMsg = ( + f"Unable to extract SINALPHA from: {self.config_options.geogrid}" + ) + raise e + + def check_grid(self, grid: np.ndarray) -> np.ndarray: + """Check to make sure the grid dimensions match the expected dimensions for the gridded domain.""" + if grid.shape[0] != self.ny_global or grid.shape[1] != self.nx_global: + self.config_options.errMsg = ( + f"Grid dimensions mismatch in: {self.config_options.geogrid}" + ) + raise Exception + return grid + + @property + @lru_cache + def cosa_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the COSALPHA grid for the gridded domain directly from the geogrid file.""" + try: + return self.check_grid(self.cosalpha_var[0, :, :]) + except Exception as e: + self.config_options.errMsg = ( + f"Unable to extract COSALPHA from: {self.config_options.geogrid}" + ) + raise e @property - def ny_local(self) -> int: - """Get the local y dimension size for this processor.""" - return self.y_upper_bound - self.y_lower_bound + @lru_cache + def hgt_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the HGT_M grid for the gridded domain directly from the geogrid file.""" + try: + return self.check_grid(self.hgt_var[0, :, :]) + except Exception as e: + 
self.config_options.errMsg = ( + f"Unable to extract HGT_M from: {self.config_options.geogrid}" + ) + raise e class HydrofabricGeoMeta(GeoMeta): - """Class for handling information about the unstructured hydrofabric domain we are processing forcings to.""" + """Class for handling information about the hydrofabric domain forcing.""" def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): - """Initialize GeoMetaWrfHydro class variables. + """Initialize HydrofabricGeoMeta class variables. Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid + calculate the global parameters of the hydrofabric being processed to, along with the local parameters for this particular processor. :return: """ super().__init__(config_options, mpi_config) - self.nx_local_elem = None - self.ny_local_elem = None - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None - if self.config_options.geogrid is not None: - # Phase 1: Rank 0 extracts all needed global data - if self.mpi_config.rank == 0: - try: - esmf_nc = nc_utils.nc_Dataset_retry( - self.mpi_config, - self.config_options, - err_handler, - self.config_options.geogrid, - "r", - ) - - # Extract everything we need with retries - tmp_vars = esmf_nc.variables - - if self.config_options.aws: - nodecoords_data = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - err_handler, - tmp_vars[self.config_options.nodecoords_var], - ) - self.lat_bounds = nodecoords_data[:, 1] - self.lon_bounds = nodecoords_data[:, 0] - - # Store these for later broadcast/scatter - elementcoords_global = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - err_handler, - tmp_vars[self.config_options.elemcoords_var], - ) - - self.nx_global = elementcoords_global.shape[0] - self.ny_global = self.nx_global - - element_ids_global = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - 
err_handler, - tmp_vars[self.config_options.element_id_var], - ) - - heights_global = None - if self.config_options.hgt_var is not None: - heights_global = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - err_handler, - tmp_vars[self.config_options.hgt_var], - ) - slopes_global = None - slp_azi_global = None - if self.config_options.slope_var is not None: - slopes_global = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - err_handler, - tmp_vars[self.config_options.slope_var], - ) - if self.config_options.slope_azimuth_var is not None: - slp_azi_global = nc_utils.nc_read_var_retry( - self.mpi_config, - self.config_options, - err_handler, - tmp_vars[self.config_options.slope_azimuth_var], - ) - - except Exception as e: - LOG.critical( - f"Failed to open mesh file: {self.config_options.geogrid} " - f"due to {str(e)}" - ) - raise - finally: - esmf_nc.close() - else: - elementcoords_global = None - element_ids_global = None - heights_global = None - slopes_global = None - slp_azi_global = None - - # Broadcast dimensions - self.nx_global = self.mpi_config.broadcast_parameter( - self.nx_global, self.config_options, param_type=int + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def lat_bounds(self) -> np.ndarray: + """Get the latitude bounds for the unstructured domain.""" + return self.get_bound(1) + + @property + @lru_cache + def lon_bounds(self) -> np.ndarray: + """Get the longitude bounds for the unstructured domain.""" + return self.get_bound(0) + + def get_bound(self, dim: int) -> np.ndarray: + """Get the longitude or latitude bounds for the unstructured domain.""" + if self.config_options.aws: + return self.get_geogrid_var(self.config_options.nodecoords_var)[:, dim] + + @broadcast + @property + @lru_cache + def elementcoords_global(self) -> np.ndarray: + """Get the global element coordinates for the unstructured domain.""" + return 
self.get_geogrid_var(self.config_options.elemcoords_var) + + @barrier + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the unstructured domain.""" + return self.elementcoords_global.shape[0] + + @barrier + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the unstructured domain. + + Same as nx_global. + """ + return self.nx_global + + @property + @lru_cache + def esmf_grid(self) -> ESMF.Mesh: + """Create the ESMF Mesh object for the unstructured domain.""" + try: + return ESMF.Mesh( + self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) - self.ny_global = self.mpi_config.broadcast_parameter( - self.ny_global, self.config_options, param_type=int + except Exception as e: + LOG.critical( + f"Unable to create ESMF Mesh: {self.config_options.geogrid} " + f"due to {str(e)}" ) + raise e + + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain.""" + return self.esmf_grid.coords[1][1] - self.mpi_config.comm.barrier() + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain.""" + return self.esmf_grid.coords[1][0] - # Phase 2: Create ESMF Mesh (collective operation with retry) + @property + @lru_cache + def pet_element_inds(self) -> np.ndarray: + """Get the PET element indices for the unstructured domain.""" + if self.mpi_config.rank == 0: try: - self.esmf_grid = esmf_utils.esmf_mesh_retry( - self.mpi_config, - self.config_options, - err_handler, - filename=self.config_options.geogrid, - filetype=ESMF.FileFormat.ESMFMESH, - ) + tree = spatial.KDTree(self.elementcoords_global) + return tree.query( + np.column_stack([self.longitude_grid, self.latitude_grid]) + )[1] except Exception as e: LOG.critical( - f"Unable to create ESMF Mesh: {self.config_options.geogrid} " + f"Failed to open mesh file: 
{self.config_options.geogrid} " f"due to {str(e)}" ) raise e - # Extract local coordinates from ESMF mesh - self.latitude_grid = self.esmf_grid.coords[1][1] - self.longitude_grid = self.esmf_grid.coords[1][0] - - # Phase 3: Broadcast global arrays and compute local indices - elementcoords_global = self.mpi_config.comm.bcast( - elementcoords_global, root=0 - ) - element_ids_global = self.mpi_config.comm.bcast(element_ids_global, root=0) + @property + @lru_cache + def element_ids(self) -> np.ndarray: + """Get the element IDs for the unstructured domain.""" + return self.element_ids_global[self.pet_element_inds] - # Each rank computes its own local indices - pet_elementcoords = np.column_stack( - [self.longitude_grid, self.latitude_grid] - ) - tree = spatial.KDTree(elementcoords_global) - _, pet_element_inds = tree.query(pet_elementcoords) + @broadcast + @property + @lru_cache + def element_ids_global(self) -> np.ndarray: + """Get the global element IDs for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.element_id_var) - self.element_ids = element_ids_global[pet_element_inds] - self.element_ids_global = element_ids_global + @broadcast + @property + @lru_cache + def heights_global(self) -> np.ndarray: + """Get the global heights for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.hgt_var) - # Broadcast and extract height/slope data + @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the unstructured domain.""" + if self.mpi_config.rank == 0: if self.config_options.hgt_var is not None: - heights_global = self.mpi_config.comm.bcast(heights_global, root=0) - self.height = heights_global[pet_element_inds] + return self.heights_global[self.pet_element_inds] + + @property + @lru_cache + def slope(self) -> np.ndarray: + """Get the slopes for the unstructured domain.""" + return self.slopes_global[self.pet_element_inds] + + @property + @lru_cache + def slp_azi(self) -> np.ndarray: 
+ """Get the slope azimuths for the unstructured domain.""" + return self.slp_azi_global[self.pet_element_inds] + + @property + @lru_cache + def mesh_inds(self) -> np.ndarray: + """Get the mesh indices for the unstructured domain.""" + return self.pet_element_inds - if self.config_options.slope_var is not None: - slopes_global = self.mpi_config.comm.bcast(slopes_global, root=0) - slp_azi_global = self.mpi_config.comm.bcast(slp_azi_global, root=0) - self.slope = slopes_global[pet_element_inds] - self.slp_azi = slp_azi_global[pet_element_inds] + @broadcast + @property + @lru_cache + def slopes_global(self) -> np.ndarray: + """Get the global slopes for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.slope_var) - self.mesh_inds = pet_element_inds + @property + @lru_cache + def slp_azi_global(self) -> np.ndarray: + """Get the global slope azimuths for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.slope_azimuth_var) @property + @lru_cache def nx_local(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) @property + @lru_cache def ny_local(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) class UnstructuredGeoMeta(GeoMeta): - """Class for handling information about the unstructured domain we are processing forcings to.""" + """Class for handling information about the hydrofabric domain forcing.""" - def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): - """Initialize GeoMetaWrfHydro class variables. + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize HydrofabricGeoMeta class variables. 
Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid + calculate the global parameters of the unstructured mesh being processed to, along with the local parameters for this particular processor. :return: """ super().__init__(config_options, mpi_config) + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None - # Open the geogrid file and extract necessary information - # to create ESMF fields. - if mpi_config.rank == 0: - try: - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") - except Exception as e: - self.config_options.errMsg = f"Unable to open the unstructured mesh file: {self.config_options.geogrid}" - raise e + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.nodecoords_var).shape[0] - try: - self.nx_global = esmf_nc.variables[ - self.config_options.nodecoords_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract X dimension size in {self.config_options.geogrid}" - raise e + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.nodecoords_var).shape[0] - try: - self.ny_global = esmf_nc.variables[ - self.config_options.nodecoords_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract Y dimension size in {self.config_options.geogrid}" - raise e + @broadcast + @property + @lru_cache + def nx_global_elem(self) -> int: + """Get the global x dimension size for the unstructured domain elements.""" + return self.get_esmf_var(self.config_options.elemcoords_var).shape[0] - try: - self.nx_global_elem = esmf_nc.variables[ - 
self.config_options.elemcoords_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract X dimension size in {self.config_options.geogrid}" - raise e + @broadcast + @property + @lru_cache + def ny_global_elem(self) -> int: + """Get the global y dimension size for the unstructured domain elements.""" + return self.get_esmf_var(self.config_options.elemcoords_var).shape[0] - try: - self.ny_global_elem = esmf_nc.variables[ - self.config_options.elemcoords_var - ].shape[0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract Y dimension size in {self.config_options.geogrid}" - raise e + @property + @lru_cache + def lon_bounds(self) -> np.ndarray: + """Get the longitude bounds for the unstructured domain.""" + return self.get_bound(0) + + @property + @lru_cache + def lat_bounds(self) -> np.ndarray: + """Get the latitude bounds for the unstructured domain.""" + return self.get_bound(1) + def get_bound(self, dim: int) -> np.ndarray: + """Get the longitude or latitude bounds for the unstructured domain.""" + if self.mpi_config.rank == 0: # Flag to grab entire array for AWS slicing if self.config_options.aws: - self.lat_bounds = esmf_nc.variables[self.config_options.nodecoords_var][ - : - ][:, 1] - self.lon_bounds = esmf_nc.variables[self.config_options.nodecoords_var][ - : - ][:, 0] - - # mpi_config.comm.barrier() - - # Broadcast global dimensions to the other processors. 
- self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, self.config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, self.config_options, param_type=int - ) - self.nx_global_elem = mpi_config.broadcast_parameter( - self.nx_global_elem, self.config_options, param_type=int - ) - self.ny_global_elem = mpi_config.broadcast_parameter( - self.ny_global_elem, self.config_options, param_type=int - ) + return self.get_esmf_var(self.config_options.nodecoords_var)[:][:, dim] - # mpi_config.comm.barrier() - - if mpi_config.rank == 0: - # Close the geogrid file - try: - esmf_nc.close() - except Exception as e: - self.config_options.errMsg = ( - f"Unable to close geogrid Mesh file: {self.config_options.geogrid}" - ) - raise e + @property + @lru_cache + def esmf_grid(self) -> ESMF.Mesh: + """Create the ESMF grid object for the unstructured domain. + Removed argument coord_sys=ESMF.CoordSys.SPH_DEG since we are always reading from a file + From ESMF documentation + If you create a mesh from a file (like NetCDF/ESMF-Mesh), coord_sys is ignored. The mesh’s coordinate system should be embedded in the file or inferred. + """ try: - # Removed argument coord_sys=ESMF.CoordSys.SPH_DEG since we are always reading from a file - # From ESMF documentation - # If you create a mesh from a file (like NetCDF/ESMF-Mesh), coord_sys is ignored. The mesh’s coordinate system should be embedded in the file or inferred. - self.esmf_grid = ESMF.Mesh( + return ESMF.Mesh( filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) except Exception as e: self.config_options.errMsg = f"Unable to create ESMF Mesh from geogrid file: {self.config_options.geogrid}" raise e - # mpi_config.comm.barrier() + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain. 
- # Place the local lat/lon grid slices from the parent geogrid file into - # the ESMF lat/lon grids that have already been seperated by processors. - try: - self.latitude_grid = self.esmf_grid.coords[0][1] - self.latitude_grid_elem = self.esmf_grid.coords[1][1] - var_sub_tmp = None - var_tmp = None - except Exception as e: - self.config_options.errMsg = ( - "Unable to subset node latitudes from ESMF Mesh object" - ) - raise e - try: - self.longitude_grid = self.esmf_grid.coords[0][0] - self.longitude_grid_elem = self.esmf_grid.coords[1][0] - var_sub_tmp = None - var_tmp = None - except Exception as e: - self.config_options.errMsg = ( - "Unable to subset XLONG_M from geogrid file into ESMF Mesh object" - ) - raise e + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[0][1] + + @property + @lru_cache + def latitude_grid_elem(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain elements. + + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[1][1] + + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain. + + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[0][0] + + @property + @lru_cache + def longitude_grid_elem(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain elements. - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. 
+ """ + return self.esmf_grid.coords[1][0] + @property + @lru_cache + def pet_element_inds(self) -> np.ndarray: + """Get the local node indices for the unstructured domain elements.""" # Get lat and lon global variables for pet extraction of indices - nodecoords_global = esmf_nc.variables[self.config_options.nodecoords_var][ - : - ].data - elementcoords_global = esmf_nc.variables[self.config_options.elemcoords_var][ - : - ].data + elementcoords_global = self.get_var( + self.geogrid_ds, self.config_options.elemcoords_var + )[:].data + # Find the corresponding local indices to slice global heights and slope + # variables that are based on the partitioning on the unstructured mesh + pet_elementcoords = np.empty((len(self.latitude_grid_elem), 2), dtype=float) + pet_elementcoords[:, 0] = self.longitude_grid_elem + pet_elementcoords[:, 1] = self.latitude_grid_elem + return spatial.KDTree(elementcoords_global).query(pet_elementcoords)[1] + @property + @lru_cache + def pet_node_inds(self) -> np.ndarray: + """Get the local node indices for the unstructured domain nodes.""" + # Get lat and lon global variables for pet extraction of indices + nodecoords_global = self.get_var( + self.geogrid_ds, self.config_options.nodecoords_var + )[:].data # Find the corresponding local indices to slice global heights and slope # variables that are based on the partitioning on the unstructured mesh pet_nodecoords = np.empty((len(self.latitude_grid), 2), dtype=float) - pet_elementcoords = np.empty((len(self.latitude_grid_elem), 2), dtype=float) pet_nodecoords[:, 0] = self.longitude_grid pet_nodecoords[:, 1] = self.latitude_grid - pet_elementcoords[:, 0] = self.longitude_grid_elem - pet_elementcoords[:, 1] = self.latitude_grid_elem - distance, pet_node_inds = spatial.KDTree(nodecoords_global).query( - pet_nodecoords - ) - distance, pet_element_inds = spatial.KDTree(elementcoords_global).query( - pet_elementcoords - ) + return spatial.KDTree(nodecoords_global).query(pet_nodecoords)[1] + + # 
NOTE this is a note/commented out code from before refactor on 2/19/2026. + # Not accepting cosalpha and sinalpha at this time for unstructured meshes, only + # accepting the pre-calculated slope and slope azmiuth variables if available, + # otherwise calculate slope from height estimates + # if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): + # self.cosa_grid = esmf_ds.variables[config_options.cosalpha_var][:].data[pet_node_inds] + # self.sina_grid = esmf_ds.variables[config_options.sinalpha_var][:].data[pet_node_inds] + # slope_tmp, slp_azi_tmp = self.calc_slope(esmf_ds,config_options) + # self.slope = slope_node_tmp[pet_node_inds] + # self.slp_azi = slp_azi_node_tmp[pet_node_inds] + + @property + @lru_cache + def slope(self) -> np.ndarray: + """Get the slope grid for the unstructured domain.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.slope_var)[ + self.pet_node_inds + ] + elif self.config_options.hgt_var is not None: + return ( + self.dz_node + / np.sqrt((self.dx_node**2) + (self.dy_node**2))[self.pet_node_inds] + ) + + @property + @lru_cache + def slp_azi(self) -> np.ndarray: + """Get the slope azimuth grid for the unstructured domain.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.slope_azimuth_var)[ + self.pet_node_inds + ] + elif self.config_options.hgt_var is not None: + return (180 / np.pi) * np.arctan(self.dx_node / self.dy_node)[ + self.pet_node_inds + ] + + @property + @lru_cache + def slope_elem(self) -> np.ndarray: + """Get the slope grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.slope_var_elem)[:].data[ + self.pet_element_inds + ] + elif 
self.config_options.hgt_var is not None: + return ( + self.dz_elem + / np.sqrt((self.dx_elem**2) + (self.dy_elem**2))[self.pet_element_inds] + ) + + @property + @lru_cache + def slp_azi_elem(self) -> np.ndarray: + """Get the slope azimuth grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_var( + self.geogrid_ds, self.config_options.slope_azimuth_var_elem + )[:].data[self.pet_element_inds] + elif self.config_options.hgt_var is not None: + return (180 / np.pi) * np.arctan(self.dx_elem / self.dy_elem)[ + self.pet_element_inds + ] - # reset variables to free up memory - nodecoords_global = None - elementcoords_global = None - pet_nodecoords = None - pet_elementcoords = None - distance = None - - # Not accepting cosalpha and sinalpha at this time for unstructured meshes, only - # accepting the pre-calculated slope and slope azmiuth variables if available, - # otherwise calculate slope from height estimates - # if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): - # self.cosa_grid = esmf_nc.variables[config_options.cosalpha_var][:].data[pet_node_inds] - # self.sina_grid = esmf_nc.variables[config_options.sinalpha_var][:].data[pet_node_inds] - # slope_tmp, slp_azi_tmp = self.calc_slope(esmf_nc,config_options) - # self.slope = slope_node_tmp[pet_node_inds] - # self.slp_azi = slp_azi_node_tmp[pet_node_inds] + @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the unstructured domain nodes.""" if ( self.config_options.slope_var is not None and self.config_options.slp_azi_var is not None ): - self.slope = esmf_nc.variables[self.config_options.slope_var][:].data[ - pet_node_inds + return self.get_geogrid_var(self.config_options.hgt_var)[:].data[ + self.pet_node_inds ] - self.slp_azi = esmf_nc.variables[self.config_options.slope_azimuth_var][ - : - ].data[pet_node_inds] - self.slope_elem = 
esmf_nc.variables[self.config_options.slope_var_elem][ - : - ].data[pet_element_inds] - self.slp_azi_elem = esmf_nc.variables[ - self.config_options.slope_azimuth_var_elem - ][:].data[pet_element_inds] - - # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = esmf_nc.variables[self.config_options.hgt_var][:].data[ - pet_node_inds + elif self.config_options.hgt_var is not None: + return self.self.get_geogrid_var(self.config_options.hgt_var)[:].data[ + self.pet_node_inds ] - # Read in a scatter the mesh element elevation, which is used for downscaling purposes. - self.height_elem = esmf_nc.variables[self.config_options.hgt_elem_var][ - : - ].data[pet_element_inds] + @property + @lru_cache + def height_elem(self) -> np.ndarray: + """Get the height grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.hgt_elem_var)[:].data[ + self.pet_element_inds + ] elif self.config_options.hgt_var is not None: - # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = esmf_nc.variables[self.config_options.hgt_var][:].data[ - pet_node_inds + return self.get_geogrid_var(self.config_options.hgt_elem_var)[:].data[ + self.pet_element_inds ] - # Read in a scatter the mesh element elevation, which is used for downscaling purposes. 
- self.height_elem = esmf_nc.variables[self.config_options.hgt_elem_var][ - : - ].data[pet_element_inds] + @property + @lru_cache + def node_lons(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain nodes.""" + return self.get_geogrid_var(self.config_options.nodecoords_var)[:][:, 0] + + @property + @lru_cache + def node_lats(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain nodes.""" + return self.get_geogrid_var(self.config_options.nodecoords_var)[:][:, 1] + + @property + @lru_cache + def elem_lons(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain elements.""" + return self.get_geogrid_var(self.config_options.elemcoords_var)[:][:, 0] + + @property + @lru_cache + def elem_lats(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain elements.""" + return self.get_geogrid_var(self.config_options.elemcoords_var)[:][:, 1] + + @property + @lru_cache + def elem_conn(self) -> np.ndarray: + """Get the element connectivity for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.elemconn_var)[:][:, 0] + + @property + @lru_cache + def node_heights(self) -> np.ndarray: + """Get the height grid for the unstructured domain nodes.""" + node_heights = self.get_geogrid_var(self.config_options.hgt_var)[:] + + if node_heights.shape[0] != self.ny_global: + self.config_options.errMsg = ( + f"HGT_M dimension mismatch in: {self.config_options.geogrid}" + ) + raise Exception + return node_heights + + @property + @lru_cache + def elem_heights(self) -> np.ndarray: + """Get the height grid for the unstructured domain elements.""" + elem_heights = self.get_var(self.geogrid_ds, self.config_options.hgt_elem_var)[ + : + ] - # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will - # be used for downscaling purposes. 
- slope_node_tmp, slp_azi_node_tmp, slope_elem_tmp, slp_azi_elem_tmp = ( - self.calc_slope_unstructured(esmf_nc) + if elem_heights.shape[0] != len(self.elem_lons): + self.config_options.errMsg = ( + f"HGT_M_ELEM dimension mismatch in: {self.config_options.geogrid}" ) + raise Exception + return elem_heights + + @property + @lru_cache + def dx_elem(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable for the unstructured domain elements.""" + dx = ( + np.diff(self.elem_lons) + * 40075160 + * np.cos(self.elem_lats[0:-1] * np.pi / 180) + / 360 + ) + return np.append(dx, dx[-1]) + + @property + @lru_cache + def dy_elem(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable for the unstructured domain elements.""" + dy = np.diff(self.elem_lats) * 40008000 / 360 + return np.append(dy, dy[-1]) - self.slope = slope_node_tmp[pet_node_inds] - slope_node_tmp = None + @property + @lru_cache + def dz_elem(self) -> np.ndarray: + """Calculate the dz distance in meters for the height variable for the unstructured domain elements.""" + dz = np.diff(self.elem_heights) + return np.append(dz, dz[-1]) - self.slp_azi = slp_azi_node_tmp[pet_node_inds] - slp_azi_node_tmp = None + @property + @lru_cache + def dx_node(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable for the unstructured domain nodes.""" + dx = ( + np.diff(self.node_lons) + * 40075160 + * np.cos(self.node_lats[0:-1] * np.pi / 180) + / 360 + ) + return np.append(dx, dx[-1]) - self.slope_elem = slope_elem_tmp[pet_element_inds] - slope_elem_tmp = None + @property + @lru_cache + def dy_node(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable for the unstructured domain nodes.""" + dy = np.diff(self.node_lats) * 40008000 / 360 + return np.append(dy, dy[-1]) - self.slp_azi_elem = slp_azi_elem_tmp[pet_element_inds] - slp_azi_elem_tmp = None + @property + @lru_cache + def dz_node(self) -> 
np.ndarray: + """Calculate the dz distance in meters for the height variable for the unstructured domain nodes.""" + dz = np.diff(self.node_heights) + return np.append(dz, dz[-1]) - # save indices where mesh was partition for future scatter functions - self.mesh_inds = pet_node_inds - self.mesh_inds_elem = pet_element_inds + @property + @lru_cache + def mesh_inds(self) -> np.ndarray: + """Get the local mesh node indices for the unstructured domain.""" + return self.pet_node_inds - # reset variables to free up memory - pet_node_inds = None - pet_element_inds = None + @property + @lru_cache + def mesh_inds_elem(self) -> np.ndarray: + """Get the local mesh element indices for the unstructured domain.""" + return self.pet_element_inds @property + @lru_cache def nx_local(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[0][1]) @property + @lru_cache def ny_local(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[0][1]) @property + @lru_cache def nx_local_elem(self) -> int: """Get the local x dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) @property + @lru_cache def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) - def calc_slope_unstructured(self, esmf_nc: netCDF4.Dataset) -> tuple: - """Calculate slope grids needed for incoming shortwave radiation downscaling. - - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. 
This calculates the slopes for both nodes and elements - :param esmf_nc: The open netCDF4 dataset for the geogrid file, passed in to avoid having to reopen the file multiple times - :return: A tuple containing slope and slope azimuth for nodes and elements - """ - esmf_nc = netCDF4.Dataset(self.config_options.geogrid, "r") - - try: - node_lons = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 0] - node_lats = esmf_nc.variables[self.config_options.nodecoords_var][:][:, 1] - except Exception as e: - self.config_options.errMsg = ( - f"Unable to extract node coordinates in {self.config_options.geogrid}" - ) - raise e - try: - elem_lons = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 0] - elem_lats = esmf_nc.variables[self.config_options.elemcoords_var][:][:, 1] - except Exception as e: - self.config_options.errMsg = f"Unable to extract element coordinates in {self.config_options.geogrid}" - raise e - try: - elem_conn = esmf_nc.variables[self.config_options.elemconn_var][:][:, 0] - except Exception as e: - self.config_options.errMsg = f"Unable to extract element connectivity in {self.config_options.geogrid}" - raise e - try: - node_heights = esmf_nc.variables[self.config_options.hgt_var][:] - except Exception as e: - self.config_options.errMsg = ( - f"Unable to extract HGT_M from: {self.config_options.geogrid}" - ) - raise e - - if node_heights.shape[0] != self.ny_global: - self.config_options.errMsg = ( - f"HGT_M dimension mismatch in: {self.config_options.geogrid}" - ) - raise Exception - - try: - elem_heights = esmf_nc.variables[self.config_options.hgt_elem_var][:] - except Exception as e: - self.config_options.errMsg = ( - f"Unable to extract HGT_M_ELEM from: {self.config_options.geogrid}" - ) - raise e - - if elem_heights.shape[0] != len(elem_lons): - self.config_options.errMsg = ( - f"HGT_M_ELEM dimension mismatch in: {self.config_options.geogrid}" - ) - raise Exception - esmf_nc.close() - - # calculate node coordinate distances in meters 
- # based on general geospatial formula approximations - # on a spherical grid - dx = np.diff(node_lons) * 40075160 * np.cos(node_lats[0:-1] * np.pi / 180) / 360 - dx = np.append(dx, dx[-1]) - dy = np.diff(node_lats) * 40008000 / 360 - dy = np.append(dy, dy[-1]) - dz = np.diff(node_heights) - dz = np.append(dz, dz[-1]) - - slope_nodes = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi_nodes = (180 / np.pi) * np.arctan(dx / dy) - - # calculate element coordinate distances in meters - # based on general geospatial formula approximations - # on a spherical grid - dx = np.diff(elem_lons) * 40075160 * np.cos(elem_lats[0:-1] * np.pi / 180) / 360 - dx = np.append(dx, dx[-1]) - dy = np.diff(elem_lats) * 40008000 / 360 - dy = np.append(dy, dy[-1]) - dz = np.diff(elem_heights) - dz = np.append(dz, dz[-1]) - - slope_elem = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi_elem = (180 / np.pi) * np.arctan(dx / dy) - - # Reset temporary arrays to None to free up memory - node_lons = None - node_lats = None - elem_lons = None - elem_lats = None - node_heights = None - elem_heights = None - dx = None - dy = None - dz = None - - return slope_nodes, slp_azi_nodes, slope_elem, slp_azi_elem +GEOGRID = { + "gridded": GriddedGeoMeta, + "unstructured": UnstructuredGeoMeta, + "hydrofabric": HydrofabricGeoMeta, +} From f39fb411e4dad2d550c15c532e66915984929172 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Sun, 22 Feb 2026 15:24:22 -0600 Subject: [PATCH 10/34] fix property decorators --- .../NextGen_Forcings_Engine/core/geoMod.py | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 4afb6bc5..0bfe63e3 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -44,39 +44,39 @@ def wrapper(self) -> Any: return wrapper -def 
broadcast(func) -> Any: +def broadcast(prop) -> Any: """Broadcast the output of a function to all processors.""" - @wraps(func) + @wraps(prop) def wrapper(self) -> Any: """Broadcast the output of a function to all processors.""" - result = func(self) + result = prop.fget(self) return self.mpi_config.comm.bcast(result, root=0) - return wrapper + return property(wrapper) -def barrier(func) -> Any: +def barrier(prop) -> Any: """Synchronize all processors at a barrier.""" - @wraps(func) + @wraps(prop) def wrapper(self) -> Any: """Synchronize all processors at a barrier.""" - result = func(self) + result = prop.fget(self) self.mpi_config.comm.barrier() return result - return wrapper + return property(wrapper) -def scatter(func) -> Any: +def scatter(prop) -> Any: """Scatter the output of a function to all processors.""" - @wraps(func) + @wraps(prop) def wrapper(self) -> Any: """Scatter the output of a function to all processors.""" try: - var, name, config_options, post_slice = func(self) + var, name, config_options, post_slice = prop.fget(self) var = self.mpi_config.scatter_array(self, var, config_options) if post_slice: return var[:, :] @@ -88,7 +88,7 @@ def wrapper(self) -> Any: ) raise e - return wrapper + return property(wrapper) class GeoMeta: @@ -942,7 +942,7 @@ def esmf_grid(self) -> ESMF.Mesh: """Create the ESMF Mesh object for the unstructured domain.""" try: return ESMF.Mesh( - self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH + filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) except Exception as e: LOG.critical( @@ -991,7 +991,7 @@ def element_ids(self) -> np.ndarray: @lru_cache def element_ids_global(self) -> np.ndarray: """Get the global element IDs for the unstructured domain.""" - return self.get_geogrid_var(self.config_options.element_id_var) + return self.get_geogrid_var(self.config_options.element_id_var).values @broadcast @property @@ -1460,7 +1460,7 @@ def ny_local_elem(self) -> int: GEOGRID = { - "gridded": 
GriddedGeoMeta, - "unstructured": UnstructuredGeoMeta, + # "gridded": GriddedGeoMeta, + # "unstructured": UnstructuredGeoMeta, "hydrofabric": HydrofabricGeoMeta, } From 47e37a89209bf9a23aa8a4b4ef2a3fbe8ef37a1d Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Mon, 23 Feb 2026 08:34:38 -0600 Subject: [PATCH 11/34] move intialization of InputForcings to its __ini__. --- .../core/forcingInputMod.py | 1085 +++-------------- 1 file changed, 155 insertions(+), 930 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 5e94f65d..183e2bde 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -11,14 +11,25 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import ( + CYCLE_FREQ, + FILE_EXT, + FIND_NEIGHBOR_FILES_MAP, + FORECAST_HORIZONS, + GRIB_LEVELS, + GRIB_MES_IDX, + INPUT_MAP_OUTPUT, + NET_CDF_VARS_NAMES, + PRODUCT_NAME, + REGRID_MAP, + TEMPORAL_INTERPOLATE_INPUTS_MAP, +) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMeta, + GeoMetaWrfHydro, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME -from . import regrid, time_handling, timeInterpMod - LOG = logging.getLogger(MODULE_NAME) @@ -29,14 +40,17 @@ class InputForcings: of a single input forcing product. 
""" - def __init__(self): - """Initialize all attributes and objects to None.""" - self.inDir = None - self.enforce = None - self.paramDir = None - self.userFcstHorizon = None - self.userCycleOffset = None - self.file_type = None + def __init__( + self, + key_value: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMetaWrfHydro = None, + mpi_config: MpiConfig = None, + ): + """Initialize InputForcings with configuration options, geospatial metadata, and MPI configuration.""" + self.config_options = config_options + self.geo_meta_wrf_hydro = geo_meta + self.mpi_config = mpi_config self.nx_global = None self.ny_global = None self.nx_local = None @@ -52,24 +66,10 @@ def __init__(self): self.y_lower_bound_corner = None self.y_upper_bound_corner = None self.outFreq = None - self.regridOpt = None - self.timeInterpOpt = None - self.t2dDownscaleOpt = None self.lapseGrid = None self.rqiClimoGrid = None - self.swDowscaleOpt = None - self.precipDownscaleOpt = None self.nwmPRISM_numGrid = None self.nwmPRISM_denGrid = None - self.q2dDownscaleOpt = None - self.psfcDownscaleOpt = None - self.t2dBiasCorrectOpt = None - self.swBiasCorrectOpt = None - self.precipBiasCorrectOpt = None - self.q2dBiasCorrectOpt = None - self.windBiasCorrectOpt = None - self.psfcBiasCorrectOpt = None - self.lwBiasCorrectOpt = None self.esmf_lats = None self.esmf_lons = None self.esmf_grid_in = None @@ -92,16 +92,10 @@ def __init__(self): self.regridded_forcings2 = None self.globalPcpRate1 = None self.globalPcpRate2 = None - self.regridded_mask = None - self.regridded_mask_AORC = None - self.final_forcings = None self.regridded_forcings1_elem = None self.regridded_forcings2_elem = None self.globalPcpRate1_elem = None self.globalPcpRate2_elem = None - self.regridded_mask_elem = None - self.regridded_mask_elem_AORC = None - self.final_forcings_elem = None self.ndv = None self.file_in1 = None self.file_in2 = None @@ -109,60 +103,136 @@ def __init__(self): self.fcst_hour2 = None 
self.fcst_date1 = None self.fcst_date2 = None - self.height = None self.height_elem = None self.tmpFile = None self.tmpFileHeight = None - self.psfcTmp = None - self.t2dTmp = None - self.psfcTmp_elem = None - self.t2dTmp_elem = None self.rstFlag = 0 self.regridded_precip1 = None self.regridded_precip2 = None self.regridded_precip1_elem = None self.regridded_precip2_elem = None - self.border = None self.skip = False # Private attrs that have associated @property setter/getter - self._keyValue = None + self._keyValue = key_value self._file_ext = None self._cycle_freq = None self._grib_vars = None + self.find_neighbor_files_map = FIND_NEIGHBOR_FILES_MAP + self.regrid_map = REGRID_MAP + self.temporal_interpolate_inputs_map = TEMPORAL_INTERPOLATE_INPUTS_MAP + + self.initialize_config_options() + if self.q2dDownscaleOpt > 0: + self.handle_humidity_downscaling() + self.initialize_geo_data() + + def initialize_config_options(self): + """Initialize configuration options from the config_options attribute.""" + [ + setattr(self, key, val[self.keyValue]) + for key, val in vars(self.config_options).items() + if isinstance(val, list) and len(val) > 0 + ] + + def handle_humidity_downscaling(self): + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. 
+ """ + if self.config_options.grid_type == "gridded": + self.t2dTmp = np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + self.psfcTmp = np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + self.t2dTmp_elem = None + self.psfcTmp_elem = None + elif self.config_options.grid_type == "unstructured": + self.t2dTmp = np.empty([self.geo_meta.ny_local], np.float32) + self.psfcTmp = np.empty([self.geo_meta.ny_local], np.float32) + self.t2dTmp_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) + self.psfcTmp_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) + elif self.config_options.grid_type == "hydrofabric": + self.t2dTmp = np.empty([self.geo_meta.ny_local], np.float32) + self.psfcTmp = np.empty([self.geo_meta.ny_local], np.float32) + self.t2dTmp_elem = None + self.psfcTmp_elem = None + + def initialize_geo_data(self): + """Initialize geometry-related arrays based on grid type and downscaling options. + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. + This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. 
+ """ + force_count = 9 if self.config_options.include_lqfrac else 8 + if force_count == 8 and 8 in self.input_map_output: + # TODO: this assumes that LQFRAC (8) is always the last grib var + self.grib_vars = self.grib_vars[:-1] + + if self.config_options.grid_type == "gridded": + self.final_forcings = np.empty( + [force_count, self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float64, + ) + self.height = np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 + ) + self.regridded_mask = np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 + ) + self.regridded_mask_AORC = np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 + ) + self.final_forcings_elem = None + self.height_elem = None + self.regridded_mask_elem = None + self.regridded_mask_elem_AORC = None + + elif self.config_options.grid_type == "unstructured": + self.final_forcings = np.empty( + [force_count, self.geo_meta.ny_local], np.float64 + ) + self.height = np.empty([self.geo_meta.ny_local], np.float32) + self.regridded_mask = np.empty([self.geo_meta.ny_local], np.float32) + self.regridded_mask_AORC = np.empty([self.geo_meta.ny_local], np.float32) + self.final_forcings_elem = np.empty( + [force_count, self.geo_meta.ny_local_elem], np.float64 + ) + self.height_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) + self.regridded_mask_elem = np.empty( + [self.geo_meta.ny_local_elem], np.float32 + ) + self.regridded_mask_elem_AORC = np.empty( + [self.geo_meta.ny_local_elem], np.float32 + ) + + elif self.config_options.grid_type == "hydrofabric": + self.final_forcings = np.empty( + [force_count, self.geo_meta.ny_local], np.float64 + ) + self.height = np.empty([self.geo_meta.ny_local], np.float32) + self.regridded_mask = np.empty([self.geo_meta.ny_local], np.float32) + self.regridded_mask_AORC = np.empty([self.geo_meta.ny_local], np.float32) + self.final_forcings_elem = None + self.height_elem = None + self.regridded_mask_elem = None + 
self.regridded_mask_elem_AORC = None + + # Obtain custom input cycle frequencies + if self.key_value == 10 or self.key_value == 11: + self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] @property def product_name(self): """Map the forcing key value to the product name.""" - return { - 1: "NLDAS2_GRIB1", - 2: "NARR_GRIB1", - 3: "GFS_Production_GRIB2", - 4: "NAM_Conus_Nest_GRIB2", - 5: "HRRR_Conus_GRIB2", - 6: "RAP_Conus_GRIB2", - 7: "CFSv2_6Hr_Global_GRIB2", - 8: "WRF_ARW_Hawaii_GRIB2", - 9: "GFS_Production_025d_GRIB2", - 10: "Custom_NetCDF_Hourly", - 11: "Custom_NetCDF_Hourly", - 12: "AORC", - 13: "NAM_Nest_3km_Hawaii", - 14: "NAM_Nest_3km_PuertoRico", - 15: "NAM_Nest_3km_Alaska", - 16: "NAM_Nest_3km_Hawaii_Radiation-Only", - 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", - 18: "WRF_ARW_PuertoRico_GRIB2", - 19: "HRRR_Alaska_GRIB2", - 20: "Alaska_AnA", - 21: "AORC_Alaska", - 22: "Alaska_ExtAnA", - 23: "ERA5", - 24: "NBM", - 25: "NDFD", - 26: "HRRR_15min", - 27: "NWM", - }[self.keyValue] + return PRODUCT_NAME[self.keyValue] @property def keyValue(self): @@ -181,23 +251,10 @@ def keyValue(self, val): @property def file_ext(self) -> str: """Map the forcing file type to the file extension.""" - if self._file_ext is None: - # First call to getter, initialize - if self.file_type == "GRIB1": - ext = ".grb" - elif self.file_type == "GRIB2": - ext = ".grib2" - elif self.file_type == "NETCDF": - ext = ".nc" - elif self.file_type == "NETCDF4": - ext = ".nc4" - elif self.file_type == "NWM": - ext = ".LDASIN_DOMAIN1" - elif self.file_type == "ZARR": - ext = ".zarr" - else: - raise ValueError(f"Unexpected file_type: {self.file_type}") - self._file_ext = ext + ext = FILE_EXT.get(self.file_type) + if ext is None: + raise ValueError(f"Unexpected file_type: {self.file_type}") + self._file_ext = ext return self._file_ext @@ -214,35 +271,7 @@ def cycle_freq(self) -> int: """Map the forcing key value to the cycle frequency in minutes.""" if self._cycle_freq is None: # 
First call to getter, initialize - self._cycle_freq = { - 1: 60, - 2: 180, - 3: 360, - 4: 360, - 5: 60, - 6: 60, - 7: 360, - 8: 1440, - 9: 360, - 10: -9999, - 11: -9999, - 12: -9999, - 13: 360, - 14: 360, - 15: 360, - 16: 360, - 17: 360, - 18: 1440, - 19: 180, - 20: 180, - 21: -9999, - 22: 180, - 23: -9999, - 24: 60, - 25: 1440, - 26: 15, - 27: -9999, - }[self.keyValue] + self._cycle_freq = CYCLE_FREQ[self.keyValue] return self._cycle_freq @cycle_freq.setter @@ -258,74 +287,7 @@ def grib_vars(self) -> list[str] | None: """Map the forcing key value to the required GRIB variable names.""" if self._grib_vars is None: # First call to getter, initialize - self._grib_vars = { - 1: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 2: None, - 3: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "PRATE", - "DSWRF", - "DLWRF", - "PRES", - "CPOFP", - ], - 4: None, - 5: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "CPOFP", - ], - 6: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "FROZR", - ], - 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 9: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 10: None, - 11: None, - 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 16: ["DSWRF", "DLWRF"], - 17: ["DSWRF", "DLWRF"], - 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", 
"PRES"], - 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 24: ["TMP", "APCP"], - 25: ["TMP", "WDIR", "WSPD", "APCP"], - 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 27: [ - "T2D", - "Q2D", - "U2D", - "V2D", - "RAINRATE", - "SWDOWN", - "LWDOWN", - "PSFC", - ], - }[self.keyValue] + self._grib_vars = [self.keyValue] return self._grib_vars @grib_vars.setter @@ -339,364 +301,12 @@ def grib_vars(self, val): @property def grib_levels(self): """Map the forcing key value to the required GRIB variable levels.""" - return { - 1: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 2: None, - 3: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 4: None, - 5: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 6: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 7: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 8: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 9: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 10: None, - 11: None, - 12: None, - 13: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 14: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - 
"surface", - "surface", - "surface", - ], - 15: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 16: ["surface", "surface"], - 17: ["surface", "surface"], - 18: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 19: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 20: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 21: None, - 22: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 23: None, - 24: ["2 m above ground", "surface"], - 25: [ - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - ], - 26: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 27: None, - }[self.keyValue] + return GRIB_LEVELS[self.keyValue] @property def netcdf_var_names(self): """Map the forcing key value to the required NetCDF variable names.""" - return { - 1: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 2: None, - 3: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 4: None, - 5: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 6: [ - "TMP_2maboveground", - 
"SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "FROZR_surface", - ], - 7: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 8: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 9: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 12: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 13: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 14: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 15: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 16: ["DSWRF_surface", "DLWRF_surface"], - 17: ["DSWRF_surface", "DLWRF_surface"], - 18: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 19: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 20: [ - 
"TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 21: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 22: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], - 24: ["TMP_2maboveground", "APCP_surface"], - 25: [ - "TMP_2maboveground", - "WDIR_10maboveground", - "WIND_10maboveground", - "APCP_surface", - ], - 26: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], - }[self.keyValue] + return NET_CDF_VARS_NAMES[self.keyValue] @property def grib_mes_idx(self): @@ -705,207 +315,17 @@ def grib_mes_idx(self): arrays that store the message ids of required forcing variables for each forcing type TODO fill these arrays for forcing types other than GFS """ - return { - 1: None, - 2: None, - 3: None, - 4: None, - 5: None, - 6: None, - 7: None, - 8: None, - 9: [33, 34, 39, 40, 43, 88, 91, 6], - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: None, - 27: None, - }[self.keyValue] + return GRIB_MES_IDX[self.keyValue] @property def input_map_output(self): """Map the forcing key value to the input to output variable mapping.""" - return { - 1: [4, 5, 0, 1, 3, 7, 2, 6], - 2: None, - 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 4: None, - 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], 
- 7: [4, 5, 0, 1, 3, 7, 2, 6], - 8: [4, 5, 0, 1, 3, 6], - 9: [4, 5, 0, 1, 3, 7, 2, 6], - 10: [4, 5, 0, 1, 3, 7, 2, 6], - 11: [4, 5, 0, 1, 3, 7, 2, 6], - 12: [4, 5, 0, 1, 3, 7, 2, 6], - 13: [4, 5, 0, 1, 3, 7, 2, 6], - 14: [4, 5, 0, 1, 3, 7, 2, 6], - 15: [4, 5, 0, 1, 3, 7, 2, 6], - 16: [7, 2], - 17: [7, 2], - 18: [4, 5, 0, 1, 3, 6], - 19: [4, 5, 0, 1, 3, 7, 2, 6], - 20: [4, 5, 0, 1, 3, 7, 2, 6], - 21: [4, 5, 0, 1, 3, 7, 2, 6], - 22: [4, 5, 0, 1, 3, 7, 2, 6], - 23: [4, 5, 0, 1, 3, 7, 2, 6], - 24: [4, 3], - 25: [4, 0, 1, 3], - 26: [4, 5, 0, 1, 3, 7, 2, 6], - 27: [4, 5, 0, 1, 3, 7, 2, 6], - }[self.keyValue] + return INPUT_MAP_OUTPUT[self.keyValue] @property def forecast_horizons(self): """Map the forcing key value to the forecast horizons list.""" - return { - 1: None, - 2: None, - 3: None, - 4: None, - 5: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 6: [ - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - ], - 7: None, - 8: None, - 9: None, - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 27: None, - }[self.keyValue] - - @property - def find_neighbor_files_map(self): - """Map the forcing key value to the neighbor file finding function.""" - return { - 1: time_handling.find_nldas_neighbors, - 3: time_handling.find_gfs_neighbors, - 5: time_handling.find_input_neighbors, - 6: time_handling.find_input_neighbors, - 7: time_handling.find_cfsv2_neighbors, - 8: time_handling.find_hourly_wrf_arw_neighbors, - 9: time_handling.find_gfs_neighbors, - 10: 
time_handling.find_custom_hourly_neighbors, - 11: time_handling.find_custom_hourly_neighbors, - 12: time_handling.find_aorc_neighbors, - 13: time_handling.find_nam_nest_neighbors, - 14: time_handling.find_nam_nest_neighbors, - 15: time_handling.find_nam_nest_neighbors, - 16: time_handling.find_nam_nest_neighbors, - 17: time_handling.find_nam_nest_neighbors, - 18: time_handling.find_hourly_wrf_arw_neighbors, - 19: time_handling.find_ak_hrrr_neighbors, - 20: time_handling.find_ak_hrrr_neighbors, - 21: time_handling.find_aorc_neighbors, - 22: time_handling.find_ak_hrrr_neighbors, - 23: time_handling.find_era5_neighbors, - 24: time_handling.find_hourly_nbm_neighbors, - 25: time_handling.find_ndfd_neighbors, - 26: time_handling.find_input_neighbors, - 27: time_handling.find_nwm_neighbors, - } + return FORECAST_HORIZONS[self.keyValue] def calc_neighbor_files( self, config_options: ConfigOptions, dcurrent, mpi_config: MpiConfig @@ -929,41 +349,10 @@ def calc_neighbor_files( self, config_options, dcurrent, mpi_config ) - @property - def regrid_map(self): - """Map the forcing key value to the regridding function.""" - return { - 1: regrid.regrid_conus_rap, - 3: regrid.regrid_gfs, - 5: regrid.regrid_conus_hrrr, - 6: regrid.regrid_conus_rap, - 7: regrid.regrid_cfsv2, - 8: regrid.regrid_hourly_wrf_arw, - 9: regrid.regrid_gfs, - 10: regrid.regrid_custom_hourly_netcdf, - 11: regrid.regrid_custom_hourly_netcdf, - 12: regrid.regrid_custom_hourly_netcdf, - 13: regrid.regrid_nam_nest, - 14: regrid.regrid_nam_nest, - 15: regrid.regrid_nam_nest, - 16: regrid.regrid_nam_nest, - 17: regrid.regrid_nam_nest, - 18: regrid.regrid_hourly_wrf_arw, - 19: regrid.regrid_conus_hrrr, - 20: regrid.regrid_conus_hrrr, - 21: regrid.regrid_custom_hourly_netcdf, - 22: regrid.regrid_conus_hrrr, - 23: regrid.regrid_era5, - 24: regrid.regrid_hourly_nbm, - 25: regrid.regrid_ndfd, - 26: regrid.regrid_conus_hrrr, - 27: regrid.regrid_nwm, - } - def regrid_inputs( self, config_options: ConfigOptions, - 
wrf_hyro_geo_meta: GeoMeta, + wrf_hyro_geo_meta: GeoMetaWrfHydro, mpi_config: MpiConfig, ): """Regrid input forcings to the final output grids for this timestep. @@ -982,15 +371,6 @@ def regrid_inputs( self, config_options, wrf_hyro_geo_meta, mpi_config ) - @property - def temporal_interpolate_inputs_map(self): - """Map the temporal interpolation options to the functions.""" - return { - 0: timeInterpMod.no_interpolation, - 1: timeInterpMod.nearest_neighbor, - 2: timeInterpMod.weighted_average, - } - def temporal_interpolate_inputs( self, config_options: ConfigOptions, mpi_config: MpiConfig ): @@ -1012,7 +392,7 @@ def temporal_interpolate_inputs( def init_dict( config_options: ConfigOptions, - geo_meta_wrf_hydro: GeoMeta, + geo_meta_wrf_hydro: GeoMetaWrfHydro, mpi_config: MpiConfig, ) -> dict: """Initialize the input forcing dictionary. @@ -1031,170 +411,15 @@ def init_dict( # Loop through and initialize the empty class for each product. custom_count = 0 - for force_tmp in range(0, config_options.number_inputs): - force_key = config_options.input_forcings[force_tmp] - input_dict[force_key] = InputForcings() + for idx in range(0, config_options.number_inputs): + force_key = config_options.input_forcings[idx] + input_dict[force_key] = InputForcings( + force_key, config_options, geo_meta_wrf_hydro, mpi_config + ) input_dict[force_key].keyValue = force_key - input_dict[force_key].regridOpt = config_options.regrid_opt[force_tmp] - input_dict[force_key].enforce = config_options.input_force_mandatory[force_tmp] - input_dict[force_key].timeInterpOpt = config_options.forceTemoralInterp[ - force_tmp - ] - input_dict[force_key].q2dDownscaleOpt = config_options.q2dDownscaleOpt[ - force_tmp - ] - input_dict[force_key].t2dDownscaleOpt = config_options.t2dDownscaleOpt[ - force_tmp - ] - input_dict[force_key].precipDownscaleOpt = config_options.precipDownscaleOpt[ - force_tmp - ] - input_dict[force_key].swDowscaleOpt = config_options.swDownscaleOpt[force_tmp] - 
input_dict[force_key].psfcDownscaleOpt = config_options.psfcDownscaleOpt[ - force_tmp - ] - # Check to make sure the necessary input files for downscaling are present. - # if input_dict[force_key].t2dDownscaleOpt == 2: - # # We are using a pre-calculated lapse rate on the WRF-Hydro grid. - # pathCheck = config_options.downscaleParamDir = "/T2M_Lapse_Rate_" + \ - # input_dict[force_key].product_name + ".nc" - # if not os.path.isfile(pathCheck): - # config_options.errMsg = "Expected temperature lapse rate grid: " + \ - # pathCheck + " not found." - # raise Exception - - input_dict[force_key].t2dBiasCorrectOpt = config_options.t2BiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].q2dBiasCorrectOpt = config_options.q2BiasCorrectOpt[ - force_tmp - ] - input_dict[ - force_key - ].precipBiasCorrectOpt = config_options.precipBiasCorrectOpt[force_tmp] - input_dict[force_key].swBiasCorrectOpt = config_options.swBiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].lwBiasCorrectOpt = config_options.lwBiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].windBiasCorrectOpt = config_options.windBiasCorrect[ - force_tmp - ] - input_dict[force_key].psfcBiasCorrectOpt = config_options.psfcBiasCorrectOpt[ - force_tmp - ] - - input_dict[force_key].inDir = config_options.input_force_dirs[force_tmp] - input_dict[force_key].paramDir = config_options.dScaleParamDirs[force_tmp] - input_dict[force_key].file_type = config_options.input_force_types[force_tmp] - input_dict[force_key].userFcstHorizon = config_options.fcst_input_horizons[ - force_tmp - ] - input_dict[force_key].userCycleOffset = config_options.fcst_input_offsets[ - force_tmp - ] - - input_dict[force_key].border = config_options.ignored_border_widths[force_tmp] - - # If we have specified specific humidity downscaling, establish arrays to hold - # temporary temperature arrays that are un-downscaled. 
- if input_dict[force_key].q2dDownscaleOpt > 0: - if config_options.grid_type == "gridded": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float32, - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float32, - ) - elif config_options.grid_type == "unstructured": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].t2dTmp_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].psfcTmp_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - elif config_options.grid_type == "hydrofabric": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - # Initialize the local final grid of values. This is represntative - # of the local grid for this forcing, for a specific output timesetp. - # This grid will be updated from one output timestep to another, and - # also through downscaling and bias correction. 
- force_count = 9 if config_options.include_lqfrac else 8 - if force_count == 8 and 8 in input_dict[force_key].input_map_output: - # TODO: this assumes that LQFRAC (8) is always the last grib var - input_dict[force_key].grib_vars = input_dict[force_key].grib_vars[:-1] - if config_options.grid_type == "gridded": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float64, - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - elif config_options.grid_type == "unstructured": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local], np.float64 - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].final_forcings_elem = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local_elem], np.float64 - ) - input_dict[force_key].height_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].regridded_mask_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].regridded_mask_elem_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - elif config_options.grid_type == "hydrofabric": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local], np.float64 - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - 
input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) # Obtain custom input cycle frequencies if force_key == 10 or force_key == 11: - input_dict[force_key].cycle_freq = config_options.customFcstFreq[ - custom_count - ] custom_count = custom_count + 1 return input_dict From b3a96dc3e280b932fb1467923e04a3abdad8b947 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Mon, 23 Feb 2026 09:09:23 -0600 Subject: [PATCH 12/34] move static dictionaries to conts.py --- .../NextGen_Forcings_Engine/core/consts.py | 784 +++++++++++++++++- .../core/forcingInputMod.py | 103 +-- 2 files changed, 797 insertions(+), 90 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index cc54e8d9..9bd8e4b0 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -1,3 +1,13 @@ +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core import ( + regrid, + time_handling, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.timeInterpMod import ( + nearest_neighbor, + no_interpolation, + weighted_average, +) + CONSTS = { "geoMod": { "GeoMeta": [ @@ -32,9 +42,6 @@ "esmf_lat", "esmf_lon", ], - "handle_exception": { - "esmf_nc": "Unable to open spatial metadata file: :::arg:::", - }, "UnstructuredGeoMeta": [ "x_lower_bound", "x_upper_bound", @@ -85,4 +92,775 @@ "slp_azi_elem", ], }, + "forcingInputMod": { + "InputForcing": [ + "nx_global", + "ny_global", + "nx_local", + "ny_local", + "nx_local_corner", + "ny_local_corner", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "x_lower_bound_corner", + "x_upper_bound_corner", + "y_lower_bound_corner", + "y_upper_bound_corner", + "outFreq", + "lapseGrid", + 
"rqiClimoGrid", + "nwmPRISM_numGrid", + "nwmPRISM_denGrid", + "esmf_lats", + "esmf_lons", + "esmf_grid_in", + "esmf_grid_in_elem", + "regridObj", + "regridObj_elem", + "esmf_field_in", + "esmf_field_in_elem", + "esmf_field_out", + "esmf_field_out_elem", + # -------------------------------- + # Only used for CFSv2 bias correction + # as bias correction needs to take + # place prior to regridding. + "coarse_input_forcings1", + "coarse_input_forcings2", + # -------------------------------- + "regridded_forcings1", + "regridded_forcings2", + "globalPcpRate1", + "globalPcpRate2", + "regridded_forcings1_elem", + "regridded_forcings2_elem", + "globalPcpRate1_elem", + "globalPcpRate2_elem", + "ndv", + "file_in1", + "file_in2", + "fcst_hour1", + "fcst_hour2", + "fcst_date1", + "fcst_date2", + "height_elem", + "tmpFile", + "tmpFileHeight", + "regridded_precip1", + "regridded_precip2", + "regridded_precip1_elem", + "regridded_precip2_elem", + ], + "PRODUCT_NAME": { + 1: "NLDAS2_GRIB1", + 2: "NARR_GRIB1", + 3: "GFS_Production_GRIB2", + 4: "NAM_Conus_Nest_GRIB2", + 5: "HRRR_Conus_GRIB2", + 6: "RAP_Conus_GRIB2", + 7: "CFSv2_6Hr_Global_GRIB2", + 8: "WRF_ARW_Hawaii_GRIB2", + 9: "GFS_Production_025d_GRIB2", + 10: "Custom_NetCDF_Hourly", + 11: "Custom_NetCDF_Hourly", + 12: "AORC", + 13: "NAM_Nest_3km_Hawaii", + 14: "NAM_Nest_3km_PuertoRico", + 15: "NAM_Nest_3km_Alaska", + 16: "NAM_Nest_3km_Hawaii_Radiation-Only", + 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", + 18: "WRF_ARW_PuertoRico_GRIB2", + 19: "HRRR_Alaska_GRIB2", + 20: "Alaska_AnA", + 21: "AORC_Alaska", + 22: "Alaska_ExtAnA", + 23: "ERA5", + 24: "NBM", + 25: "NDFD", + 26: "HRRR_15min", + 27: "NWM", + }, + "CYCLE_FREQ": { + 1: 60, + 2: 180, + 3: 360, + 4: 360, + 5: 60, + 6: 60, + 7: 360, + 8: 1440, + 9: 360, + 10: -9999, + 11: -9999, + 12: -9999, + 13: 360, + 14: 360, + 15: 360, + 16: 360, + 17: 360, + 18: 1440, + 19: 180, + 20: 180, + 21: -9999, + 22: 180, + 23: -9999, + 24: 60, + 25: 1440, + 26: 15, + 27: -9999, + }, + 
"GRIB_VARS": { + 1: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 2: None, + 3: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "PRATE", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 4: None, + 5: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 6: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "FROZR", + ], + 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 9: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 10: None, + 11: None, + 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 16: ["DSWRF", "DLWRF"], + 17: ["DSWRF", "DLWRF"], + 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 24: ["TMP", "APCP"], + 25: ["TMP", "WDIR", "WSPD", "APCP"], + 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 27: [ + "T2D", + "Q2D", + "U2D", + "V2D", + "RAINRATE", + "SWDOWN", + "LWDOWN", + "PSFC", + ], + }, + "GRIB_LEVELS": { + 1: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 2: None, + 3: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + 
], + 4: None, + 5: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 6: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 7: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 8: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + ], + 9: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 10: None, + 11: None, + 12: None, + 13: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 14: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 15: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 16: ["surface", "surface"], + 17: ["surface", "surface"], + 18: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + ], + 19: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 20: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 21: None, + 22: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 23: None, + 24: ["2 m above ground", 
"surface"], + 25: [ + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + ], + 26: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 27: None, + }, + "NET_CDF_VARS_NAMES": { + 1: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 2: None, + 3: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 4: None, + 5: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 6: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "FROZR_surface", + ], + 7: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 8: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 9: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 12: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 13: [ + "TMP_2maboveground", + 
"SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 14: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 15: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 16: ["DSWRF_surface", "DLWRF_surface"], + 17: ["DSWRF_surface", "DLWRF_surface"], + 18: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 19: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 20: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 21: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 22: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], + 24: ["TMP_2maboveground", "APCP_surface"], + 25: [ + "TMP_2maboveground", + "WDIR_10maboveground", + "WIND_10maboveground", + "APCP_surface", + ], + 26: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], + }, + "GRIB_MES_IDX": { + 
1: None, + 2: None, + 3: None, + 4: None, + 5: None, + 6: None, + 7: None, + 8: None, + 9: [33, 34, 39, 40, 43, 88, 91, 6], + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: None, + 27: None, + }, + "INPUT_MAP_OUTPUT": { + 1: [4, 5, 0, 1, 3, 7, 2, 6], + 2: None, + 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 4: None, + 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 7: [4, 5, 0, 1, 3, 7, 2, 6], + 8: [4, 5, 0, 1, 3, 6], + 9: [4, 5, 0, 1, 3, 7, 2, 6], + 10: [4, 5, 0, 1, 3, 7, 2, 6], + 11: [4, 5, 0, 1, 3, 7, 2, 6], + 12: [4, 5, 0, 1, 3, 7, 2, 6], + 13: [4, 5, 0, 1, 3, 7, 2, 6], + 14: [4, 5, 0, 1, 3, 7, 2, 6], + 15: [4, 5, 0, 1, 3, 7, 2, 6], + 16: [7, 2], + 17: [7, 2], + 18: [4, 5, 0, 1, 3, 6], + 19: [4, 5, 0, 1, 3, 7, 2, 6], + 20: [4, 5, 0, 1, 3, 7, 2, 6], + 21: [4, 5, 0, 1, 3, 7, 2, 6], + 22: [4, 5, 0, 1, 3, 7, 2, 6], + 23: [4, 5, 0, 1, 3, 7, 2, 6], + 24: [4, 3], + 25: [4, 0, 1, 3], + 26: [4, 5, 0, 1, 3, 7, 2, 6], + 27: [4, 5, 0, 1, 3, 7, 2, 6], + }, + "FORECAST_HORIZONS": { + 1: None, + 2: None, + 3: None, + 4: None, + 5: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 6: [ + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + ], + 7: None, + 8: None, + 9: None, + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 27: None, + }, + "FIND_NEIGHBOR_FILES_MAP": { + 1: time_handling.find_nldas_neighbors, + 3: 
time_handling.find_gfs_neighbors, + 5: time_handling.find_input_neighbors, + 6: time_handling.find_input_neighbors, + 7: time_handling.find_cfsv2_neighbors, + 8: time_handling.find_hourly_wrf_arw_neighbors, + 9: time_handling.find_gfs_neighbors, + 10: time_handling.find_custom_hourly_neighbors, + 11: time_handling.find_custom_hourly_neighbors, + 12: time_handling.find_aorc_neighbors, + 13: time_handling.find_nam_nest_neighbors, + 14: time_handling.find_nam_nest_neighbors, + 15: time_handling.find_nam_nest_neighbors, + 16: time_handling.find_nam_nest_neighbors, + 17: time_handling.find_nam_nest_neighbors, + 18: time_handling.find_hourly_wrf_arw_neighbors, + 19: time_handling.find_ak_hrrr_neighbors, + 20: time_handling.find_ak_hrrr_neighbors, + 21: time_handling.find_aorc_neighbors, + 22: time_handling.find_ak_hrrr_neighbors, + 23: time_handling.find_era5_neighbors, + 24: time_handling.find_hourly_nbm_neighbors, + 25: time_handling.find_ndfd_neighbors, + 26: time_handling.find_input_neighbors, + 27: time_handling.find_nwm_neighbors, + }, + "REGRID_MAP": { + 1: regrid.regrid_conus_rap, + 3: regrid.regrid_gfs, + 5: regrid.regrid_conus_hrrr, + 6: regrid.regrid_conus_rap, + 7: regrid.regrid_cfsv2, + 8: regrid.regrid_hourly_wrf_arw, + 9: regrid.regrid_gfs, + 10: regrid.regrid_custom_hourly_netcdf, + 11: regrid.regrid_custom_hourly_netcdf, + 12: regrid.regrid_custom_hourly_netcdf, + 13: regrid.regrid_nam_nest, + 14: regrid.regrid_nam_nest, + 15: regrid.regrid_nam_nest, + 16: regrid.regrid_nam_nest, + 17: regrid.regrid_nam_nest, + 18: regrid.regrid_hourly_wrf_arw, + 19: regrid.regrid_conus_hrrr, + 20: regrid.regrid_conus_hrrr, + 21: regrid.regrid_custom_hourly_netcdf, + 22: regrid.regrid_conus_hrrr, + 23: regrid.regrid_era5, + 24: regrid.regrid_hourly_nbm, + 25: regrid.regrid_ndfd, + 26: regrid.regrid_conus_hrrr, + 27: regrid.regrid_nwm, + }, + "TEMPORAL_INTERPOLATE_INPUTS_MAP": { + 0: no_interpolation, + 1: nearest_neighbor, + 2: weighted_average, + }, + "FILE_EXT": { + 
"GRIB1": ".grb", + "GRIB2": ".grib2", + "NETCDF": ".nc", + "NETCDF4": ".nc4", + "NWM": ".LDASIN_DOMAIN1", + "ZARR": ".zarr", + }, + }, } diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 183e2bde..4a274fd9 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -5,25 +5,14 @@ """ import logging +from pathlib import Path import numpy as np from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import ( - CYCLE_FREQ, - FILE_EXT, - FIND_NEIGHBOR_FILES_MAP, - FORECAST_HORIZONS, - GRIB_LEVELS, - GRIB_MES_IDX, - INPUT_MAP_OUTPUT, - NET_CDF_VARS_NAMES, - PRODUCT_NAME, - REGRID_MAP, - TEMPORAL_INTERPOLATE_INPUTS_MAP, -) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( GeoMetaWrfHydro, ) @@ -31,6 +20,7 @@ from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) +CONSTS = CONSTS[Path(__file__).stem] class InputForcings: @@ -51,76 +41,15 @@ def __init__( self.config_options = config_options self.geo_meta_wrf_hydro = geo_meta self.mpi_config = mpi_config - self.nx_global = None - self.ny_global = None - self.nx_local = None - self.ny_local = None - self.nx_local_corner = None - self.ny_local_corner = None - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None - self.x_lower_bound_corner = None - self.x_upper_bound_corner = None - self.y_lower_bound_corner = None - self.y_upper_bound_corner = None - self.outFreq = None - self.lapseGrid = None - self.rqiClimoGrid = None - self.nwmPRISM_numGrid = None - self.nwmPRISM_denGrid = None - 
self.esmf_lats = None - self.esmf_lons = None - self.esmf_grid_in = None - self.esmf_grid_in_elem = None self.regridComplete = False - self.regridObj = None - self.regridObj_elem = None - self.esmf_field_in = None - self.esmf_field_in_elem = None - self.esmf_field_out = None - self.esmf_field_out_elem = None - # -------------------------------- - # Only used for CFSv2 bias correction - # as bias correction needs to take - # place prior to regridding. - self.coarse_input_forcings1 = None - self.coarse_input_forcings2 = None - # -------------------------------- - self.regridded_forcings1 = None - self.regridded_forcings2 = None - self.globalPcpRate1 = None - self.globalPcpRate2 = None - self.regridded_forcings1_elem = None - self.regridded_forcings2_elem = None - self.globalPcpRate1_elem = None - self.globalPcpRate2_elem = None - self.ndv = None - self.file_in1 = None - self.file_in2 = None - self.fcst_hour1 = None - self.fcst_hour2 = None - self.fcst_date1 = None - self.fcst_date2 = None - self.height_elem = None - self.tmpFile = None - self.tmpFileHeight = None + self.regridComplete = False self.rstFlag = 0 - self.regridded_precip1 = None - self.regridded_precip2 = None - self.regridded_precip1_elem = None - self.regridded_precip2_elem = None self.skip = False - - # Private attrs that have associated @property setter/getter self._keyValue = key_value - self._file_ext = None - self._cycle_freq = None - self._grib_vars = None - self.find_neighbor_files_map = FIND_NEIGHBOR_FILES_MAP - self.regrid_map = REGRID_MAP - self.temporal_interpolate_inputs_map = TEMPORAL_INTERPOLATE_INPUTS_MAP + + self.find_neighbor_files_map = CONSTS["FIND_NEIGHBOR_FILES_MAP"] + self.regrid_map = CONSTS["REGRID_MAP"] + self.temporal_interpolate_inputs_map = CONSTS["TEMPORAL_INTERPOLATE_INPUTS_MAP"] self.initialize_config_options() if self.q2dDownscaleOpt > 0: @@ -232,7 +161,7 @@ def initialize_geo_data(self): @property def product_name(self): """Map the forcing key value to the product 
name.""" - return PRODUCT_NAME[self.keyValue] + return CONSTS["PRODUCT_NAME"][self.keyValue] @property def keyValue(self): @@ -251,7 +180,7 @@ def keyValue(self, val): @property def file_ext(self) -> str: """Map the forcing file type to the file extension.""" - ext = FILE_EXT.get(self.file_type) + ext = CONSTS["FILE_EXT"].get(self.file_type) if ext is None: raise ValueError(f"Unexpected file_type: {self.file_type}") self._file_ext = ext @@ -271,7 +200,7 @@ def cycle_freq(self) -> int: """Map the forcing key value to the cycle frequency in minutes.""" if self._cycle_freq is None: # First call to getter, initialize - self._cycle_freq = CYCLE_FREQ[self.keyValue] + self._cycle_freq = CONSTS["CYCLE_FREQ"][self.keyValue] return self._cycle_freq @cycle_freq.setter @@ -301,12 +230,12 @@ def grib_vars(self, val): @property def grib_levels(self): """Map the forcing key value to the required GRIB variable levels.""" - return GRIB_LEVELS[self.keyValue] + return CONSTS["GRIB_LEVELS"][self.keyValue] @property def netcdf_var_names(self): """Map the forcing key value to the required NetCDF variable names.""" - return NET_CDF_VARS_NAMES[self.keyValue] + return CONSTS["NET_CDF_VARS_NAMES"][self.keyValue] @property def grib_mes_idx(self): @@ -315,17 +244,17 @@ def grib_mes_idx(self): arrays that store the message ids of required forcing variables for each forcing type TODO fill these arrays for forcing types other than GFS """ - return GRIB_MES_IDX[self.keyValue] + return CONSTS["GRIB_MES_IDX"][self.keyValue] @property def input_map_output(self): """Map the forcing key value to the input to output variable mapping.""" - return INPUT_MAP_OUTPUT[self.keyValue] + return CONSTS["INPUT_MAP_OUTPUT"][self.keyValue] @property def forecast_horizons(self): """Map the forcing key value to the forecast horizons list.""" - return FORECAST_HORIZONS[self.keyValue] + return CONSTS["FORECAST_HORIZONS"][self.keyValue] def calc_neighbor_files( self, config_options: ConfigOptions, dcurrent, mpi_config: 
MpiConfig From c3f1099509ec260cf65c53ba0582393a59df3310 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Mon, 23 Feb 2026 09:23:11 -0600 Subject: [PATCH 13/34] add type hints --- .../core/forcingInputMod.py | 38 +++++++++---------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 4a274fd9..2eafffcd 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -36,7 +36,7 @@ def __init__( config_options: ConfigOptions = None, geo_meta: GeoMetaWrfHydro = None, mpi_config: MpiConfig = None, - ): + ) -> None: """Initialize InputForcings with configuration options, geospatial metadata, and MPI configuration.""" self.config_options = config_options self.geo_meta_wrf_hydro = geo_meta @@ -56,7 +56,7 @@ def __init__( self.handle_humidity_downscaling() self.initialize_geo_data() - def initialize_config_options(self): + def initialize_config_options(self) -> None: """Initialize configuration options from the config_options attribute.""" [ setattr(self, key, val[self.keyValue]) @@ -64,7 +64,7 @@ def initialize_config_options(self): if isinstance(val, list) and len(val) > 0 ] - def handle_humidity_downscaling(self): + def handle_humidity_downscaling(self) -> None: """Initialize temporary arrays for specific humidity downscaling if specified in configuration. If we have specified specific humidity downscaling, establish arrays to hold @@ -92,7 +92,7 @@ def handle_humidity_downscaling(self): self.t2dTmp_elem = None self.psfcTmp_elem = None - def initialize_geo_data(self): + def initialize_geo_data(self) -> None: """Initialize geometry-related arrays based on grid type and downscaling options. Initialize the local final grid of values. 
This is represntative @@ -159,19 +159,19 @@ def initialize_geo_data(self): self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] @property - def product_name(self): + def product_name(self) -> str: """Map the forcing key value to the product name.""" return CONSTS["PRODUCT_NAME"][self.keyValue] @property - def keyValue(self): + def keyValue(self) -> int: """Get the forcing key value.""" if self._keyValue is None: raise RuntimeError("keyValue has not yet been set") return self._keyValue @keyValue.setter - def keyValue(self, val): + def keyValue(self, val: int) -> int: """Set the forcing key value.""" if self._keyValue is not None: raise RuntimeError(f"keyValue has already been set (to {self._keyValue}).") @@ -188,7 +188,7 @@ def file_ext(self) -> str: return self._file_ext @file_ext.setter - def file_ext(self, val): + def file_ext(self, val: str) -> str: if val is None: raise TypeError( "Cannot set file_ext to None since that value indicates an uninitialized state" @@ -204,7 +204,7 @@ def cycle_freq(self) -> int: return self._cycle_freq @cycle_freq.setter - def cycle_freq(self, val): + def cycle_freq(self, val: int) -> int: if val is None: raise TypeError( "Cannot set cycle_freq to None since that value indicates an uninitialized state" @@ -220,7 +220,7 @@ def grib_vars(self) -> list[str] | None: return self._grib_vars @grib_vars.setter - def grib_vars(self, val): + def grib_vars(self, val: list[str]) -> list[str] | None: if val is None: raise TypeError( "Cannot set grib_vars to None since that value indicates an uninitialized state" @@ -228,17 +228,17 @@ def grib_vars(self, val): self._grib_vars = val @property - def grib_levels(self): + def grib_levels(self) -> str: """Map the forcing key value to the required GRIB variable levels.""" return CONSTS["GRIB_LEVELS"][self.keyValue] @property - def netcdf_var_names(self): + def netcdf_var_names(self) -> str: """Map the forcing key value to the required NetCDF variable names.""" return 
CONSTS["NET_CDF_VARS_NAMES"][self.keyValue] @property - def grib_mes_idx(self): + def grib_mes_idx(self) -> list[int] | None: """Map the forcing key value to the required GRIB message ids. arrays that store the message ids of required forcing variables for each forcing type @@ -247,18 +247,18 @@ def grib_mes_idx(self): return CONSTS["GRIB_MES_IDX"][self.keyValue] @property - def input_map_output(self): + def input_map_output(self) -> list[int] | None: """Map the forcing key value to the input to output variable mapping.""" return CONSTS["INPUT_MAP_OUTPUT"][self.keyValue] @property - def forecast_horizons(self): + def forecast_horizons(self) -> list[int] | None: """Map the forcing key value to the forecast horizons list.""" return CONSTS["FORECAST_HORIZONS"][self.keyValue] def calc_neighbor_files( self, config_options: ConfigOptions, dcurrent, mpi_config: MpiConfig - ): + ) -> None: """Calculate the last/next expected input forcing file based on the current time step. Function that will calculate the last/next expected @@ -283,7 +283,7 @@ def regrid_inputs( config_options: ConfigOptions, wrf_hyro_geo_meta: GeoMetaWrfHydro, mpi_config: MpiConfig, - ): + ) -> None: """Regrid input forcings to the final output grids for this timestep. Polymorphic function that will regrid input forcings to the @@ -302,7 +302,7 @@ def regrid_inputs( def temporal_interpolate_inputs( self, config_options: ConfigOptions, mpi_config: MpiConfig - ): + ) -> None: """Run temporal interpolation of the input forcing grids that have been regridded. Polymorphic function that will run temporal interpolation of @@ -332,9 +332,7 @@ def init_dict( :param config_options: :return: input_dict - A dictionary defining our inputs. 
""" - # Initialize an empty dictionary input_dict = {} - if config_options.precip_only_flag: return input_dict From e6e89228268e60a41df69f973dbb6ca422c0c090 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Mon, 23 Feb 2026 09:53:31 -0600 Subject: [PATCH 14/34] move conditionals to geoMod.py --- .../core/forcingInputMod.py | 109 ++---------- .../NextGen_Forcings_Engine/core/geoMod.py | 165 +++++++++++++++++- 2 files changed, 177 insertions(+), 97 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 2eafffcd..bf78be81 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -53,8 +53,17 @@ def __init__( self.initialize_config_options() if self.q2dDownscaleOpt > 0: - self.handle_humidity_downscaling() - self.initialize_geo_data() + self.geo_meta.handle_humidity_downscaling() + + if self.force_count == 8 and 8 in self.input_map_output: + # TODO: this assumes that LQFRAC (8) is always the last grib var + self.grib_vars = self.grib_vars[:-1] + + self.geo_meta_wrf_hydro.initialize_geo_data() + + # Obtain custom input cycle frequencies + if self.key_value == 10 or self.key_value == 11: + self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] def initialize_config_options(self) -> None: """Initialize configuration options from the config_options attribute.""" @@ -64,99 +73,9 @@ def initialize_config_options(self) -> None: if isinstance(val, list) and len(val) > 0 ] - def handle_humidity_downscaling(self) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. - - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. 
- """ - if self.config_options.grid_type == "gridded": - self.t2dTmp = np.empty( - [self.geo_meta.ny_local, self.geo_meta.nx_local], - np.float32, - ) - self.psfcTmp = np.empty( - [self.geo_meta.ny_local, self.geo_meta.nx_local], - np.float32, - ) - self.t2dTmp_elem = None - self.psfcTmp_elem = None - elif self.config_options.grid_type == "unstructured": - self.t2dTmp = np.empty([self.geo_meta.ny_local], np.float32) - self.psfcTmp = np.empty([self.geo_meta.ny_local], np.float32) - self.t2dTmp_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) - self.psfcTmp_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) - elif self.config_options.grid_type == "hydrofabric": - self.t2dTmp = np.empty([self.geo_meta.ny_local], np.float32) - self.psfcTmp = np.empty([self.geo_meta.ny_local], np.float32) - self.t2dTmp_elem = None - self.psfcTmp_elem = None - - def initialize_geo_data(self) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. - - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. 
- """ - force_count = 9 if self.config_options.include_lqfrac else 8 - if force_count == 8 and 8 in self.input_map_output: - # TODO: this assumes that LQFRAC (8) is always the last grib var - self.grib_vars = self.grib_vars[:-1] - - if self.config_options.grid_type == "gridded": - self.final_forcings = np.empty( - [force_count, self.geo_meta.ny_local, self.geo_meta.nx_local], - np.float64, - ) - self.height = np.empty( - [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 - ) - self.regridded_mask = np.empty( - [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 - ) - self.regridded_mask_AORC = np.empty( - [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32 - ) - self.final_forcings_elem = None - self.height_elem = None - self.regridded_mask_elem = None - self.regridded_mask_elem_AORC = None - - elif self.config_options.grid_type == "unstructured": - self.final_forcings = np.empty( - [force_count, self.geo_meta.ny_local], np.float64 - ) - self.height = np.empty([self.geo_meta.ny_local], np.float32) - self.regridded_mask = np.empty([self.geo_meta.ny_local], np.float32) - self.regridded_mask_AORC = np.empty([self.geo_meta.ny_local], np.float32) - self.final_forcings_elem = np.empty( - [force_count, self.geo_meta.ny_local_elem], np.float64 - ) - self.height_elem = np.empty([self.geo_meta.ny_local_elem], np.float32) - self.regridded_mask_elem = np.empty( - [self.geo_meta.ny_local_elem], np.float32 - ) - self.regridded_mask_elem_AORC = np.empty( - [self.geo_meta.ny_local_elem], np.float32 - ) - - elif self.config_options.grid_type == "hydrofabric": - self.final_forcings = np.empty( - [force_count, self.geo_meta.ny_local], np.float64 - ) - self.height = np.empty([self.geo_meta.ny_local], np.float32) - self.regridded_mask = np.empty([self.geo_meta.ny_local], np.float32) - self.regridded_mask_AORC = np.empty([self.geo_meta.ny_local], np.float32) - self.final_forcings_elem = None - self.height_elem = None - self.regridded_mask_elem = None - 
self.regridded_mask_elem_AORC = None - - # Obtain custom input cycle frequencies - if self.key_value == 10 or self.key_value == 11: - self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] + def force_count(self) -> int: + """Force count.""" + return 9 if self.config_options.include_lqfrac else 8 @property def product_name(self) -> str: diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 0bfe63e3..7e513b9d 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import math from pathlib import Path +from typing import TYPE_CHECKING import numpy as np @@ -8,6 +11,10 @@ import shapely from scipy import spatial +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.forcingInputMod import ( + InputForcings, +) + try: import esmpy as ESMF except ImportError: @@ -26,6 +33,11 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.forcingInputMod import ( + InputForcings, + ) + LOG = logging.getLogger(MODULE_NAME) CONSTS = CONSTS[Path(__file__).stem] @@ -295,6 +307,62 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) + def initialize_geo_data( + self, + input_forcings: InputForcings, + ) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + Initialize the local final grid of values. This is representative + of the local grid for this forcing, for a specific output timestep. + This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. 
+ """ + input_forcings.final_forcings = np.empty( + [ + input_forcings.force_count, + input_forcings.geo_meta.ny_local, + input_forcings.geo_meta.nx_local, + ], + np.float64, + ) + input_forcings.height = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.final_forcings_elem = None + input_forcings.height_elem = None + input_forcings.regridded_mask_elem = None + input_forcings.regridded_mask_elem_AORC = None + + def handle_humidity_downscaling( + self, + input_forcings: InputForcings, + ) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. + """ + input_forcings.t2dTmp = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.t2dTmp_elem = None + input_forcings.psfcTmp_elem = None + @broadcast @property @lru_cache @@ -893,6 +961,46 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) + def initialize_geo_data(self, input_forcings: InputForcings) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + From forcingInputMod.py + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. 
+ This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. + """ + input_forcings.final_forcings = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 + ) + input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.final_forcings_elem = None + input_forcings.height_elem = None + input_forcings.regridded_mask_elem = None + input_forcings.regridded_mask_elem_AORC = None + + def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + From forcingInputMod.py + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. + """ + input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.t2dTmp_elem = None + input_forcings.psfcTmp_elem = None + @property @lru_cache def lat_bounds(self) -> np.ndarray: @@ -1068,6 +1176,59 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) + def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + From forcingInputMod.py + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. 
+ """ + input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.t2dTmp_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.psfcTmp_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + + def initialize_geo_data(self, input_forcings: InputForcings) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + From forcingInputMod.py + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. + This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. + """ + input_forcings.final_forcings = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 + ) + input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.final_forcings_elem = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local_elem], + np.float64, + ) + input_forcings.height_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.regridded_mask_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.regridded_mask_elem_AORC = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + @broadcast @property @lru_cache @@ -1460,7 +1621,7 @@ def ny_local_elem(self) -> int: GEOGRID = { - # "gridded": GriddedGeoMeta, - # "unstructured": UnstructuredGeoMeta, + "gridded": GriddedGeoMeta, + "unstructured": UnstructuredGeoMeta, "hydrofabric": HydrofabricGeoMeta, } From 
0877b2a0d91a8e941d2661677d12c85b6f459f13 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Mon, 23 Feb 2026 09:55:29 -0600 Subject: [PATCH 15/34] update type hint --- .../NextGen_Forcings_Engine/core/forcingInputMod.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index bf78be81..248b8e1b 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -14,7 +14,7 @@ ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, + GeoMeta, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME @@ -34,7 +34,7 @@ def __init__( self, key_value: int = None, config_options: ConfigOptions = None, - geo_meta: GeoMetaWrfHydro = None, + geo_meta: GeoMeta = None, mpi_config: MpiConfig = None, ) -> None: """Initialize InputForcings with configuration options, geospatial metadata, and MPI configuration.""" @@ -200,7 +200,7 @@ def calc_neighbor_files( def regrid_inputs( self, config_options: ConfigOptions, - wrf_hyro_geo_meta: GeoMetaWrfHydro, + wrf_hyro_geo_meta: GeoMeta, mpi_config: MpiConfig, ) -> None: """Regrid input forcings to the final output grids for this timestep. @@ -240,7 +240,7 @@ def temporal_interpolate_inputs( def init_dict( config_options: ConfigOptions, - geo_meta_wrf_hydro: GeoMetaWrfHydro, + geo_meta_wrf_hydro: GeoMeta, mpi_config: MpiConfig, ) -> dict: """Initialize the input forcing dictionary. 
From 542aa3ebb8c954124baef5aa33089c54985e35f5 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Feb 2026 04:54:13 -0600 Subject: [PATCH 16/34] rename variables for better consistency across the repo --- .../core/bias_correction.py | 14 +- .../NextGen_Forcings_Engine/core/consts.py | 62 ++++++- .../core/disaggregateMod.py | 2 +- .../NextGen_Forcings_Engine/core/downscale.py | 14 +- .../core/forcingInputMod.py | 61 ++++--- .../NextGen_Forcings_Engine/core/regrid.py | 39 ++-- .../core/time_handling.py | 172 +++++++++--------- 7 files changed, 222 insertions(+), 142 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py index ae96b86f..49d5972e 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py @@ -54,7 +54,7 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 3: ncar_temp_gfs_bias_correct, 4: ncar_temp_hrrr_bias_correct, } - bias_correct_temperature[input_forcings.t2dBiasCorrectOpt]( + bias_correct_temperature[input_forcings.t2BiasCorrectOpt]( input_forcings, config_options, mpi_config, 0 ) err_handler.check_program_status(config_options, mpi_config) @@ -65,7 +65,7 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 1: cfsv2_nldas_nwm_bias_correct, 2: ncar_tbl_correction, } - bias_correct_humidity[input_forcings.q2dBiasCorrectOpt]( + bias_correct_humidity[input_forcings.q2BiasCorrectOpt]( input_forcings, config_options, mpi_config, 1 ) err_handler.check_program_status(config_options, mpi_config) @@ -114,12 +114,12 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 4: ncar_wspd_hrrr_bias_correct, } # Run for U-Wind - bias_correct_wind[input_forcings.windBiasCorrectOpt]( + bias_correct_wind[input_forcings.windBiasCorrect]( 
input_forcings, config_options, mpi_config, 2 ) err_handler.check_program_status(config_options, mpi_config) # Run for V-Wind - bias_correct_wind[input_forcings.windBiasCorrectOpt]( + bias_correct_wind[input_forcings.windBiasCorrect]( input_forcings, config_options, mpi_config, 3 ) err_handler.check_program_status(config_options, mpi_config) @@ -2473,7 +2473,7 @@ def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for id_nldas_param = nldas_param_file = None if mpi_config.rank == 0: nldas_param_file = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/NLDAS_Climo/nldas2_" + config_options.current_output_date.strftime("%m%d%H") + "_dist_params.nc" @@ -2700,7 +2700,7 @@ def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for if mpi_config.rank == 0: # Read in the CFSv2 parameter files, based on the previous CFSv2 dates cfs_param_path1 = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/CFSv2_Climo/cfs_" + cfs_param_path_vars[force_num] + "_" @@ -2711,7 +2711,7 @@ def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for ) cfs_param_path2 = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/CFSv2_Climo/cfs_" + cfs_param_path_vars[force_num] + "_" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 9bd8e4b0..5a361aff 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -91,9 +91,69 @@ "slope_elem", "slp_azi_elem", ], + "_output_var_names": [ + "U2D_ELEMENT", + "V2D_ELEMENT", + "LWDOWN_ELEMENT", + "SWDOWN_ELEMENT", + "T2D_ELEMENT", + "Q2D_ELEMENT", + "PSFC_ELEMENT", + "RAINRATE_ELEMENT", + ], + "_output_var_names_unstructured": [ + "U2D_NODE", + "V2D_NODE", + "LWDOWN_NODE", + "SWDOWN_NODE", + "T2D_NODE", + "Q2D_NODE", + "PSFC_NODE", + "RAINRATE_NODE", + 
"LQFRAC_NODE", + ], + "_var_name_units_map": { + "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], + "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], + "T2D_ELEMENT": ["2-m Air Temperature", "K"], + "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_ELEMENT": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_ELEMENT": [ + "Surface downward short-wave radiation flux", + "W/m^2", + ], + "PSFC_ELEMENT": ["Surface Pressure", "Pa"], + "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], + }, + "_var_name_units_map_unstructured": { + "U2D_NODE": ["10-m U-component of wind", "m/s"], + "V2D_NODE": ["10-m V-component of wind", "m/s"], + "T2D_NODE": ["2-m Air Temperature", "K"], + "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_NODE": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_NODE": [ + "Surface downward short-wave radiation flux", + "W/m^2", + ], + "PSFC_NODE": ["Surface Pressure", "Pa"], + "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], + }, + }, + "bmi_model": { + "extension_map": { + "gridded": "GRIDDED", + "hydrofabric": "HYDROFABRIC", + "unstructured": "MESH", + } }, "forcingInputMod": { - "InputForcing": [ + "InputForcings": [ "nx_global", "ny_global", "nx_local", diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py index 2b5812ac..d7a3d473 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py @@ -128,7 +128,7 @@ def ak_ext_ana_disaggregate( # (begin_date,end_date] date_iter += timedelta(hours=1) while date_iter <= end_date: - tmp_file = f"{input_forcings.inDir}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1" + tmp_file = 
f"{input_forcings.input_force_dirs}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1" if os.path.exists(tmp_file): config_options.statusMsg = f"Reading {input_forcings.netcdf_var_names[3]} from {tmp_file} for disaggregation" err_handler.log_msg(config_options, mpi_config) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py index bdfbaeb2..23983d9f 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py @@ -39,7 +39,7 @@ def run_downscaling(input_forcings, config_options, geo_meta_wrf_hydro, mpi_conf # Dictionary mapping to shortwave radiation downscaling downscale_sw = {0: no_downscale, 1: ncar_topo_adj} - downscale_sw[input_forcings.swDowscaleOpt]( + downscale_sw[input_forcings.swDownscaleOpt]( input_forcings, config_options, geo_meta_wrf_hydro, mpi_config ) err_handler.check_program_status(config_options, mpi_config) @@ -254,7 +254,7 @@ def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): if MpiConfig.rank == 0: while True: # First ensure we have a parameter directory - if input_forcings.paramDir == "NONE": + if input_forcings.dScaleParamDirs == "NONE": ConfigOptions.errMsg = ( "User has specified spatial temperature lapse rate " "downscaling while no downscaling parameter directory " @@ -264,7 +264,7 @@ def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): break # Compose the path to the lapse rate grid file. - lapsePath = input_forcings.paramDir + "/lapse_param.nc" + lapsePath = input_forcings.dScaleParamDirs + "/lapse_param.nc" if not os.path.isfile(lapsePath): ConfigOptions.errMsg = ( "Expected lapse rate parameter file: " @@ -699,13 +699,13 @@ def nwm_monthly_PRISM_downscale( if mmVersion == 1: # Compose paths to the expected files. 
numeratorPath = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_Grid.nc" ) denominatorPath = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_to_" @@ -716,13 +716,13 @@ def nwm_monthly_PRISM_downscale( elif mmVersion == 2: # Compose paths to the expected files. numeratorPath = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_Grid.nc" ) denominatorPath = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 248b8e1b..4cd15b8d 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -4,19 +4,24 @@ initializing ESMF grids and regrid objects), etc """ +from __future__ import annotations + import logging from pathlib import Path +from typing import TYPE_CHECKING -import numpy as np - -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, -) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMeta, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig + +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, + ) + from 
NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import ( + MpiConfig, + ) from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) @@ -32,20 +37,25 @@ class InputForcings: def __init__( self, - key_value: int = None, + force_key: int, + idx: int = None, config_options: ConfigOptions = None, geo_meta: GeoMeta = None, mpi_config: MpiConfig = None, ) -> None: """Initialize InputForcings with configuration options, geospatial metadata, and MPI configuration.""" self.config_options = config_options - self.geo_meta_wrf_hydro = geo_meta + self.geo_meta = geo_meta self.mpi_config = mpi_config self.regridComplete = False self.regridComplete = False self.rstFlag = 0 self.skip = False - self._keyValue = key_value + self._keyValue = force_key + self.idx = idx + + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) self.find_neighbor_files_map = CONSTS["FIND_NEIGHBOR_FILES_MAP"] self.regrid_map = CONSTS["REGRID_MAP"] @@ -59,20 +69,21 @@ def __init__( # TODO: this assumes that LQFRAC (8) is always the last grib var self.grib_vars = self.grib_vars[:-1] - self.geo_meta_wrf_hydro.initialize_geo_data() + self.geo_meta.initialize_geo_data(self) # Obtain custom input cycle frequencies - if self.key_value == 10 or self.key_value == 11: + if self.keyValue == 10 or self.keyValue == 11: self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] def initialize_config_options(self) -> None: """Initialize configuration options from the config_options attribute.""" [ - setattr(self, key, val[self.keyValue]) - for key, val in vars(self.config_options).items() + setattr(self, key, val[self.idx]) + for key, val in list(vars(self.config_options).items()) if isinstance(val, list) and len(val) > 0 ] + @property def force_count(self) -> int: """Force count.""" return 9 if self.config_options.include_lqfrac else 8 @@ -99,9 +110,9 @@ def keyValue(self, val: int) -> int: @property def file_ext(self) -> str: """Map the forcing 
file type to the file extension.""" - ext = CONSTS["FILE_EXT"].get(self.file_type) + ext = CONSTS["FILE_EXT"].get(self.input_force_types) if ext is None: - raise ValueError(f"Unexpected file_type: {self.file_type}") + raise ValueError(f"Unexpected file_type: {self.input_force_types}") self._file_ext = ext return self._file_ext @@ -200,7 +211,7 @@ def calc_neighbor_files( def regrid_inputs( self, config_options: ConfigOptions, - wrf_hyro_geo_meta: GeoMeta, + geo_meta: GeoMeta, mpi_config: MpiConfig, ) -> None: """Regrid input forcings to the final output grids for this timestep. @@ -215,9 +226,7 @@ def regrid_inputs( """ # Establish a mapping dictionary that will point the # code to the functions to that will regrid the data. - self.regrid_map[self.keyValue]( - self, config_options, wrf_hyro_geo_meta, mpi_config - ) + self.regrid_map[self.keyValue](self, config_options, geo_meta, mpi_config) def temporal_interpolate_inputs( self, config_options: ConfigOptions, mpi_config: MpiConfig @@ -233,14 +242,14 @@ def temporal_interpolate_inputs( :param mpi_config: :return: """ - self.temporal_interpolate_inputs_map[self.timeInterpOpt]( + self.temporal_interpolate_inputs_map[self.forceTemoralInterp]( self, config_options, mpi_config ) def init_dict( config_options: ConfigOptions, - geo_meta_wrf_hydro: GeoMeta, + geo_meta: GeoMeta, mpi_config: MpiConfig, ) -> dict: """Initialize the input forcing dictionary. 
@@ -260,9 +269,9 @@ def init_dict( for idx in range(0, config_options.number_inputs): force_key = config_options.input_forcings[idx] input_dict[force_key] = InputForcings( - force_key, config_options, geo_meta_wrf_hydro, mpi_config + force_key, idx, config_options, geo_meta, mpi_config ) - input_dict[force_key].keyValue = force_key + # input_dict[force_key].keyValue = force_key # Obtain custom input cycle frequencies if force_key == 10 or force_key == 11: diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py index df0b9e89..71c8727e 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py @@ -1,14 +1,17 @@ """Regridding module file for regridding input forcing files.""" -from functools import partial +from __future__ import annotations + import hashlib import os import sys import traceback from contextlib import contextmanager from datetime import datetime, timedelta +from functools import partial from pathlib import Path from time import monotonic, time +from typing import TYPE_CHECKING # import mpi4py.util.pool as mpi_pool # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: @@ -39,13 +42,17 @@ ioMod, timeInterpMod, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMeta, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig + +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import ( + 
MpiConfig, + ) from nextgen_forcings_ewts import MODULE_NAME from ..esmf_utils import ( @@ -1534,7 +1541,7 @@ def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_co try: pt.log_info("Regrid CONUS HRRR") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0 and os.path.isfile(input_forcings.tmpFile): @@ -2333,7 +2340,7 @@ def regrid_conus_rap(input_forcings, config_options, wrf_hydro_geo_meta, mpi_con id_tmp = None try: pt.log_info("Regrid CONUS RAP") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -3134,7 +3141,7 @@ def regrid_cfsv2(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config) id_tmp = None try: pt.log_info("Regrid CFSv2") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -5927,7 +5934,7 @@ def regrid_gfs(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): try: pt.log_info("Regridding 13km GFS Variables.") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0 and os.path.isfile(input_forcings.tmpFile): @@ -6687,7 +6694,7 @@ def regrid_nam_nest(input_forcings, config_options, wrf_hydro_geo_meta, mpi_conf id_tmp = None try: pt.log_info("Regridding NAM nest data") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... 
but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -8469,7 +8476,7 @@ def regrid_hourly_wrf_arw( try: pt.log_info("Regrid WRF-ARW nest data") - if input_forcings.file_type != NETCDF: + if input_forcings.input_force_types != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -12082,7 +12089,7 @@ def make_regrid( extrap_method = ESMF.ExtrapMethod.CREEP_FILL if fill else ESMF.ExtrapMethod.NONE regrid_method = (ESMF.RegridMethod.BILINEAR, ESMF.RegridMethod.NEAREST_STOD)[ - input_forcings.regridOpt - 1 + input_forcings.regrid_opt - 1 ] err_handler.check_program_status(config_options, mpi_config) @@ -12279,7 +12286,7 @@ def calculate_weights( err_handler.check_program_status(config_options, mpi_config) # check if we're doing border trimming and set up mask - border = input_forcings.border # // 5 # HRRR is a 3 km product + border = input_forcings.ignored_border_widths # // 5 # HRRR is a 3 km product if border > 0: try: mask = input_forcings.esmf_grid_in.add_item( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py index 04708010..a45dbf6c 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py @@ -87,7 +87,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # greater than an expected value. However, since these are custom input NetCDF files, # we are foregoing that check. current_nldas_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.userCycleOffset * 60.0 + seconds=input_forcings.fcst_input_offsets * 60.0 ) # Calculate the current forecast hour within this cycle. 
@@ -123,14 +123,14 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" @@ -213,7 +213,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -255,27 +255,27 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if d_current.year > 2019: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) else: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" @@ -284,13 +284,13 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if input_forcings.product_name == "AORC_Alaska": # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -373,7 +373,7 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if config_options.input_forcings[0] not in [12, 21]: if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input AORC file: " + input_forcings.file_in2 @@ -412,8 +412,12 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): :return: """ # Point to ERA5 netcdf input file - tmp_file1 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) - tmp_file2 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) + tmp_file1 = os.path.join( + input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0] + ) + tmp_file2 = os.path.join( + input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0] + ) if mpi_config.rank == 0: # Check to see if files are already set. If not, then reset, grids and @@ -491,7 +495,7 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ERA5 Interim file: " + input_forcings.file_in2 @@ -539,13 +543,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -554,13 +558,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext @@ -642,7 +646,7 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NWM file: " + input_forcings.file_in2 @@ -691,7 +695,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # First find the current ExtAnA forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_ext_ana_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 + seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 ) ext_ana_horizon = 32 @@ -699,7 +703,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. 
if ( - input_forcings.userFcstHorizon + input_forcings.userCycleOffset + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets ) / 60.0 > ext_ana_horizon: config_options.errMsg = ( "User has specified a ExtAnA conus forecast horizon " @@ -747,7 +751,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + prev_ext_ana_date.strftime("%Y%m%d%H") + "/" @@ -834,7 +838,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ExtAnA file: " + input_forcings.file_in2 @@ -892,7 +896,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # First find the current HRRR forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_hrrr_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 + seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 ) if current_hrrr_cycle.hour % 6 != 0: hrrr_horizon = default_horizon @@ -902,7 +906,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. if ( - input_forcings.userFcstHorizon + input_forcings.userCycleOffset + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets ) / 60.0 > hrrr_horizon: config_options.errMsg = ( "User has specified a HRRR conus forecast horizon " @@ -946,7 +950,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -960,7 +964,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -975,7 +979,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -989,7 +993,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -1075,7 +1079,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1289,7 +1293,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config else: current_hrrr_cycle = ( config_options.current_fcst_cycle - ) # - datetime.timedelta(seconds=input_forcings.userCycleOffset * 60.0) + ) # - datetime.timedelta(seconds=input_forcings.fcst_input_offsets * 60.0) # Map the native forecast hour to the shifted HRRR cycles hrrr_cycle = (current_hrrr_cycle.hour // 3 * 3) - 3 @@ -1353,7 +1357,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1368,7 +1372,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1452,7 +1456,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1502,7 +1506,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # First find the current RAP forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_rap_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 + seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 ) if ( current_rap_cycle.hour == 3 @@ -1517,7 +1521,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. if ( - input_forcings.userFcstHorizon + input_forcings.userCycleOffset + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets ) / 60.0 > rap_horizon: config_options.errMsg = ( "User has specified a RAP CONUS 13km forecast horizon " @@ -1558,7 +1562,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." 
+ current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1568,7 +1572,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1583,7 +1587,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1593,7 +1597,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1608,7 +1612,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1618,7 +1622,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1702,7 +1706,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): missing = input_forcings.file_in2.replace("pgrb", "[bgrb|pgrb]") - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input RAP file: " + missing + " not found." 
) @@ -1752,9 +1756,9 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): gfs_precip_delineators = {120: [360, 60], 240: [360, 180], 384: [360, 180]} # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( - gfs_out_horizons - ): + if ( + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + ) / 60.0 > max(gfs_out_horizons): config_options.errMsg = ( "User has specified a GFS forecast horizon " "that is greater than maximum allowed hours of: " @@ -1773,7 +1777,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # First find the current GFS forecast cycle that we are using. current_gfs_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.userCycleOffset * 60.0 + seconds=input_forcings.fcst_input_offsets * 60.0 ) # Calculate the current forecast hour within this GFS cycle. @@ -1819,7 +1823,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if current_gfs_cycle < datetime.datetime(2019, 6, 12, 12): tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1829,7 +1833,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1841,7 +1845,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): else: # FV3 change on June 12th, 2019 tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." 
+ current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -1853,7 +1857,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -2016,7 +2020,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input GFS file: " + input_forcings.file_in2 @@ -2066,9 +2070,9 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( - nam_nest_out_horizons - ): + if ( + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + ) / 60.0 > max(nam_nest_out_horizons): config_options.errMsg = ( "User has specified a NAM nest forecast horizon " "that is greater than maximum allowed hours of: " @@ -2087,7 +2091,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi else: current_nam_nest_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.userCycleOffset * 60.0 + seconds=input_forcings.fcst_input_offsets * 60.0 ) if mpi_config.rank == 0: @@ -2156,7 +2160,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." 
+ current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2169,7 +2173,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." + current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2258,7 +2262,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NAM Nest file: " + input_forcings.file_in2 @@ -2306,9 +2310,9 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( - cfs_out_horizons - ): + if ( + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + ) / 60.0 > max(cfs_out_horizons): config_options.errMsg = ( "User has specified a CFSv2 forecast horizon " "that is greater than maximum allowed hours of: " @@ -2318,7 +2322,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # First find the current CFS forecast cycle that we are using. current_cfs_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.userCycleOffset * 60.0 + seconds=input_forcings.fcst_input_offsets * 60.0 ) # Calculate the current forecast hour within this CFSv2 cycle. @@ -2362,11 +2366,11 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): prev_cfs_date = next_cfs_date # Calculate expected file paths. 
- if input_forcings.file_type == "GRIB2": + if input_forcings.input_force_types == "GRIB2": input_forcings.file_ext = ".grb2" tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2383,7 +2387,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2480,7 +2484,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input CFSv2 file: " + input_forcings.file_in2 @@ -2522,7 +2526,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # greater than an expected value. However, since these are custom input NetCDF files, # we are foregoing that check. current_custom_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.userCycleOffset * 60.0 + seconds=input_forcings.fcst_input_offsets * 60.0 ) # Calculate the current forecast hour within this cycle. @@ -2563,7 +2567,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." + current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2571,7 +2575,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ + ".nc" ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." 
+ current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2653,7 +2657,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -3256,7 +3260,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/SBC_LWF/" + input_forcings.fcst_date1.strftime("%Y%m") + "/" @@ -3339,7 +3343,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input SBCV2_LWF file: " + input_forcings.file_in2 @@ -4357,7 +4361,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour2 = current_fcst.total_seconds() / 3600 tmp_file1 = os.path.join( - input_forcings.inDir, + input_forcings.input_force_dirs, "NDFD", current_cycle.strftime("%Y%m%d"), "wgrbbul", @@ -4447,7 +4451,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): for tag in ("tmp", "wdir", "wspd", "qpf") ]: if not os.path.isfile(subfile): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( f"Expected input NDFD file: {subfile} not found." 
) @@ -4632,7 +4636,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # ana_offset = 0 current_input_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 * 60 + seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 * 60 ) input_horizon = input_forcings.forecast_horizons[current_input_cycle.hour] @@ -4642,7 +4646,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): if not config_options.ana_flag: if ( - input_forcings.userFcstHorizon + input_forcings.userCycleOffset + input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets ) / 60.0 > input_horizon: config_options.errMsg = f"Config file ForecastInputHorizons exceeds maximum allowed hours of: {str(input_horizon)}" err_handler.log_critical(config_options, mpi_config) @@ -4707,8 +4711,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour1 = input_forcings.fcst_hour1 - 1 if config_options.ana_flag == 0: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) tmp_file1 = files1[0] @@ -4716,8 +4720,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): config_options.statusMsg = "Previous input file being used: " + tmp_file1 err_handler.log_msg(config_options, mpi_config, True) # log at debug level - pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" files2 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4729,8 +4733,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): elif config_options.ana_flag == 1: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4853,7 +4857,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input file: " + input_forcings.file_in2 + " not found." 
) From d78a7a49ba1327a3117473f0e2fa0ef9cc47f6cd Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Feb 2026 13:05:12 -0600 Subject: [PATCH 17/34] update variable names to be consistent across the repo --- .../NextGen_Forcings_Engine/core/downscale.py | 14 +- .../NextGen_Forcings_Engine/core/regrid.py | 37 ++-- .../core/time_handling.py | 172 +++++++++--------- 3 files changed, 106 insertions(+), 117 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py index 23983d9f..bdfbaeb2 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py @@ -39,7 +39,7 @@ def run_downscaling(input_forcings, config_options, geo_meta_wrf_hydro, mpi_conf # Dictionary mapping to shortwave radiation downscaling downscale_sw = {0: no_downscale, 1: ncar_topo_adj} - downscale_sw[input_forcings.swDownscaleOpt]( + downscale_sw[input_forcings.swDowscaleOpt]( input_forcings, config_options, geo_meta_wrf_hydro, mpi_config ) err_handler.check_program_status(config_options, mpi_config) @@ -254,7 +254,7 @@ def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): if MpiConfig.rank == 0: while True: # First ensure we have a parameter directory - if input_forcings.dScaleParamDirs == "NONE": + if input_forcings.paramDir == "NONE": ConfigOptions.errMsg = ( "User has specified spatial temperature lapse rate " "downscaling while no downscaling parameter directory " @@ -264,7 +264,7 @@ def param_lapse(input_forcings, ConfigOptions, GeoMetaWrfHydro, MpiConfig): break # Compose the path to the lapse rate grid file. 
- lapsePath = input_forcings.dScaleParamDirs + "/lapse_param.nc" + lapsePath = input_forcings.paramDir + "/lapse_param.nc" if not os.path.isfile(lapsePath): ConfigOptions.errMsg = ( "Expected lapse rate parameter file: " @@ -699,13 +699,13 @@ def nwm_monthly_PRISM_downscale( if mmVersion == 1: # Compose paths to the expected files. numeratorPath = ( - input_forcings.dScaleParamDirs + input_forcings.paramDir + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_Grid.nc" ) denominatorPath = ( - input_forcings.dScaleParamDirs + input_forcings.paramDir + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_to_" @@ -716,13 +716,13 @@ def nwm_monthly_PRISM_downscale( elif mmVersion == 2: # Compose paths to the expected files. numeratorPath = ( - input_forcings.dScaleParamDirs + input_forcings.paramDir + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_NWM_Grid.nc" ) denominatorPath = ( - input_forcings.dScaleParamDirs + input_forcings.paramDir + "/PRISM_Precip_Clim_" + ConfigOptions.current_output_date.strftime("%b") + "_" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py index 71c8727e..ae80b704 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py @@ -1,7 +1,5 @@ """Regridding module file for regridding input forcing files.""" -from __future__ import annotations - import hashlib import os import sys @@ -11,7 +9,6 @@ from functools import partial from pathlib import Path from time import monotonic, time -from typing import TYPE_CHECKING # import mpi4py.util.pool as mpi_pool # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: @@ -42,17 +39,13 @@ ioMod, timeInterpMod, ) - -if TYPE_CHECKING: - from 
NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, - ) - from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMeta, - ) - from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import ( - MpiConfig, - ) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME from ..esmf_utils import ( @@ -1541,7 +1534,7 @@ def regrid_conus_hrrr(input_forcings, config_options, wrf_hydro_geo_meta, mpi_co try: pt.log_info("Regrid CONUS HRRR") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0 and os.path.isfile(input_forcings.tmpFile): @@ -2340,7 +2333,7 @@ def regrid_conus_rap(input_forcings, config_options, wrf_hydro_geo_meta, mpi_con id_tmp = None try: pt.log_info("Regrid CONUS RAP") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -3141,7 +3134,7 @@ def regrid_cfsv2(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config) id_tmp = None try: pt.log_info("Regrid CFSv2") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... 
if mpi_config.rank == 0: @@ -5934,7 +5927,7 @@ def regrid_gfs(input_forcings, config_options, wrf_hydro_geo_meta, mpi_config): try: pt.log_info("Regridding 13km GFS Variables.") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0 and os.path.isfile(input_forcings.tmpFile): @@ -6694,7 +6687,7 @@ def regrid_nam_nest(input_forcings, config_options, wrf_hydro_geo_meta, mpi_conf id_tmp = None try: pt.log_info("Regridding NAM nest data") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... if mpi_config.rank == 0: @@ -8476,7 +8469,7 @@ def regrid_hourly_wrf_arw( try: pt.log_info("Regrid WRF-ARW nest data") - if input_forcings.input_force_types != NETCDF: + if input_forcings.file_type != NETCDF: # This file shouldn't exist.... but if it does (previously failed # execution of the program), remove it..... 
if mpi_config.rank == 0: @@ -12089,7 +12082,7 @@ def make_regrid( extrap_method = ESMF.ExtrapMethod.CREEP_FILL if fill else ESMF.ExtrapMethod.NONE regrid_method = (ESMF.RegridMethod.BILINEAR, ESMF.RegridMethod.NEAREST_STOD)[ - input_forcings.regrid_opt - 1 + input_forcings.regridOpt - 1 ] err_handler.check_program_status(config_options, mpi_config) @@ -12286,7 +12279,7 @@ def calculate_weights( err_handler.check_program_status(config_options, mpi_config) # check if we're doing border trimming and set up mask - border = input_forcings.ignored_border_widths # // 5 # HRRR is a 3 km product + border = input_forcings.border # // 5 # HRRR is a 3 km product if border > 0: try: mask = input_forcings.esmf_grid_in.add_item( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py index a45dbf6c..04708010 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py @@ -87,7 +87,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # greater than an expected value. However, since these are custom input NetCDF files, # we are foregoing that check. current_nldas_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.fcst_input_offsets * 60.0 + seconds=input_forcings.userCycleOffset * 60.0 ) # Calculate the current forecast hour within this cycle. @@ -123,14 +123,14 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" @@ -213,7 +213,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -255,27 +255,27 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if d_current.year > 2019: tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) else: tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" @@ -284,13 +284,13 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if input_forcings.product_name == "AORC_Alaska": # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -373,7 +373,7 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if config_options.input_forcings[0] not in [12, 21]: if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input AORC file: " + input_forcings.file_in2 @@ -412,12 +412,8 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): :return: """ # Point to ERA5 netcdf input file - tmp_file1 = os.path.join( - input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0] - ) - tmp_file2 = os.path.join( - input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0] - ) + tmp_file1 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) + tmp_file2 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) if mpi_config.rank == 0: # Check to see if files are already set. If not, then reset, grids and @@ -495,7 +491,7 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input ERA5 Interim file: " + input_forcings.file_in2 @@ -543,13 +539,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -558,13 +554,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext @@ -646,7 +642,7 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input NWM file: " + input_forcings.file_in2 @@ -695,7 +691,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # First find the current ExtAnA forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_ext_ana_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 + seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 ) ext_ana_horizon = 32 @@ -703,7 +699,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. 
if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + input_forcings.userFcstHorizon + input_forcings.userCycleOffset ) / 60.0 > ext_ana_horizon: config_options.errMsg = ( "User has specified a ExtAnA conus forecast horizon " @@ -751,7 +747,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/" + prev_ext_ana_date.strftime("%Y%m%d%H") + "/" @@ -838,7 +834,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input ExtAnA file: " + input_forcings.file_in2 @@ -896,7 +892,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # First find the current HRRR forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_hrrr_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 + seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 ) if current_hrrr_cycle.hour % 6 != 0: hrrr_horizon = default_horizon @@ -906,7 +902,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + input_forcings.userFcstHorizon + input_forcings.userCycleOffset ) / 60.0 > hrrr_horizon: config_options.errMsg = ( "User has specified a HRRR conus forecast horizon " @@ -950,7 +946,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -964,7 +960,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -979,7 +975,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -993,7 +989,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -1079,7 +1075,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1293,7 +1289,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config else: current_hrrr_cycle = ( config_options.current_fcst_cycle - ) # - datetime.timedelta(seconds=input_forcings.fcst_input_offsets * 60.0) + ) # - datetime.timedelta(seconds=input_forcings.userCycleOffset * 60.0) # Map the native forecast hour to the shifted HRRR cycles hrrr_cycle = (current_hrrr_cycle.hour // 3 * 3) - 3 @@ -1357,7 +1353,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1372,7 +1368,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1456,7 +1452,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1506,7 +1502,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # First find the current RAP forecast cycle that we are using. ana_offset = 1 if config_options.ana_flag else 0 current_rap_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 + seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 ) if ( current_rap_cycle.hour == 3 @@ -1521,7 +1517,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # If the user has specified a forcing horizon that is greater than what is available # for this time period, throw an error. if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + input_forcings.userFcstHorizon + input_forcings.userCycleOffset ) / 60.0 > rap_horizon: config_options.errMsg = ( "User has specified a RAP CONUS 13km forecast horizon " @@ -1562,7 +1558,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." 
+ current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1572,7 +1568,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1587,7 +1583,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1597,7 +1593,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1612,7 +1608,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1622,7 +1618,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1706,7 +1702,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): missing = input_forcings.file_in2.replace("pgrb", "[bgrb|pgrb]") - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input RAP file: " + missing + " not found." 
) @@ -1756,9 +1752,9 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): gfs_precip_delineators = {120: [360, 60], 240: [360, 180], 384: [360, 180]} # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets - ) / 60.0 > max(gfs_out_horizons): + if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( + gfs_out_horizons + ): config_options.errMsg = ( "User has specified a GFS forecast horizon " "that is greater than maximum allowed hours of: " @@ -1777,7 +1773,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # First find the current GFS forecast cycle that we are using. current_gfs_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.fcst_input_offsets * 60.0 + seconds=input_forcings.userCycleOffset * 60.0 ) # Calculate the current forecast hour within this GFS cycle. @@ -1823,7 +1819,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if current_gfs_cycle < datetime.datetime(2019, 6, 12, 12): tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1833,7 +1829,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1845,7 +1841,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): else: # FV3 change on June 12th, 2019 tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/gfs." 
+ current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -1857,7 +1853,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/gfs." + current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -2020,7 +2016,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input GFS file: " + input_forcings.file_in2 @@ -2070,9 +2066,9 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets - ) / 60.0 > max(nam_nest_out_horizons): + if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( + nam_nest_out_horizons + ): config_options.errMsg = ( "User has specified a NAM nest forecast horizon " "that is greater than maximum allowed hours of: " @@ -2091,7 +2087,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi else: current_nam_nest_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.fcst_input_offsets * 60.0 + seconds=input_forcings.userCycleOffset * 60.0 ) if mpi_config.rank == 0: @@ -2160,7 +2156,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/nam." 
+ current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2173,7 +2169,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/nam." + current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2262,7 +2258,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input NAM Nest file: " + input_forcings.file_in2 @@ -2310,9 +2306,9 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # If the user has specified a forcing horizon that is greater than what # is available here, return an error. - if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets - ) / 60.0 > max(cfs_out_horizons): + if (input_forcings.userFcstHorizon + input_forcings.userCycleOffset) / 60.0 > max( + cfs_out_horizons + ): config_options.errMsg = ( "User has specified a CFSv2 forecast horizon " "that is greater than maximum allowed hours of: " @@ -2322,7 +2318,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # First find the current CFS forecast cycle that we are using. current_cfs_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.fcst_input_offsets * 60.0 + seconds=input_forcings.userCycleOffset * 60.0 ) # Calculate the current forecast hour within this CFSv2 cycle. @@ -2366,11 +2362,11 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): prev_cfs_date = next_cfs_date # Calculate expected file paths. 
- if input_forcings.input_force_types == "GRIB2": + if input_forcings.file_type == "GRIB2": input_forcings.file_ext = ".grb2" tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2387,7 +2383,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2484,7 +2480,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input CFSv2 file: " + input_forcings.file_in2 @@ -2526,7 +2522,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # greater than an expected value. However, since these are custom input NetCDF files, # we are foregoing that check. current_custom_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=input_forcings.fcst_input_offsets * 60.0 + seconds=input_forcings.userCycleOffset * 60.0 ) # Calculate the current forecast hour within this cycle. @@ -2567,7 +2563,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/custom_hourly." + current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2575,7 +2571,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ + ".nc" ) tmp_file2 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/custom_hourly." 
+ current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2657,7 +2653,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -3260,7 +3256,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.input_force_dirs + input_forcings.inDir + "/SBC_LWF/" + input_forcings.fcst_date1.strftime("%Y%m") + "/" @@ -3343,7 +3339,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input SBCV2_LWF file: " + input_forcings.file_in2 @@ -4361,7 +4357,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour2 = current_fcst.total_seconds() / 3600 tmp_file1 = os.path.join( - input_forcings.input_force_dirs, + input_forcings.inDir, "NDFD", current_cycle.strftime("%Y%m%d"), "wgrbbul", @@ -4451,7 +4447,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): for tag in ("tmp", "wdir", "wspd", "qpf") ]: if not os.path.isfile(subfile): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( f"Expected input NDFD file: {subfile} not found." 
) @@ -4636,7 +4632,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # ana_offset = 0 current_input_cycle = config_options.current_fcst_cycle - datetime.timedelta( - seconds=(ana_offset + input_forcings.fcst_input_offsets) * 60.0 * 60 + seconds=(ana_offset + input_forcings.userCycleOffset) * 60.0 * 60 ) input_horizon = input_forcings.forecast_horizons[current_input_cycle.hour] @@ -4646,7 +4642,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): if not config_options.ana_flag: if ( - input_forcings.fcst_input_horizons + input_forcings.fcst_input_offsets + input_forcings.userFcstHorizon + input_forcings.userCycleOffset ) / 60.0 > input_horizon: config_options.errMsg = f"Config file ForecastInputHorizons exceeds maximum allowed hours of: {str(input_horizon)}" err_handler.log_critical(config_options, mpi_config) @@ -4711,8 +4707,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour1 = input_forcings.fcst_hour1 - 1 if config_options.ana_flag == 0: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) tmp_file1 = files1[0] @@ -4720,8 +4716,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): config_options.statusMsg = "Previous input file being used: " + tmp_file1 err_handler.log_msg(config_options, mpi_config, True) # log at debug level - pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" files2 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4733,8 +4729,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): elif config_options.ana_flag == 1: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4857,7 +4853,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.input_force_mandatory == 1: + if input_forcings.enforce == 1: config_options.errMsg = ( "Expected input file: " + input_forcings.file_in2 + " not found." 
) From 259632a9da5964bd27c15e7228fb23a4898315dd Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Feb 2026 13:06:38 -0600 Subject: [PATCH 18/34] split bmi model into multiple classes based on specified discretization --- .../NextGen_Forcings_Engine/bmi_model.py | 866 +++++++----------- .../NextGen_Forcings_Engine/core/geoMod.py | 156 ---- 2 files changed, 327 insertions(+), 695 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 3a6f0174..262b51ca 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -24,13 +24,18 @@ from mpi4py import MPI from NextGen_Forcings_Engine_BMI import esmf_creation, forcing_extraction +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.bmi_grid import Grid, GridType from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import GEOGRID +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GriddedGeoMeta, + HydrofabricGeoMeta, + UnstructuredGeoMeta, +) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig -from .bmi_grid import Grid, GridType from .core import ( err_handler, forcingInputMod, @@ -63,6 +68,7 @@ from nextgen_forcings_ewts import MODULE_NAME, configure_logging +CONSTS = CONSTS[Path(__file__).stem] configure_logging() @@ -250,492 +256,9 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: if self._job_meta.nwmConfig not in ["AORC", "NWM"]: forcing_extraction.retrieve_forcing(self._job_meta) - # Initialize our WRF-Hydro geospatial object, which contains - # information about the modeling domain, local processor - # grid 
boundaries, and ESMF grid objects/fields to be used - # in regridding. - if self._job_meta.grid_type not in GEOGRID: - self._job_meta.errMsg = f"Invalid grid type specified: {self._job_meta.grid_type}. Valid options are: {list(GEOGRID.keys())}" - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - - self._wrf_hydro_geo_meta = GEOGRID.get(self._job_meta.grid_type)( - self._job_meta, self._mpi_meta - ) - # Assign grid type to BMI class for grid information self._grid_type = self._job_meta.grid_type.lower() - - # Set output var names based on grid type - if self._grid_type == "gridded": - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - self._output_var_names = [ - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_1: Grid = Grid( - 1, 2, GridType.uniform_rectilinear - ) # Grid 1 is a 2-dimensional grid - self.grid_1._grid_y = self._wrf_hydro_geo_meta.latitude_grid.flatten() - self.grid_1._grid_x = self._wrf_hydro_geo_meta.longitude_grid.flatten() - self.grid_1._shape = self._wrf_hydro_geo_meta.latitude_grid.shape - self.grid_1._size = len( - self._wrf_hydro_geo_meta.latitude_grid.flatten() - ) - self.grid_1._spacing = ( - self._wrf_hydro_geo_meta.dx_meters, - self._wrf_hydro_geo_meta.dy_meters, - ) - self.grid_1._units = "m" - self.grid_1._origin = None - - self._grids = [self.grid_1] - - self._grid_map = { - "U2D_ELEMENT": self.grid_1, - "V2D_ELEMENT": self.grid_1, - "LWDOWN_ELEMENT": self.grid_1, - "SWDOWN_ELEMENT": self.grid_1, - "T2D_ELEMENT": self.grid_1, - "Q2D_ELEMENT": self.grid_1, - "PSFC_ELEMENT": self.grid_1, - "RAINRATE_ELEMENT": self.grid_1, - "LQFRAC_ELEMENT": self.grid_1, - } - - else: - self._output_var_names = [ - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. 
- # This is going to get long, - # since the input variable names could come from any forcing... - # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_1: Grid = Grid( - 1, 2, GridType.uniform_rectilinear - ) # Grid 1 is a 2-dimensional grid - self.grid_1._grid_y = self._wrf_hydro_geo_meta.latitude_grid.flatten() - self.grid_1._grid_x = self._wrf_hydro_geo_meta.longitude_grid.flatten() - self.grid_1._shape = self._wrf_hydro_geo_meta.latitude_grid.shape - self.grid_1._size = len( - self._wrf_hydro_geo_meta.latitude_grid.flatten() - ) - self.grid_1._spacing = ( - self._wrf_hydro_geo_meta.dx_meters, - self._wrf_hydro_geo_meta.dy_meters, - ) - self.grid_1._units = "m" - self.grid_1._origin = None - - self._grids = [self.grid_1] - - self._grid_map = { - "U2D_ELEMENT": self.grid_1, - "V2D_ELEMENT": self.grid_1, - "LWDOWN_ELEMENT": self.grid_1, - "SWDOWN_ELEMENT": self.grid_1, - "T2D_ELEMENT": self.grid_1, - "Q2D_ELEMENT": self.grid_1, - "PSFC_ELEMENT": self.grid_1, - "RAINRATE_ELEMENT": self.grid_1, - } - - elif self._grid_type == "unstructured": - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "U2D_NODE", - "V2D_NODE", - 
"LWDOWN_NODE", - "SWDOWN_NODE", - "T2D_NODE", - "Q2D_NODE", - "PSFC_NODE", - "RAINRATE_NODE", - "LQFRAC_NODE", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... - # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_NODE": ["10-m U-component of wind", "m/s"], - "V2D_NODE": ["10-m V-component of wind", "m/s"], - "T2D_NODE": ["2-m Air Temperature", "K"], - "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_NODE": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_NODE": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_NODE": ["Surface Pressure", "Pa"], - "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_NODE": ["Liquid Fraction of Precipitation", "%"], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_2: Grid = Grid( - 2, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - self.grid_3: Grid = Grid( - 3, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_2._grid_y = self._wrf_hydro_geo_meta.latitude_grid_elem - self.grid_2._grid_x = 
self._wrf_hydro_geo_meta.longitude_grid_elem - - self.grid_3._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_3._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) - self.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_2, self.grid_3] - - self._grid_map = { - "U2D_ELEMENT": self.grid_2, - "V2D_ELEMENT": self.grid_2, - "LWDOWN_ELEMENT": self.grid_2, - "SWDOWN_ELEMENT": self.grid_2, - "T2D_ELEMENT": self.grid_2, - "Q2D_ELEMENT": self.grid_2, - "PSFC_ELEMENT": self.grid_2, - "RAINRATE_ELEMENT": self.grid_2, - "LQFRAC_ELEMENT": self.grid_2, - "U2D_NODE": self.grid_3, - "V2D_NODE": self.grid_3, - "LWDOWN_NODE": self.grid_3, - "SWDOWN_NODE": self.grid_3, - "T2D_NODE": self.grid_3, - "Q2D_NODE": self.grid_3, - "PSFC_NODE": self.grid_3, - "RAINRATE_NODE": self.grid_3, - "LQFRAC_NODE": self.grid_3, - } - else: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "U2D_NODE", - "V2D_NODE", - "LWDOWN_NODE", - "SWDOWN_NODE", - "T2D_NODE", - "Q2D_NODE", - "PSFC_NODE", - "RAINRATE_NODE", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_NODE": ["10-m U-component of wind", "m/s"], - "V2D_NODE": ["10-m V-component of wind", "m/s"], - "T2D_NODE": ["2-m Air Temperature", "K"], - "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_NODE": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_NODE": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_NODE": ["Surface Pressure", "Pa"], - "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_2: Grid = Grid( - 2, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - self.grid_3: Grid = Grid( - 3, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_2._grid_y = self._wrf_hydro_geo_meta.latitude_grid_elem - self.grid_2._grid_x = self._wrf_hydro_geo_meta.longitude_grid_elem - - self.grid_3._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_3._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) - self.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_2, self.grid_3] - - self._grid_map = { - "U2D_ELEMENT": self.grid_2, - "V2D_ELEMENT": self.grid_2, - "LWDOWN_ELEMENT": self.grid_2, - "SWDOWN_ELEMENT": self.grid_2, - "T2D_ELEMENT": self.grid_2, - "Q2D_ELEMENT": self.grid_2, - "PSFC_ELEMENT": self.grid_2, - "RAINRATE_ELEMENT": self.grid_2, - "U2D_NODE": self.grid_3, - 
"V2D_NODE": self.grid_3, - "LWDOWN_NODE": self.grid_3, - "SWDOWN_NODE": self.grid_3, - "T2D_NODE": self.grid_3, - "Q2D_NODE": self.grid_3, - "PSFC_NODE": self.grid_3, - "RAINRATE_NODE": self.grid_3, - } - - elif self._grid_type == "hydrofabric": - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "CAT-ID", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "CAT-ID": ["Catchment ID", ""], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_4: Grid = Grid( - 4, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_4._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_4._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_4._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_4] - - self._grid_map = { - "CAT-ID": self.grid_4, - "U2D_ELEMENT": self.grid_4, - "V2D_ELEMENT": self.grid_4, - "LWDOWN_ELEMENT": self.grid_4, - "SWDOWN_ELEMENT": self.grid_4, - "T2D_ELEMENT": self.grid_4, - "Q2D_ELEMENT": self.grid_4, - "PSFC_ELEMENT": self.grid_4, - "RAINRATE_ELEMENT": self.grid_4, - "LQFRAC_ELEMENT": self.grid_4, - } - else: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "CAT-ID", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "CAT-ID": ["Catchment ID", ""], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_4: Grid = Grid( - 4, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_4._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_4._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_4._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_4] - - self._grid_map = { - "CAT-ID": self.grid_4, - "U2D_ELEMENT": self.grid_4, - "V2D_ELEMENT": self.grid_4, - "LWDOWN_ELEMENT": self.grid_4, - "SWDOWN_ELEMENT": self.grid_4, - "T2D_ELEMENT": self.grid_4, - "Q2D_ELEMENT": self.grid_4, - "PSFC_ELEMENT": self.grid_4, - "RAINRATE_ELEMENT": self.grid_4, - } + self._wrf_hydro_geo_meta.set_var_names(self) # ----- Create some lookup tabels from the long variable names --------# self._var_name_map_long_first = { @@ -798,42 +321,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: for parm in self._model_parameters_list: self._values[self._var_name_map_short_first[parm]] = self.cfg_bmi[parm] - if self._job_meta.grid_type == "gridded": - # -----------------------------------------------------------------------# - # Get the size of the flattened 2D arrays from the gridded domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - - for model_output in self.get_output_var_names(): - self._values[model_output] = 
np.zeros(self._varsize, dtype=float) - - elif self._job_meta.grid_type == "unstructured": - # -----------------------------------------------------------------------# - # Get the size of the flattened 1D arrays from the unstructured domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - self._varsize_elem = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid_elem.shape).flatten() - ) - - for model_output in self.get_output_var_names(): - if "ELEMENT" in model_output: - self._values[model_output] = np.zeros( - self._varsize_elem, dtype=float - ) - else: - self._values[model_output] = np.zeros(self._varsize, dtype=float) - - elif self._job_meta.grid_type == "hydrofabric": - # -----------------------------------------------------------------------# - # Get the size of the flattened 1D arrays from the hydrofabric domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - for model_output in self.get_output_var_names(): - self._values[model_output] = np.zeros(self._varsize, dtype=float) + self.get_size_of_arrays(self) # for model_input in self.get_input_var_names(): # self._values[model_input] = np.zeros(self._varsize, dtype=float) @@ -901,11 +389,7 @@ def _configure_output_path(self, output_path: str | None = None) -> None: return # Already configured or no output object to configure if self._job_meta.forcing_output == 1: - ext = { - "gridded": "GRIDDED", - "hydrofabric": "HYDROFABRIC", - "unstructured": "MESH", - }.get(self._job_meta.grid_type) + ext = CONSTS["extension_map"].get(self._job_meta.grid_type) if ext is None: raise ValueError(f"Invalid grid_type: {self._job_meta.grid_type}") @@ -1131,22 +615,13 @@ def get_value(self, var_name: str, dest: NDArray[Any]) -> NDArray[Any]: LOG.debug( f"[BMI get_value] Special case: 'grid:ids', grid_type: {self._job_meta.grid_type}" ) - if self._job_meta.grid_type == "gridded": - dest[:] = [self.grid_1.id] - elif 
self._job_meta.grid_type == "unstructured":
-                dest[:] = [self.grid_2.id, self.grid_3.id]
-            elif self._job_meta.grid_type == "hydrofabric":
-                dest[:] = [self.grid_4.id]
+            dest[:] = self.grid_ids(self)
+
         elif var_name == "grid:ranks":
             LOG.debug(
                 f"[BMI get_value] Special case: 'grid:ranks', grid_type: {self._job_meta.grid_type}"
             )
-            if self._job_meta.grid_type == "gridded":
-                dest[:] = [self.grid_1.rank]
-            elif self._job_meta.grid_type == "unstructured":
-                dest[:] = [self.grid_2.rank, self.grid_3.rank]
-            elif self._job_meta.grid_type == "hydrofabric":
-                dest[:] = [self.grid_4.rank]
+            dest[:] = self.grid_ranks(self)
         else:
             src = self.get_value_ptr(var_name)
             LOG.debug(
@@ -2078,3 +1553,316 @@ def _parse_config(self, cfg: dict) -> dict:
         # Add more config parsing if necessary
 
         return cfg
+
+
+class NWMv3_Forcing_Engine_BMI_model_Gridded(NWMv3_Forcing_Engine_BMI_model):
+    """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model.
+
+    It includes methods for initializing the model, updating it, accessing model variables,
+    and managing model configuration. This class is responsible for interacting with
+    geospatial data and forcing inputs for the model simulation.
+    """
+
+    def __init__(self):
+        """Create a model that is ready for initialization.
+
+        Initializes the model with default values for time, variables, and grid types.
+        """
+        super().__init__()
+
+        if self._job_meta.grid_type != "gridded":
+            self._job_meta.errMsg = (
+                f"Invalid grid type specified: {self._job_meta.grid_type}."
+            )
+            err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta)
+        self._wrf_hydro_geo_meta = GriddedGeoMeta(self._job_meta, self._mpi_meta)
+
+    def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]:
+        """Get the grid ranks for the gridded domain.
+
+        From bmi_model.py.
+        """
+        return [bmi_model.grid_1.rank]
+
+    def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]:
+        """Get the grid IDs for the gridded domain.
+
+        From bmi_model.py.
+        """
+        return [bmi_model.grid_1.id]
+
+    def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None:
+        """Get the size of the flattened 2D arrays from the gridded domain.
+
+        From bmi_model.py.
+        """
+        bmi_model._varsize = len(
+            np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten()
+        )
+
+        for model_output in bmi_model.get_output_var_names():
+            bmi_model._values[model_output] = np.zeros(bmi_model._varsize, dtype=float)
+
+    def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None:
+        """Set the variable names for the BMI model based on the geospatial metadata.
+
+        From bmi_model.py.
+
+        Create a Python dictionary that maps CSDMS Standard
+        Names to the model's internal variable names.
+        This is going to get long,
+        since the input variable names could come from any forcing...
+        """
+        # Flag here to indicate whether or not the NWM operational configuration
+        # will support a BMI field for liquid fraction of precipitation
+        if bmi_model._job_meta.include_lqfrac == 1:
+            bmi_model._output_var_names = CONSTS["_output_var_names"] + [
+                "LQFRAC_ELEMENT"
+            ]
+            bmi_model._var_name_units_map = CONSTS["_var_name_units_map"] | {
+                "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"]
+            }
+        else:
+            bmi_model._output_var_names = CONSTS["_output_var_names"]
+            bmi_model._var_name_units_map = CONSTS["_var_name_units_map"]
+
+        bmi_model.grid_1 = Grid(
+            1, 2, GridType.uniform_rectilinear
+        )  # Grid 1 is a 2-dimensional grid
+        bmi_model.grid_1._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid.flatten()
+        bmi_model.grid_1._grid_x = (
+            bmi_model._wrf_hydro_geo_meta.longitude_grid.flatten()
+        )
+        bmi_model.grid_1._shape = bmi_model._wrf_hydro_geo_meta.latitude_grid.shape
+        bmi_model.grid_1._size = len(
+            bmi_model._wrf_hydro_geo_meta.latitude_grid.flatten()
+        )
+        bmi_model.grid_1._spacing = (
+            bmi_model._wrf_hydro_geo_meta.dx_meters,
+            bmi_model._wrf_hydro_geo_meta.dy_meters,
+        )
+        bmi_model.grid_1._units = "m"
+        bmi_model.grid_1._origin = None
+
+        bmi_model._grids = [bmi_model.grid_1]
+        bmi_model._grid_map = {
+            var_name: bmi_model.grid_1 for var_name in bmi_model._output_var_names
+        }
+
+
+class NWMv3_Forcing_Engine_BMI_model_HydroFabric(NWMv3_Forcing_Engine_BMI_model):
+    """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model.
+
+    It includes methods for initializing the model, updating it, accessing model variables,
+    and managing model configuration. This class is responsible for interacting with
+    geospatial data and forcing inputs for the model simulation.
+    """
+
+    def __init__(self):
+        """Create a model that is ready for initialization.
+
+        Initializes the model with default values for time, variables, and grid types.
+        """
+        super().__init__()
+
+        if self._job_meta.grid_type != "hydrofabric":
+            self._job_meta.errMsg = (
+                f"Invalid grid type specified: {self._job_meta.grid_type}."
+            )
+            err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta)
+        self._wrf_hydro_geo_meta = HydrofabricGeoMeta(self._job_meta, self._mpi_meta)
+
+    def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]:
+        """Get the grid ranks for the hydrofabric domain.
+
+        From bmi_model.py.
+        """
+        return [bmi_model.grid_4.rank]
+
+    def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]:
+        """Get the grid IDs for the hydrofabric domain.
+
+        From bmi_model.py.
+        """
+        return [bmi_model.grid_4.id]
+
+    def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model):
+        """Get the size of the flattened 1D arrays from the hydrofabric domain.
+
+        From bmi_model.py.
+ """ + bmi_model._varsize = len( + np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten() + ) + for model_output in bmi_model.get_output_var_names(): + bmi_model._values[model_output] = np.zeros(bmi_model._varsize, dtype=float) + + def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): + """Set the variables for the hydrofabric geospatial metadata. + + From bmi_model.py. + + Create a Python dictionary that maps CSDMS Standard + Names to the model's internal variable names. + This is going to get long, + since the input variable names could come from any forcing... + """ + # Flag here to indicate whether or not the NWM operational configuration + # will support a BMI field for liquid fraction of precipitation + if bmi_model._job_meta.include_lqfrac == 1: + bmi_model._output_var_names = ( + ["CAT-ID"] + CONSTS["_output_var_names"] + ["LQFRAC_ELEMENT"] + ) + bmi_model._var_name_units_map = ( + {"CAT-ID": ["Catchment ID", ""]} + | CONSTS["_var_name_units_map"] + | { + "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], + } + ) + else: + bmi_model._output_var_names = ["CAT-ID"] + CONSTS["_output_var_names"] + bmi_model._var_name_units_map = {"CAT-ID": ["Catchment ID", ""]} | CONSTS[ + "_var_name_units_map" + ] + + bmi_model.grid_4 = Grid( + 4, 2, GridType.unstructured + ) # Grid 1 is a 2-dimensional grid + + bmi_model.grid_4._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid + bmi_model.grid_4._grid_x = bmi_model._wrf_hydro_geo_meta.longitude_grid + bmi_model.grid_4._size = len(bmi_model._wrf_hydro_geo_meta.latitude_grid) + bmi_model._grids = [bmi_model.grid_4] + bmi_model._grid_map = { + var_name: bmi_model.grid_4 for var_name in bmi_model._output_var_names + } + + +class NWMv3_Forcing_Engine_BMI_model_Unstructured(NWMv3_Forcing_Engine_BMI_model): + """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model. 
+ + It includes methods for initializing the model, updating it, accessing model variables, + and managing model configuration. This class is responsible for interacting with + geospatial data and forcing inputs for the model simulation. + """ + + def __init__(self): + """Create a model that is ready for initialization. + + Initializes the model with default values for time, variables, and grid types. + """ + super().__init__() + + if self._job_meta.grid_type != "unstructured": + self._job_meta.errMsg = ( + f"Invalid grid type specified: {self._job_meta.grid_type}." + ) + err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) + self._wrf_hydro_geo_meta = UnstructuredGeoMeta(self._job_meta, self._mpi_meta) + + def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid ranks for the unstructured domain. + + From bmi_model.py. + """ + return [bmi_model.grid_2.rank, bmi_model.grid_3.rank] + + def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid IDs for the unstructured domain. + + From bmi_model.py. + """ + return [bmi_model.grid_2.id, bmi_model.grid_3.id] + + def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Get the size of the flattened 1D arrays for the unstructured domain. + + From bmi_model.py. + """ + bmi_model._varsize = len( + np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten() + ) + bmi_model._varsize_elem = len( + np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid_elem.shape).flatten() + ) + + for model_output in bmi_model.get_output_var_names(): + if "ELEMENT" in model_output: + bmi_model._values[model_output] = np.zeros( + bmi_model._varsize_elem, dtype=float + ) + else: + bmi_model._values[model_output] = np.zeros( + bmi_model._varsize, dtype=float + ) + + def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Set the variable names for the unstructured domain. + + From bmi_model.py. 
+
+        Create a Python dictionary that maps CSDMS Standard
+        Names to the model's internal variable names.
+        This is going to get long,
+        since the input variable names could come from any forcing...
+        """
+        bmi_model.grid_2 = Grid(
+            2, 2, GridType.unstructured
+        )  # Grid 2 is a 2-dimensional unstructured grid
+        bmi_model.grid_3 = Grid(
+            3, 2, GridType.unstructured
+        )  # Grid 3 is a 2-dimensional unstructured grid
+
+        bmi_model.grid_2._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid_elem
+        bmi_model.grid_2._grid_x = bmi_model._wrf_hydro_geo_meta.longitude_grid_elem
+
+        bmi_model.grid_3._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid
+        bmi_model.grid_3._grid_x = bmi_model._wrf_hydro_geo_meta.longitude_grid
+
+        # Flag here to indicate whether or not the NWM operational configuration
+        # will support a BMI field for liquid fraction of precipitation
+        if bmi_model._job_meta.include_lqfrac == 1:
+            bmi_model._output_var_names = (
+                CONSTS["_output_var_names"]
+                + ["LQFRAC_ELEMENT"]
+                + CONSTS["_output_var_names_unstructured"]
+                + ["LQFRAC_NODE"]
+            )
+
+            bmi_model._var_name_units_map = (
+                CONSTS["_var_name_units_map"]
+                | {"LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"]}
+                | CONSTS["_var_name_units_map_unstructured"]
+                | {"LQFRAC_NODE": ["Liquid Fraction of Precipitation", "%"]}
+            )
+
+            bmi_model._grid_map = (
+                {var_name: bmi_model.grid_2 for var_name in CONSTS["_output_var_names"]}
+                | {"LQFRAC_ELEMENT": bmi_model.grid_2}
+                | {
+                    var_name: bmi_model.grid_3
+                    for var_name in CONSTS["_output_var_names_unstructured"]
+                }
+                | {"LQFRAC_NODE": bmi_model.grid_3}
+            )
+        else:
+            bmi_model._output_var_names = (
+                CONSTS["_output_var_names"] + CONSTS["_output_var_names_unstructured"]
+            )
+
+            bmi_model._var_name_units_map = (
+                CONSTS["_var_name_units_map"]
+                | CONSTS["_var_name_units_map_unstructured"]
+            )
+
+            bmi_model._grid_map = {
+                var_name: bmi_model.grid_2 for var_name in CONSTS["_output_var_names"]
+            } | {
+                var_name: bmi_model.grid_3
+                for var_name in CONSTS["_output_var_names_unstructured"]
+            }
+
+ bmi_model.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) + bmi_model.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) + bmi_model._grids = [bmi_model.grid_2, bmi_model.grid_3] diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 7e513b9d..e0b4e637 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -307,62 +307,6 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) - def initialize_geo_data( - self, - input_forcings: InputForcings, - ) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. - - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. 
- """ - input_forcings.final_forcings = np.empty( - [ - input_forcings.force_count, - input_forcings.geo_meta.ny_local, - input_forcings.geo_meta.nx_local, - ], - np.float64, - ) - input_forcings.height = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.final_forcings_elem = None - input_forcings.height_elem = None - input_forcings.regridded_mask_elem = None - input_forcings.regridded_mask_elem_AORC = None - - def handle_humidity_downscaling( - self, - input_forcings: InputForcings, - ) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. - - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. - """ - input_forcings.t2dTmp = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.t2dTmp_elem = None - input_forcings.psfcTmp_elem = None - @broadcast @property @lru_cache @@ -961,46 +905,6 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) - def initialize_geo_data(self, input_forcings: InputForcings) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. - - From forcingInputMod.py - - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. 
- This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. - """ - input_forcings.final_forcings = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 - ) - input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.final_forcings_elem = None - input_forcings.height_elem = None - input_forcings.regridded_mask_elem = None - input_forcings.regridded_mask_elem_AORC = None - - def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. - - From forcingInputMod.py - - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. - """ - input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.t2dTmp_elem = None - input_forcings.psfcTmp_elem = None - @property @lru_cache def lat_bounds(self) -> np.ndarray: @@ -1176,59 +1080,6 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) - def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. - - From forcingInputMod.py - - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. 
- """ - input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.t2dTmp_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.psfcTmp_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - - def initialize_geo_data(self, input_forcings: InputForcings) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. - - From forcingInputMod.py - - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. - """ - input_forcings.final_forcings = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 - ) - input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.final_forcings_elem = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local_elem], - np.float64, - ) - input_forcings.height_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.regridded_mask_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.regridded_mask_elem_AORC = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - @broadcast @property @lru_cache @@ -1618,10 +1469,3 @@ def nx_local_elem(self) -> int: def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) - - -GEOGRID = { - "gridded": GriddedGeoMeta, - "unstructured": UnstructuredGeoMeta, - 
"hydrofabric": HydrofabricGeoMeta, -} From 832534d05cbd114ba659c556b8b53d07e65e476d Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Feb 2026 13:07:07 -0600 Subject: [PATCH 19/34] remove commented out items --- .../NextGen_Forcings_Engine/core/consts.py | 25 ------------------- 1 file changed, 25 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 5a361aff..29c4b84d 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -11,34 +11,9 @@ CONSTS = { "geoMod": { "GeoMeta": [ - # "nx_global", - # "ny_global", - # "nx_global_elem", - # "ny_global_elem", - # "dx_meters", - # "dy_meters", - # "latitude_grid", - # "longitude_grid", - # "element_ids", - # "element_ids_global", - # "latitude_grid_elem", - # "longitude_grid_elem", - # "lat_bounds", - # "lon_bounds", - # "mesh_inds", - # "mesh_inds_elem", - # "height", - # "height_elem", - # "sina_grid", - # "cosa_grid", "nodeCoords", "centerCoords", "inds", - # "slope", - # "slp_azi", - # "slope_elem", - # "slp_azi_elem", - # "esmf_grid", "esmf_lat", "esmf_lon", ], From 965dcd552bb777b383d611eaa1b9989c2e10fcbe Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Feb 2026 13:07:40 -0600 Subject: [PATCH 20/34] split input forcing into multiple classes based on specified discretization --- .../core/forcingInputMod.py | 223 +++++++++++++++++- 1 file changed, 222 insertions(+), 1 deletion(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 4cd15b8d..cd9a9eb3 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -10,6 +10,8 @@ from pathlib import Path from typing 
import TYPE_CHECKING +import numpy as np + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS if TYPE_CHECKING: @@ -247,6 +249,219 @@ def temporal_interpolate_inputs( ) +class InputForcingsGridded(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single gridded input forcing product. + """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsGridded with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + + def initialize_geo_data( + self, + input_forcings: InputForcings, + ) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. + This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. 
+ """ + input_forcings.final_forcings = np.empty( + [ + input_forcings.force_count, + input_forcings.geo_meta.ny_local, + input_forcings.geo_meta.nx_local, + ], + np.float64, + ) + input_forcings.height = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.final_forcings_elem = None + input_forcings.height_elem = None + input_forcings.regridded_mask_elem = None + input_forcings.regridded_mask_elem_AORC = None + + def handle_humidity_downscaling( + self, + input_forcings: InputForcings, + ) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. + """ + input_forcings.t2dTmp = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + np.float32, + ) + input_forcings.t2dTmp_elem = None + input_forcings.psfcTmp_elem = None + + +class InputForcingsHydrofabric(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single hydrofabric input forcing product. 
+ """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsHydrofabric with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + + def initialize_geo_data(self, input_forcings: InputForcings) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + From forcingInputMod.py + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. + This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. + """ + input_forcings.final_forcings = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 + ) + input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.final_forcings_elem = None + input_forcings.height_elem = None + input_forcings.regridded_mask_elem = None + input_forcings.regridded_mask_elem_AORC = None + + def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + From forcingInputMod.py + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. 
+ """ + input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.t2dTmp_elem = None + input_forcings.psfcTmp_elem = None + + +class InputForcingsUnstructured(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single unstructured input forcing product. + """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsUnstructured with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + + def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: + """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + + From forcingInputMod.py + + If we have specified specific humidity downscaling, establish arrays to hold + temporary temperature arrays that are un-downscaled. + """ + input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.psfcTmp = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.t2dTmp_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.psfcTmp_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + + def initialize_geo_data(self, input_forcings: InputForcings) -> None: + """Initialize geometry-related arrays based on grid type and downscaling options. + + From forcingInputMod.py + + Initialize the local final grid of values. This is represntative + of the local grid for this forcing, for a specific output timesetp. 
+ This grid will be updated from one output timestep to another, and + also through downscaling and bias correction. + """ + input_forcings.final_forcings = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 + ) + input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) + input_forcings.regridded_mask = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.regridded_mask_AORC = np.empty( + [input_forcings.geo_meta.ny_local], np.float32 + ) + input_forcings.final_forcings_elem = np.empty( + [input_forcings.force_count, input_forcings.geo_meta.ny_local_elem], + np.float64, + ) + input_forcings.height_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.regridded_mask_elem = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + input_forcings.regridded_mask_elem_AORC = np.empty( + [input_forcings.geo_meta.ny_local_elem], np.float32 + ) + + +INPUTFORCINGS = { + "gridded": InputForcingsGridded, + "unstructured": InputForcingsUnstructured, + "hydrofabric": InputForcingsHydrofabric, +} + + def init_dict( config_options: ConfigOptions, geo_meta: GeoMeta, @@ -268,7 +483,13 @@ def init_dict( custom_count = 0 for idx in range(0, config_options.number_inputs): force_key = config_options.input_forcings[idx] - input_dict[force_key] = InputForcings( + + if config_options.grid_type not in INPUTFORCINGS: + raise TypeError( + f"Invalid grid type specified: {config_options.grid_type}. 
Valid options are: {list(INPUTFORCINGS.keys())}" + ) + + input_dict[force_key] = INPUTFORCINGS[config_options.grid_type]( force_key, idx, config_options, geo_meta, mpi_config ) # input_dict[force_key].keyValue = force_key From a9546e0572b335c16a49d8782e44815964105536 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:30:17 -0600 Subject: [PATCH 21/34] update variable names across the repo --- .../NextGen_Forcings_Engine/core/downscale.py | 2 +- .../NextGen_Forcings_Engine/core/regrid.py | 23 ++-- .../core/time_handling.py | 120 +++++++++--------- 3 files changed, 74 insertions(+), 71 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py index bdfbaeb2..4b521568 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py @@ -39,7 +39,7 @@ def run_downscaling(input_forcings, config_options, geo_meta_wrf_hydro, mpi_conf # Dictionary mapping to shortwave radiation downscaling downscale_sw = {0: no_downscale, 1: ncar_topo_adj} - downscale_sw[input_forcings.swDowscaleOpt]( + downscale_sw[input_forcings.swDownscaleOpt]( input_forcings, config_options, geo_meta_wrf_hydro, mpi_config ) err_handler.check_program_status(config_options, mpi_config) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py index ae80b704..09bee9d0 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py @@ -1,5 +1,7 @@ """Regridding module file for regridding input forcing files.""" +from __future__ import annotations + import hashlib import os import sys @@ -9,7 +11,7 @@ from functools import partial from pathlib import Path from time import monotonic, time - +from typing 
import TYPE_CHECKING # import mpi4py.util.pool as mpi_pool # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: # /usr/local/esmf/lib/libO/Linux.gfortran.64.openmpi.default/libesmf_fullylinked.so(get_geom+0x36) @@ -39,13 +41,14 @@ ioMod, timeInterpMod, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMeta, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME from ..esmf_utils import ( @@ -12082,7 +12085,7 @@ def make_regrid( extrap_method = ESMF.ExtrapMethod.CREEP_FILL if fill else ESMF.ExtrapMethod.NONE regrid_method = (ESMF.RegridMethod.BILINEAR, ESMF.RegridMethod.NEAREST_STOD)[ - input_forcings.regridOpt - 1 + input_forcings.regrid_opt - 1 ] err_handler.check_program_status(config_options, mpi_config) @@ -12279,7 +12282,7 @@ def calculate_weights( err_handler.check_program_status(config_options, mpi_config) # check if we're doing border trimming and set up mask - border = input_forcings.border # // 5 # HRRR is a 3 km product + border = input_forcings.ignored_border_widths # // 5 # HRRR is a 3 km product if border > 0: try: mask = input_forcings.esmf_grid_in.add_item( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py index 04708010..497fbdaf 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py +++ 
b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py @@ -123,14 +123,14 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" @@ -213,7 +213,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -255,27 +255,27 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if d_current.year > 2019: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) else: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" @@ -284,13 +284,13 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if input_forcings.product_name == "AORC_Alaska": # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -373,7 +373,7 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if config_options.input_forcings[0] not in [12, 21]: if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input AORC file: " + input_forcings.file_in2 @@ -412,8 +412,8 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): :return: """ # Point to ERA5 netcdf input file - tmp_file1 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) - tmp_file2 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) + tmp_file1 = os.path.join(input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0]) + tmp_file2 = os.path.join(input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0]) if mpi_config.rank == 0: # Check to see if files are already set. If not, then reset, grids and @@ -491,7 +491,7 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ERA5 Interim file: " + input_forcings.file_in2 @@ -539,13 +539,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -554,13 +554,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext @@ -642,7 +642,7 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NWM file: " + input_forcings.file_in2 @@ -747,7 +747,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + prev_ext_ana_date.strftime("%Y%m%d%H") + "/" @@ -834,7 +834,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ExtAnA file: " + input_forcings.file_in2 @@ -946,7 +946,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." 
+ current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -960,7 +960,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -975,7 +975,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -989,7 +989,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -1075,7 +1075,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1353,7 +1353,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1368,7 +1368,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." 
+ current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1452,7 +1452,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1558,7 +1558,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1568,7 +1568,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1583,7 +1583,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1593,7 +1593,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1608,7 +1608,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." 
+ current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1618,7 +1618,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1702,7 +1702,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): missing = input_forcings.file_in2.replace("pgrb", "[bgrb|pgrb]") - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input RAP file: " + missing + " not found." ) @@ -1819,7 +1819,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if current_gfs_cycle < datetime.datetime(2019, 6, 12, 12): tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1829,7 +1829,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1841,7 +1841,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): else: # FV3 change on June 12th, 2019 tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -1853,7 +1853,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." 
+ current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -2016,7 +2016,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input GFS file: " + input_forcings.file_in2 @@ -2156,7 +2156,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." + current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2169,7 +2169,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." + current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2258,7 +2258,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NAM Nest file: " + input_forcings.file_in2 @@ -2366,7 +2366,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.file_ext = ".grb2" tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2383,7 +2383,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." 
+ current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2480,7 +2480,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input CFSv2 file: " + input_forcings.file_in2 @@ -2563,7 +2563,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." + current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2571,7 +2571,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ + ".nc" ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." + current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2653,7 +2653,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -3256,7 +3256,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/SBC_LWF/" + input_forcings.fcst_date1.strftime("%Y%m") + "/" @@ -3339,7 +3339,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input SBCV2_LWF file: " + input_forcings.file_in2 @@ -4357,7 +4357,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour2 = current_fcst.total_seconds() / 3600 tmp_file1 = os.path.join( - input_forcings.inDir, + input_forcings.input_force_dirs, "NDFD", current_cycle.strftime("%Y%m%d"), "wgrbbul", @@ -4447,7 +4447,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): for tag in ("tmp", "wdir", "wspd", "qpf") ]: if not os.path.isfile(subfile): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( f"Expected input NDFD file: {subfile} not found." ) @@ -4707,8 +4707,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour1 = input_forcings.fcst_hour1 - 1 if config_options.ana_flag == 0: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) tmp_file1 = files1[0] @@ -4716,8 +4716,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): config_options.statusMsg = "Previous input file being used: " + tmp_file1 err_handler.log_msg(config_options, mpi_config, True) # log at debug level - pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" files2 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4729,8 +4729,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): elif config_options.ana_flag == 1: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4853,7 +4853,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input file: " + input_forcings.file_in2 + " not found." 
) From cacbe50c7f06e28982e6f29413e2ed84aea9e642 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:31:12 -0600 Subject: [PATCH 22/34] resolve circular imports --- .../NextGen_Forcings_Engine/core/geoMod.py | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index e0b4e637..9d17bc71 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -2,7 +2,7 @@ import math from pathlib import Path -from typing import TYPE_CHECKING + import numpy as np @@ -11,10 +11,6 @@ import shapely from scipy import spatial -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.forcingInputMod import ( - InputForcings, -) - try: import esmpy as ESMF except ImportError: @@ -33,11 +29,6 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME -if TYPE_CHECKING: - from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.forcingInputMod import ( - InputForcings, - ) - LOG = logging.getLogger(MODULE_NAME) CONSTS = CONSTS[Path(__file__).stem] @@ -1469,3 +1460,4 @@ def nx_local_elem(self) -> int: def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) + From b10ce670a0af9b592d0fedb67170274be2b31522 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:32:10 -0600 Subject: [PATCH 23/34] get grid type before initializing --- NextGen_Forcings_Engine_BMI/run_bmi_model.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/run_bmi_model.py b/NextGen_Forcings_Engine_BMI/run_bmi_model.py index 08c1403f..c5898de0 100755 --- a/NextGen_Forcings_Engine_BMI/run_bmi_model.py +++ 
b/NextGen_Forcings_Engine_BMI/run_bmi_model.py @@ -2,12 +2,12 @@ import datetime import pathlib from pathlib import Path - +import yaml import numpy as np import pandas as pd # This is the NextGen Forcings Engine BMI instance to execute -from NextGen_Forcings_Engine.bmi_model import NWMv3_Forcing_Engine_BMI_model +from NextGen_Forcings_Engine.bmi_model import BMIMODEL,parse_config def run_bmi( @@ -37,9 +37,6 @@ def run_bmi( end_time = datetime.datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S") ngen_datetimes = pd.date_range(start=start_time, end=end_time, freq="h") - print("Creating an instance of the BMI model object") - model = NWMv3_Forcing_Engine_BMI_model() - print("Initializing the BMI model") # Set the path for the config file, using the default if none is provided cfg_path = ( @@ -47,6 +44,11 @@ def run_bmi( if config_path is not None else str(Path(__file__).parent.resolve() / "config.yml") ) + with open(cfg_path,"r") as fp: + config=parse_config(yaml.safe_load(fp)) + + print("Creating an instance of the BMI model object") + model = BMIMODEL[config.get("GRID_TYPE")]() # IMPORTANT: We are not calling initialize() directly here. 
# Instead, we call initialize_with_params(), which handles @@ -72,7 +74,7 @@ def run_bmi( # =============================== if model._grid_type in {"gridded", "hydrofabric"}: varsize = ( - len(model._wrf_hydro_geo_meta.element_ids_global) + len(model.geo_meta.element_ids_global) if model._grid_type == "hydrofabric" else model._varsize ) From 8545a65692121bd21907ea443d0edc8768e104fa Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:33:21 -0600 Subject: [PATCH 24/34] make parse_config a function instead of a method --- .../NextGen_Forcings_Engine/bmi_model.py | 166 +++++++++--------- 1 file changed, 83 insertions(+), 83 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 262b51ca..ff11c747 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -1459,100 +1459,100 @@ def get_grid_z(self, grid_id: int, z: NDArray[np.float64]) -> NDArray[np.float64 # ------------------------------------------------------------ # ------------------------------------------------------------ - def _parse_config(self, cfg: dict) -> dict: - """Parse the provided configuration dictionary (`cfg`) and modifies it based on certain rules. +def parse_config(cfg: dict) -> dict: + """Parse the provided configuration dictionary (`cfg`) and modifies it based on certain rules. - This function processes specific keys in the configuration dictionary: - - Converts path-like strings to `PosixPath` objects. - - Converts date strings to `pandas` datetime objects. - - Configures lists of integers or strings for specific variables in the configuration. + This function processes specific keys in the configuration dictionary: + - Converts path-like strings to `PosixPath` objects. + - Converts date strings to `pandas` datetime objects. 
+ - Configures lists of integers or strings for specific variables in the configuration. - The function updates the `cfg` dictionary directly, modifying values as needed to match expected formats and types. + The function updates the `cfg` dictionary directly, modifying values as needed to match expected formats and types. - :param cfg: A dictionary containing the configuration settings. The dictionary may include paths, dates, and lists of values. - :return: The updated configuration dictionary with appropriately parsed values. - """ - # LOG.debug(f"Entering _parse_config with cfg type: {type(cfg)}") - if isinstance(cfg, str): - LOG.error( - f"Received string data (raw CSV) instead of dictionary: {cfg[:200]}..." - ) - raise TypeError( - "Expected dictionary in _parse_config, but got a raw CSV string." - ) + :param cfg: A dictionary containing the configuration settings. The dictionary may include paths, dates, and lists of values. + :return: The updated configuration dictionary with appropriately parsed values. + """ + # LOG.debug(f"Entering _parse_config with cfg type: {type(cfg)}") + if isinstance(cfg, str): + LOG.error( + f"Received string data (raw CSV) instead of dictionary: {cfg[:200]}..." + ) + raise TypeError( + "Expected dictionary in _parse_config, but got a raw CSV string." 
+ ) - # if not isinstance(cfg, dict): - # raise TypeError(f"[ERROR] Expected dictionary in _parse_config, got {type(cfg)} with contents: {cfg}") - - for key, val in cfg.items(): - # LOG.debug(f"Processing key: {key}, value type: {type(val)}, value: {val}") - # Convert all path strings to PosixPath objects - if any([key.endswith(x) for x in ["_dir", "_path", "_file", "_files"]]): - if (val is not None) and (val != "None"): - if isinstance(val, list): - temp_list = [] - for element in val: - temp_list.append(Path(element)) - cfg[key] = temp_list - else: - cfg[key] = Path(val) - else: - cfg[key] = None + # if not isinstance(cfg, dict): + # raise TypeError(f"[ERROR] Expected dictionary in _parse_config, got {type(cfg)} with contents: {cfg}") - # Convert Dates to pandas Datetime indices - elif key.endswith("_date"): + for key, val in cfg.items(): + # LOG.debug(f"Processing key: {key}, value type: {type(val)}, value: {val}") + # Convert all path strings to PosixPath objects + if any([key.endswith(x) for x in ["_dir", "_path", "_file", "_files"]]): + if (val is not None) and (val != "None"): if isinstance(val, list): temp_list = [] - for elem in val: - temp_list.append(pd.to_datetime(elem, format="%d/%m/%Y")) + for element in val: + temp_list.append(Path(element)) cfg[key] = temp_list else: - cfg[key] = pd.to_datetime(val, format="%d/%m/%Y") - - # Configure NWMv3.0 input configurations to what the ConfigClass expects - # Flag for variables that need a list of integers - elif key in [ - "InputForcings", - "InputMandatory", - "ForecastInputHorizons", - "ForecastInputOffsets", - "IgnoredBorderWidths", - "RegridOpt", - "TemperatureDownscaling", - "ShortwaveDownscaling", - "PressureDownscaling", - "PrecipDownscaling", - "HumidityDownscaling", - "TemperatureBiasCorrection", - "PressureBiasCorrection", - "HumidityBiasCorrection", - "WindBiasCorrection", - "SwBiasCorrection", - "LwBiasCorrection", - "PrecipBiasCorrection", - "SuppPcp", - "RegridOptSuppPcp", - 
"SuppPcpTemporalInterpolation", - "SuppPcpMandatory", - "SuppPcpInputOffsets", - "custom_input_fcst_freq", - ]: - cfg[key] = val - - # Flag for variables that need to be a list of strings - elif key in [ - "InputForcingDirectories", - "InputForcingTypes", - "DownscalingParamDirs", - "SuppPcpForcingTypes", - "SuppPcpDirectories", - ]: - cfg[key] = val + cfg[key] = Path(val) else: - pass + cfg[key] = None + + # Convert Dates to pandas Datetime indices + elif key.endswith("_date"): + if isinstance(val, list): + temp_list = [] + for elem in val: + temp_list.append(pd.to_datetime(elem, format="%d/%m/%Y")) + cfg[key] = temp_list + else: + cfg[key] = pd.to_datetime(val, format="%d/%m/%Y") + + # Configure NWMv3.0 input configurations to what the ConfigClass expects + # Flag for variables that need a list of integers + elif key in [ + "InputForcings", + "InputMandatory", + "ForecastInputHorizons", + "ForecastInputOffsets", + "IgnoredBorderWidths", + "RegridOpt", + "TemperatureDownscaling", + "ShortwaveDownscaling", + "PressureDownscaling", + "PrecipDownscaling", + "HumidityDownscaling", + "TemperatureBiasCorrection", + "PressureBiasCorrection", + "HumidityBiasCorrection", + "WindBiasCorrection", + "SwBiasCorrection", + "LwBiasCorrection", + "PrecipBiasCorrection", + "SuppPcp", + "RegridOptSuppPcp", + "SuppPcpTemporalInterpolation", + "SuppPcpMandatory", + "SuppPcpInputOffsets", + "custom_input_fcst_freq", + ]: + cfg[key] = val + + # Flag for variables that need to be a list of strings + elif key in [ + "InputForcingDirectories", + "InputForcingTypes", + "DownscalingParamDirs", + "SuppPcpForcingTypes", + "SuppPcpDirectories", + ]: + cfg[key] = val + else: + pass - # Add more config parsing if necessary - return cfg + # Add more config parsing if necessary + return cfg class NWMv3_Forcing_Engine_BMI_model_Gridded(NWMv3_Forcing_Engine_BMI_model): From bdce41eaf5e4248fffbadd69d0e7cea35fbfb4fb Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:34:06 
-0600 Subject: [PATCH 25/34] rename geo_meta and move init logic out of the __init__ method --- .../NextGen_Forcings_Engine/bmi_model.py | 142 ++++++------------ 1 file changed, 50 insertions(+), 92 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index ff11c747..946fc669 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -125,7 +125,7 @@ def __init__(self): self.cfg_bmi = None self._job_meta = None self._mpi_meta = None - self._wrf_hydro_geo_meta = None + self.geo_meta = None self._grid_type = None self._grids = None self._grid_map = None @@ -211,12 +211,12 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: with bmi_cfg_file.open("r") as fp: cfg = yaml.safe_load(fp) - self.cfg_bmi = self._parse_config(cfg) + self.cfg_bmi = parse_config(cfg) # If _job_meta was not set by initialize_with_params(), create a default one if self._job_meta is None: self._job_meta = ConfigOptions(self.cfg_bmi) - + # Parse the configuration options try: self._job_meta.validate_config(self.cfg_bmi) @@ -240,6 +240,9 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Initialize MPI communication self._mpi_meta = MpiConfig() + + self.geo_meta = HydrofabricGeoMeta(self._job_meta, self._mpi_meta) + try: comm = MPI.Comm.f2py(self._comm) if self._comm is not None else None self._mpi_meta.initialize_comm(self._job_meta, comm=comm) @@ -258,7 +261,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Assign grid type to BMI class for grid information self._grid_type = self._job_meta.grid_type.lower() - self._wrf_hydro_geo_meta.set_var_names(self) + self.set_var_names(self) # ----- Create some lookup tabels from the long variable names --------# self._var_name_map_long_first = { @@ -277,8 +280,8 
@@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids # to have a size of at least 2. if ( - self._wrf_hydro_geo_meta.nx_local < 2 - or self._wrf_hydro_geo_meta.ny_local < 2 + self.geo_meta.nx_local < 2 + or self.geo_meta.ny_local < 2 ): self._job_meta.errMsg = ( "You have specified too many cores for your WRF-Hydro grid. " @@ -289,7 +292,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Initialize our output object, which includes local slabs from the output grid. try: - self._output_obj = ioMod.OutputObj(self._job_meta, self._wrf_hydro_geo_meta) + self._output_obj = ioMod.OutputObj(self._job_meta, self.geo_meta) except Exception as e: err_handler.err_out_screen_para(self._job_meta, self._mpi_meta) err_handler.check_program_status(self._job_meta, self._mpi_meta) @@ -301,7 +304,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # downscaling and regridding purposes. try: self._input_forcing_mod = forcingInputMod.init_dict( - self._job_meta, self._wrf_hydro_geo_meta, self._mpi_meta + self._job_meta, self.geo_meta, self._mpi_meta ) except Exception as e: err_handler.err_out_screen_para(self._job_meta, self._mpi_meta) @@ -311,7 +314,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # the supp class. 
if self._job_meta.number_supp_pcp > 0: self._supp_pcp_mod = suppPrecipMod.initDict( - self._job_meta, self._wrf_hydro_geo_meta + self._job_meta, self.geo_meta ) else: self._supp_pcp_mod = None @@ -335,7 +338,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Set catchment ids if using hydrofabric if self._grid_type == "hydrofabric": - self._values["CAT-ID"] = self._wrf_hydro_geo_meta.element_ids_global + self._values["CAT-ID"] = self.geo_meta.element_ids_global self._configure_output_path(output_path) @@ -407,7 +410,7 @@ def _configure_output_path(self, output_path: str | None = None) -> None: ) self._output_obj.init_forcing_file( - self._job_meta, self._wrf_hydro_geo_meta, self._mpi_meta + self._job_meta, self.geo_meta, self._mpi_meta ) self._output_configured = True @@ -451,7 +454,7 @@ def update_until(self, future_time: float): self._values, future_time, self._job_meta, - self._wrf_hydro_geo_meta, + self.geo_meta, self._input_forcing_mod, self._supp_pcp_mod, self._mpi_meta, @@ -468,7 +471,7 @@ def update_until(self, future_time: float): self._values, self._values["current_model_time"], self._job_meta, - self._wrf_hydro_geo_meta, + self.geo_meta, self._input_forcing_mod, self._supp_pcp_mod, self._mpi_meta, @@ -488,7 +491,7 @@ def finalize(self): """ # Force destruction of ESMF objects - self._wrf_hydro_geo_meta = None + self.geo_meta = None self._input_forcing_mod = None self._supp_pcp_mod = None self._model = None @@ -1570,34 +1573,18 @@ def __init__(self): """ super().__init__() - if self._job_meta.grid_type != "gridded": - self._job_meta.errMsg = ( - f"Invalid grid type specified: {self._job_meta.grid_type}." - ) - self._wrf_hydro_geo_meta = GriddedGeoMeta(self._job_meta, self._mpi_meta) - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: - """Get the grid ranks for the gridded domain. - - From bmi_model.py. 
- """ + """Get the grid ranks for the gridded domain.""" return [bmi_model.grid_4.rank] def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: - """Get the grid IDs for the gridded domain. - - From bmi_model.py. - """ + """Get the grid IDs for the gridded domain.""" return [bmi_model.grid_1.id] def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: - """Get the size of the flattened 2D arrays from the gridded domain. - - From bmi_model.py. - """ + """Get the size of the flattened 2D arrays from the gridded domain.""" bmi_model._varsize = len( - np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten() + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() ) for model_output in bmi_model.get_output_var_names(): @@ -1606,8 +1593,6 @@ def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: """Set the variable names for the BMI model based on the geospatial metadata. - From bmi_model.py. - Create a Python dictionary that maps CSDMS Standard Names to the model's internal variable names. 
This is going to get long, @@ -1629,17 +1614,17 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: bmi_model.grid_1 = Grid( 1, 2, GridType.uniform_rectilinear ) # Grid 1 is a 2-dimensional grid - bmi_model.grid_1._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid.flatten() + bmi_model.grid_1._grid_y = bmi_model.geo_meta.latitude_grid.flatten() bmi_model.grid_1._grid_x = ( - bmi_model._wrf_hydro_geo_meta.longitude_grid.flatten() + bmi_model.geo_meta.longitude_grid.flatten() ) - bmi_model.grid_1._shape = bmi_model._wrf_hydro_geo_meta.latitude_grid.shape + bmi_model.grid_1._shape = bmi_model.geo_meta.latitude_grid.shape bmi_model.grid_1._size = len( - bmi_model._wrf_hydro_geo_meta.latitude_grid.flatten() + bmi_model.geo_meta.latitude_grid.flatten() ) bmi_model.grid_1._spacing = ( - bmi_model._wrf_hydro_geo_meta.dx_meters, - bmi_model._wrf_hydro_geo_meta.dy_meters, + bmi_model.geo_meta.dx_meters, + bmi_model.geo_meta.dy_meters, ) bmi_model.grid_1._units = "m" bmi_model.grid_1._origin = None @@ -1665,34 +1650,18 @@ def __init__(self): """ super().__init__() - if self._job_meta.grid_type != "hydrofabric": - self._job_meta.errMsg = ( - f"Invalid grid type specified: {self._job_meta.grid_type}." - ) - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - self._wrf_hydro_geo_meta = HydrofabricGeoMeta(self._job_meta, self._mpi_meta) - def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: - """Get the grid ranks for the hydrofabric domain. - - From bmi_model.py. - """ + """Get the grid ranks for the hydrofabric domain.""" return [bmi_model.grid_4.rank] def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: - """Get the grid IDs for the hydrofabric domain. - - From bmi_model.py. 
- """ + """Get the grid IDs for the hydrofabric domain.""" return [bmi_model.grid_4.id] def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): - """Get the size of the flattened 1D arrays from the hydrofabric domain. - - From bmi_model.py. - """ + """Get the size of the flattened 1D arrays from the hydrofabric domain.""" bmi_model._varsize = len( - np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten() + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() ) for model_output in bmi_model.get_output_var_names(): bmi_model._values[model_output] = np.zeros(bmi_model._varsize, dtype=float) @@ -1700,8 +1669,6 @@ def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): """Set the variables for the hydrofabric geospatial metadata. - From bmi_model.py. - Create a Python dictionary that maps CSDMS Standard Names to the model's internal variable names. This is going to get long, @@ -1730,9 +1697,9 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): 4, 2, GridType.unstructured ) # Grid 1 is a 2-dimensional grid - bmi_model.grid_4._grid_y = bmi_model._wrf_hydro_geo_meta.latitude_grid - bmi_model.grid_4._grid_x = bmi_model._wrf_hydro_geo_meta.longitude_grid - bmi_model.grid_4._size = len(bmi_model._wrf_hydro_geo_meta.latitude_grid) + bmi_model.grid_4._grid_y = bmi_model.geo_meta.latitude_grid + bmi_model.grid_4._grid_x = bmi_model.geo_meta.longitude_grid + bmi_model.grid_4._size = len(bmi_model.geo_meta.latitude_grid) bmi_model._grids = [bmi_model.grid_4] bmi_model._grid_map = { var_name: bmi_model.grid_4 for var_name in bmi_model._output_var_names @@ -1754,18 +1721,8 @@ def __init__(self): """ super().__init__() - if self._job_meta.grid_type != "unstructured": - self._job_meta.errMsg = ( - f"Invalid grid type specified: {self._job_meta.grid_type}." 
- ) - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - self._wrf_hydro_geo_meta = UnstructuredGeoMeta(self._job_meta, self._mpi_meta) - def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: - """Get the grid ranks for the unstructured domain. - - From bmi_model.py. - """ + """Get the grid ranks for the unstructured domain.""" return [bmi_model.grid_2.rank, bmi_model.grid_3.rank] def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: @@ -1776,15 +1733,12 @@ def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: return [bmi_model.grid_2.id, bmi_model.grid_3.id] def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: - """Get the size of the flattened 1D arrays for the unstructured domain. - - From bmi_model.py. - """ + """Get the size of the flattened 1D arrays for the unstructured domain.""" bmi_model._varsize = len( - np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid.shape).flatten() + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() ) bmi_model._varsize_elem = len( - np.zeros(bmi_model._wrf_hydro_geo_meta.latitude_grid_elem.shape).flatten() + np.zeros(bmi_model.geo_meta.latitude_grid_elem.shape).flatten() ) for model_output in bmi_model.get_output_var_names(): @@ -1800,8 +1754,6 @@ def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: """Set the variable names for the unstructured domain. - From bmi_model.py. - Create a Python dictionary that maps CSDMS Standard Names to the model's internal variable names. 
This is going to get long, @@ -1857,12 +1809,18 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: 3, 2, GridType.unstructured ) # Grid 1 is a 2-dimensional grid - bmi_model.grid_2._grid_y = self._wrf_hydro_geo_meta.latitude_grid_elem - bmi_model.grid_2._grid_x = self._wrf_hydro_geo_meta.longitude_grid_elem + bmi_model.grid_2._grid_y = self.geo_meta.latitude_grid_elem + bmi_model.grid_2._grid_x = self.geo_meta.longitude_grid_elem - bmi_model.grid_3._grid_y = self._wrf_hydro_geo_meta.latitude_grid - bmi_model.grid_3._grid_x = self._wrf_hydro_geo_meta.longitude_grid + bmi_model.grid_3._grid_y = self.geo_meta.latitude_grid + bmi_model.grid_3._grid_x = self.geo_meta.longitude_grid - bmi_model.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) - bmi_model.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) + bmi_model.grid_2._size = len(self.geo_meta.latitude_grid_elem) + bmi_model.grid_3._size = len(self.geo_meta.latitude_grid) bmi_model._grids = [bmi_model.grid_2, bmi_model.grid_3] + +BMIMODEL = { + "gridded": NWMv3_Forcing_Engine_BMI_model_Gridded, + "unstructured": NWMv3_Forcing_Engine_BMI_model_Unstructured, + "hydrofabric": NWMv3_Forcing_Engine_BMI_model_HydroFabric, +} From b9142ef3dde1858c8171ff96f96e4262e0deaf18 Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:34:53 -0600 Subject: [PATCH 26/34] move bmi_model conts to the bmi_model dict --- .../NextGen_Forcings_Engine/core/consts.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 29c4b84d..30727623 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -66,6 +66,13 @@ "slope_elem", "slp_azi_elem", ], + }, + "bmi_model": { + "extension_map": { + "gridded": "GRIDDED", 
+ "hydrofabric": "HYDROFABRIC", + "unstructured": "MESH", + }, "_output_var_names": [ "U2D_ELEMENT", "V2D_ELEMENT", @@ -120,13 +127,6 @@ "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], }, }, - "bmi_model": { - "extension_map": { - "gridded": "GRIDDED", - "hydrofabric": "HYDROFABRIC", - "unstructured": "MESH", - } - }, "forcingInputMod": { "InputForcings": [ "nx_global", From 130a0247401de1f9dc01d3beed6656b0c2f82dde Mon Sep 17 00:00:00 2001 From: "Matthew.Deshotel" Date: Wed, 25 Feb 2026 10:35:38 -0600 Subject: [PATCH 27/34] fix method calls and reference base class for consts --- .../NextGen_Forcings_Engine/core/forcingInputMod.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index cd9a9eb3..7ba9023c 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -31,9 +31,9 @@ class InputForcings: - """Abstract class defining parameters of a single input forcing product. + """Class defining parameters of a single input forcing product. - This is an abstract class that will define all the parameters + This is a class that will define all the parameters of a single input forcing product. 
""" @@ -56,7 +56,7 @@ def __init__( self._keyValue = force_key self.idx = idx - for attr in CONSTS[self.__class__.__name__]: + for attr in CONSTS[self.__class__.__base__.__name__]: setattr(self, attr, None) self.find_neighbor_files_map = CONSTS["FIND_NEIGHBOR_FILES_MAP"] @@ -65,13 +65,13 @@ def __init__( self.initialize_config_options() if self.q2dDownscaleOpt > 0: - self.geo_meta.handle_humidity_downscaling() + self.handle_humidity_downscaling() if self.force_count == 8 and 8 in self.input_map_output: # TODO: this assumes that LQFRAC (8) is always the last grib var self.grib_vars = self.grib_vars[:-1] - self.geo_meta.initialize_geo_data(self) + self.initialize_geo_data(self) # Obtain custom input cycle frequencies if self.keyValue == 10 or self.keyValue == 11: From 3e42cef34453afded215c4381474d824ee75e1d7 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Fri, 20 Mar 2026 12:27:38 -0500 Subject: [PATCH 28/34] refactor bugs 1 --- .../NextGen_Forcings_Engine/core/consts.py | 6 ++++ .../core/forcingInputMod.py | 2 +- .../NextGen_Forcings_Engine/core/geoMod.py | 28 ++++++++++--------- 3 files changed, 22 insertions(+), 14 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 30727623..00c2ecf8 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -50,6 +50,8 @@ "slp_azi_elem", "esmf_lat", "esmf_lon", + "latitude_grid_elem", + "longitude_grid_elem", ], "GriddedGeoMeta": [ "nx_local_elem", @@ -65,6 +67,8 @@ "height_elem", "slope_elem", "slp_azi_elem", + "latitude_grid_elem", + "longitude_grid_elem", ], }, "bmi_model": { @@ -187,6 +191,8 @@ "regridded_precip2", "regridded_precip1_elem", "regridded_precip2_elem", + "_grib_vars", + "_cycle_freq", ], "PRODUCT_NAME": { 1: "NLDAS2_GRIB1", diff --git 
a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 7ba9023c..b6dd844f 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -148,7 +148,7 @@ def grib_vars(self) -> list[str] | None: """Map the forcing key value to the required GRIB variable names.""" if self._grib_vars is None: # First call to getter, initialize - self._grib_vars = [self.keyValue] + self._grib_vars = CONSTS["GRIB_VARS"][self.keyValue] return self._grib_vars @grib_vars.setter diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 9d17bc71..660162f4 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -3,7 +3,6 @@ import math from pathlib import Path - import numpy as np # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: @@ -158,12 +157,13 @@ def ncattrs(self, var: str) -> list: def get_var(self, ds: xr.Dataset, var: str) -> xr.DataArray: """Get a variable from a xr.Dataset.""" - if self.mpi_config.rank == 0: - try: - return ds.variables[var] - except Exception as e: - self.config_options.errMsg = f"Unable to extract {var} variable from: {self.config_options.spatial_meta} due to {str(e)}" - raise e + if var is None: + return + try: + return ds.variables[var] + except Exception as e: + self.config_options.errMsg = f"Unable to extract {var} variable from: {self.config_options.spatial_meta} due to {str(e)}" + raise e def get_geogrid_var(self, var: str) -> xr.DataArray: """Get a variable from the geogrid file.""" @@ -895,18 +895,19 @@ def __init__(self, config_options: ConfigOptions, mpi_config: 
MpiConfig): super().__init__(config_options, mpi_config) for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) + pass @property @lru_cache def lat_bounds(self) -> np.ndarray: """Get the latitude bounds for the unstructured domain.""" - return self.get_bound(1) + return self.get_bound(1).values @property @lru_cache def lon_bounds(self) -> np.ndarray: """Get the longitude bounds for the unstructured domain.""" - return self.get_bound(0) + return self.get_bound(0).values def get_bound(self, dim: int) -> np.ndarray: """Get the longitude or latitude bounds for the unstructured domain.""" @@ -918,7 +919,7 @@ def get_bound(self, dim: int) -> np.ndarray: @lru_cache def elementcoords_global(self) -> np.ndarray: """Get the global element coordinates for the unstructured domain.""" - return self.get_geogrid_var(self.config_options.elemcoords_var) + return self.get_geogrid_var(self.config_options.elemcoords_var).values @barrier @broadcast @@ -1015,13 +1016,15 @@ def height(self) -> np.ndarray: @lru_cache def slope(self) -> np.ndarray: """Get the slopes for the unstructured domain.""" - return self.slopes_global[self.pet_element_inds] + if self.slopes_global is not None: + return self.slopes_global[self.pet_element_inds] @property @lru_cache def slp_azi(self) -> np.ndarray: """Get the slope azimuths for the unstructured domain.""" - return self.slp_azi_global[self.pet_element_inds] + if self.slp_azi_global is not None: + return self.slp_azi_global[self.pet_element_inds] @property @lru_cache @@ -1460,4 +1463,3 @@ def nx_local_elem(self) -> int: def ny_local_elem(self) -> int: """Get the local y dimension size for this processor.""" return len(self.esmf_grid.coords[1][1]) - From fc69fba07071c736db49e793bfcfbc44d7d98c93 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Mar 2026 11:33:29 -0500 Subject: [PATCH 29/34] dynamically manage discretization-specific GeoMeta classes --- .../NextGen_Forcings_Engine/bmi_model.py | 24 ++++++++----------- 1 file 
changed, 10 insertions(+), 14 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 946fc669..728c5012 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -216,7 +216,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # If _job_meta was not set by initialize_with_params(), create a default one if self._job_meta is None: self._job_meta = ConfigOptions(self.cfg_bmi) - + # Parse the configuration options try: self._job_meta.validate_config(self.cfg_bmi) @@ -279,10 +279,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids # to have a size of at least 2. - if ( - self.geo_meta.nx_local < 2 - or self.geo_meta.ny_local < 2 - ): + if self.geo_meta.nx_local < 2 or self.geo_meta.ny_local < 2: self._job_meta.errMsg = ( "You have specified too many cores for your WRF-Hydro grid. " "Local grid Must have x/y dimension size of 2." @@ -313,9 +310,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # If we have specified supplemental precipitation products, initialize # the supp class. 
if self._job_meta.number_supp_pcp > 0: - self._supp_pcp_mod = suppPrecipMod.initDict( - self._job_meta, self.geo_meta - ) + self._supp_pcp_mod = suppPrecipMod.initDict(self._job_meta, self.geo_meta) else: self._supp_pcp_mod = None err_handler.check_program_status(self._job_meta, self._mpi_meta) @@ -1462,6 +1457,7 @@ def get_grid_z(self, grid_id: int, z: NDArray[np.float64]) -> NDArray[np.float64 # ------------------------------------------------------------ # ------------------------------------------------------------ + def parse_config(cfg: dict) -> dict: """Parse the provided configuration dictionary (`cfg`) and modifies it based on certain rules. @@ -1572,6 +1568,7 @@ def __init__(self): Initializes the model with default values for time, variables, and grid types. """ super().__init__() + self.GeoMeta = GriddedGeoMeta def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: """Get the grid ranks for the gridded domain.""" @@ -1615,13 +1612,9 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: 1, 2, GridType.uniform_rectilinear ) # Grid 1 is a 2-dimensional grid bmi_model.grid_1._grid_y = bmi_model.geo_meta.latitude_grid.flatten() - bmi_model.grid_1._grid_x = ( - bmi_model.geo_meta.longitude_grid.flatten() - ) + bmi_model.grid_1._grid_x = bmi_model.geo_meta.longitude_grid.flatten() bmi_model.grid_1._shape = bmi_model.geo_meta.latitude_grid.shape - bmi_model.grid_1._size = len( - bmi_model.geo_meta.latitude_grid.flatten() - ) + bmi_model.grid_1._size = len(bmi_model.geo_meta.latitude_grid.flatten()) bmi_model.grid_1._spacing = ( bmi_model.geo_meta.dx_meters, bmi_model.geo_meta.dy_meters, @@ -1649,6 +1642,7 @@ def __init__(self): Initializes the model with default values for time, variables, and grid types. 
""" super().__init__() + self.GeoMeta = HydrofabricGeoMeta def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: """Get the grid ranks for the hydrofabric domain.""" @@ -1720,6 +1714,7 @@ def __init__(self): Initializes the model with default values for time, variables, and grid types. """ super().__init__() + self.GeoMeta = UnstructuredGeoMeta def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: """Get the grid ranks for the unstructured domain.""" @@ -1819,6 +1814,7 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: bmi_model.grid_3._size = len(self.geo_meta.latitude_grid) bmi_model._grids = [bmi_model.grid_2, bmi_model.grid_3] + BMIMODEL = { "gridded": NWMv3_Forcing_Engine_BMI_model_Gridded, "unstructured": NWMv3_Forcing_Engine_BMI_model_Unstructured, From 7f29a3eb343579ef79fdb4e00d0cf6f7b947b4a5 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Mar 2026 11:34:12 -0500 Subject: [PATCH 30/34] fix geomod bugs --- .../NextGen_Forcings_Engine/core/geoMod.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 660162f4..a1ef1f4d 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -157,13 +157,12 @@ def ncattrs(self, var: str) -> list: def get_var(self, ds: xr.Dataset, var: str) -> xr.DataArray: """Get a variable from a xr.Dataset.""" - if var is None: - return - try: - return ds.variables[var] - except Exception as e: - self.config_options.errMsg = f"Unable to extract {var} variable from: {self.config_options.spatial_meta} due to {str(e)}" - raise e + if var is not None: + try: + return ds.variables[var] + except Exception as e: + self.config_options.errMsg = f"Unable to extract {var} variable from: 
{self.config_options.spatial_meta} due to {str(e)}" + raise e def get_geogrid_var(self, var: str) -> xr.DataArray: """Get a variable from the geogrid file.""" @@ -895,7 +894,6 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): super().__init__(config_options, mpi_config) for attr in CONSTS[self.__class__.__name__]: setattr(self, attr, None) - pass @property @lru_cache From acb7ad73e42f99377b1f1032b6f89623c4c3cb83 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Mar 2026 11:35:23 -0500 Subject: [PATCH 31/34] add additional constants for mapping old to new variable names --- .../NextGen_Forcings_Engine/core/consts.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index 00c2ecf8..b38bd561 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -194,6 +194,22 @@ "_grib_vars", "_cycle_freq", ], + "InputForcingsGridded": [ + "t2dTmp_elem", + "psfcTmp_elem", + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + ], + "InputForcingsHydrofabric": [ + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + "t2dTmp_elem", + "psfcTmp_elem", + ], "PRODUCT_NAME": { 1: "NLDAS2_GRIB1", 2: "NARR_GRIB1", @@ -904,4 +920,21 @@ "ZARR": ".zarr", }, }, + "test_utils": { + "OLD_NEW_VAR_MAP": { + "q2dBiasCorrectOpt": "q2BiasCorrectOpt", + "paramDir": "dScaleParamDirs", + "border": "ignored_border_widths", + "regridOpt": "regrid_opt", + "userFcstHorizon": "fcst_input_horizons", + "inDir": "input_force_dirs", + "swDowscaleOpt": "swDownscaleOpt", + "t2dBiasCorrectOpt": "t2BiasCorrectOpt", + "userCycleOffset": "fcst_input_offsets", + "windBiasCorrectOpt": "windBiasCorrect", + "timeInterpOpt": "forceTemoralInterp", + 
"enforce": "input_force_mandatory", + "file_type": "input_force_types", + } + }, } From d6ca3fa22ea4c6956145d31a4056e929d9856e62 Mon Sep 17 00:00:00 2001 From: Matt Deshotel Date: Tue, 24 Mar 2026 11:36:02 -0500 Subject: [PATCH 32/34] fix bugs in forcingInputMod --- .../core/forcingInputMod.py | 292 ++++++++++-------- 1 file changed, 161 insertions(+), 131 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index b6dd844f..aa3752ce 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -7,6 +7,7 @@ from __future__ import annotations import logging +from functools import lru_cache from pathlib import Path from typing import TYPE_CHECKING @@ -64,26 +65,21 @@ def __init__( self.temporal_interpolate_inputs_map = CONSTS["TEMPORAL_INTERPOLATE_INPUTS_MAP"] self.initialize_config_options() - if self.q2dDownscaleOpt > 0: - self.handle_humidity_downscaling() if self.force_count == 8 and 8 in self.input_map_output: # TODO: this assumes that LQFRAC (8) is always the last grib var self.grib_vars = self.grib_vars[:-1] - self.initialize_geo_data(self) - # Obtain custom input cycle frequencies if self.keyValue == 10 or self.keyValue == 11: self.cycle_freq = self.config_options.customFcstFreq[self.custom_count] def initialize_config_options(self) -> None: """Initialize configuration options from the config_options attribute.""" - [ - setattr(self, key, val[self.idx]) - for key, val in list(vars(self.config_options).items()) - if isinstance(val, list) and len(val) > 0 - ] + for key, val in list(vars(self.config_options).items()): + if isinstance(val, list) and len(val) > 0: + setattr(self, key, val[self.idx]) + LOG.info(key) @property def force_count(self) -> int: @@ -266,62 +262,68 @@ def __init__( ) -> None: """Initialize 
InputForcingsGridded with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) - def initialize_geo_data( - self, - input_forcings: InputForcings, - ) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. - - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. - """ - input_forcings.final_forcings = np.empty( + @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + return np.empty( [ - input_forcings.force_count, - input_forcings.geo_meta.ny_local, - input_forcings.geo_meta.nx_local, + self.force_count, + self.geo_meta.ny_local, + self.geo_meta.nx_local, ], np.float64, ) - input_forcings.height = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + + @property + @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32, ) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32, ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], + + @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32, ) - input_forcings.final_forcings_elem = None - 
input_forcings.height_elem = None - input_forcings.regridded_mask_elem = None - input_forcings.regridded_mask_elem_AORC = None - def handle_humidity_downscaling( - self, - input_forcings: InputForcings, - ) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. + @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. - """ - input_forcings.t2dTmp = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local, input_forcings.geo_meta.nx_local], - np.float32, - ) - input_forcings.t2dTmp_elem = None - input_forcings.psfcTmp_elem = None + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) class InputForcingsHydrofabric(InputForcings): @@ -341,46 +343,46 @@ def __init__( ) -> None: """Initialize InputForcingsHydrofabric with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) - def initialize_geo_data(self, input_forcings: InputForcings) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. 
+ @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) - From forcingInputMod.py + @property + @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. - """ - input_forcings.final_forcings = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 - ) - input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.final_forcings_elem = None - input_forcings.height_elem = None - input_forcings.regridded_mask_elem = None - input_forcings.regridded_mask_elem_AORC = None + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) - def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. 
+ @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) - From forcingInputMod.py + @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. - """ - input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.t2dTmp_elem = None - input_forcings.psfcTmp_elem = None + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) class InputForcingsUnstructured(InputForcings): @@ -400,59 +402,87 @@ def __init__( ) -> None: """Initialize InputForcingsUnstructured with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in CONSTS[self.__class__.__name__]: + setattr(self, attr, None) - def handle_humidity_downscaling(self, input_forcings: InputForcings) -> None: - """Initialize temporary arrays for specific humidity downscaling if specified in configuration. 
+ @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) - From forcingInputMod.py + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) - If we have specified specific humidity downscaling, establish arrays to hold - temporary temperature arrays that are un-downscaled. - """ - input_forcings.t2dTmp = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.psfcTmp = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.t2dTmp_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.psfcTmp_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) + @property + @lru_cache + def t2dTmp_elem(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local_elem], np.float32) - def initialize_geo_data(self, input_forcings: InputForcings) -> None: - """Initialize geometry-related arrays based on grid type and downscaling options. + @property + @lru_cache + def psfcTmp_elem(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local_elem], np.float32) - From forcingInputMod.py + @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) - Initialize the local final grid of values. This is represntative - of the local grid for this forcing, for a specific output timesetp. - This grid will be updated from one output timestep to another, and - also through downscaling and bias correction. 
- """ - input_forcings.final_forcings = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local], np.float64 - ) - input_forcings.height = np.empty([input_forcings.geo_meta.ny_local], np.float32) - input_forcings.regridded_mask = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.regridded_mask_AORC = np.empty( - [input_forcings.geo_meta.ny_local], np.float32 - ) - input_forcings.final_forcings_elem = np.empty( - [input_forcings.force_count, input_forcings.geo_meta.ny_local_elem], + @property + @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def final_forcings_elem(self): + """Initialize the local final grid of values on elements.""" + return np.empty( + [self.force_count, self.geo_meta.ny_local_elem], np.float64, ) - input_forcings.height_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.regridded_mask_elem = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) - input_forcings.regridded_mask_elem_AORC = np.empty( - [input_forcings.geo_meta.ny_local_elem], np.float32 - ) + + @property + @lru_cache + def height_elem(self): + """Initialize the local height grid on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def regridded_mask_elem(self): + """Initialize the local regridded mask grid on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def regridded_mask_elem_AORC(self): + """Initialize the local regridded AORC mask grid 
on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) INPUTFORCINGS = { From 10f27249c4fee995ffa4a1a7c8acdb4fa92837de Mon Sep 17 00:00:00 2001 From: Matthew Deshotel Date: Thu, 26 Mar 2026 16:16:43 -0500 Subject: [PATCH 33/34] split consts dict to multiple dicts --- .../NextGen_Forcings_Engine/bmi_model.py | 43 +- .../NextGen_Forcings_Engine/core/consts.py | 1852 +++++++++-------- .../core/forcingInputMod.py | 33 +- .../NextGen_Forcings_Engine/core/geoMod.py | 11 +- 4 files changed, 969 insertions(+), 970 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 728c5012..d7f9a8cc 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -28,7 +28,7 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import BMI_MODEL from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( GriddedGeoMeta, HydrofabricGeoMeta, @@ -68,7 +68,6 @@ from nextgen_forcings_ewts import MODULE_NAME, configure_logging -CONSTS = CONSTS[Path(__file__).stem] configure_logging() @@ -387,7 +386,7 @@ def _configure_output_path(self, output_path: str | None = None) -> None: return # Already configured or no output object to configure if self._job_meta.forcing_output == 1: - ext = CONSTS["extension_map"].get(self._job_meta.grid_type) + ext = BMI_MODEL["extension_map"].get(self._job_meta.grid_type) if ext is None: raise ValueError(f"Invalid grid_type: {self._job_meta.grid_type}") @@ -1598,15 +1597,15 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: # Flag here to indicate whether or not the NWM operational configuration # will support a 
BMI field for liquid fraction of precipitation if bmi_model.config_options.include_lqfrac == 1: - bmi_model._output_var_names = CONSTS["_output_var_names"].append( + bmi_model._output_var_names = BMI_MODEL["_output_var_names"].append( "LQFRAC_ELEMENT" ) - bmi_model._var_name_units_map = CONSTS["_var_name_units_map"] | { + bmi_model._var_name_units_map = BMI_MODEL["_var_name_units_map"] | { "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"] } else: - bmi_model._output_var_names = CONSTS["_output_var_names"] - bmi_model._var_name_units_map = CONSTS["_var_name_units_map"] + bmi_model._output_var_names = BMI_MODEL["_output_var_names"] + bmi_model._var_name_units_map = BMI_MODEL["_var_name_units_map"] bmi_model.grid_1 = Grid( 1, 2, GridType.uniform_rectilinear @@ -1672,18 +1671,18 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): # will support a BMI field for liquid fraction of precipitation if bmi_model._job_meta.include_lqfrac == 1: bmi_model._output_var_names = ( - ["CAT-ID"] + CONSTS["_output_var_names"] + ["LQFRAC_ELEMENT"] + ["CAT-ID"] + BMI_MODEL["_output_var_names"] + ["LQFRAC_ELEMENT"] ) bmi_model._var_name_units_map = ( {"CAT-ID": ["Catchment ID", ""]} - | CONSTS["_var_name_units_map"] + | BMI_MODEL["_var_name_units_map"] | { "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], } ) else: - bmi_model._output_var_names = ["CAT-ID"] + CONSTS["_output_var_names"] - bmi_model._var_name_units_map = {"CAT-ID": ["Catchment ID", ""]} | CONSTS[ + bmi_model._output_var_names = ["CAT-ID"] + BMI_MODEL["_output_var_names"] + bmi_model._var_name_units_map = {"CAT-ID": ["Catchment ID", ""]} | BMI_MODEL[ "_var_name_units_map" ] @@ -1758,43 +1757,43 @@ def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: # will support a BMI field for liquid fraction of precipitation if bmi_model._job_meta.include_lqfrac == 1: bmi_model._output_var_names = ( - CONSTS["_output_var_names"] + BMI_MODEL["_output_var_names"] + 
["LQFRAC_ELEMENT"] - + CONSTS["_output_var_names_unstructured"] + + BMI_MODEL["_output_var_names_unstructured"] ) +["LQFRAC_NODE"] bmi_model._var_name_units_map = ( - CONSTS["_var_name_units_map"] + BMI_MODEL["_var_name_units_map"] | {"LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"]} - | CONSTS["_var_name_units_map_unstructured"] + | BMI_MODEL["_var_name_units_map_unstructured"] | {"LQFRAC_NODE": ["Liquid Fraction of Precipitation", "%"]} ) bmi_model._grid_map = ( - {var_name: bmi_model.grid_2 for var_name in CONSTS["_output_var_names"]} + {var_name: bmi_model.grid_2 for var_name in BMI_MODEL["_output_var_names"]} | {"LQFRAC_ELEMENT": bmi_model.grid_2} | { var_name: bmi_model.grid_3 - for var_name in CONSTS["_output_var_names_unstructured"] + for var_name in BMI_MODEL["_output_var_names_unstructured"] } | {"LQFRAC_NODE": bmi_model.grid_3} ) else: bmi_model._output_var_names = ( - CONSTS["_output_var_names"] + CONSTS["_output_var_names_unstructured"] + BMI_MODEL["_output_var_names"] + BMI_MODEL["_output_var_names_unstructured"] ) bmi_model._var_name_units_map = ( - CONSTS["_var_name_units_map"] - | CONSTS["_var_name_units_map_unstructured"] + BMI_MODEL["_var_name_units_map"] + | BMI_MODEL["_var_name_units_map_unstructured"] ) bmi_model._grid_map = { - var_name: bmi_model.grid_2 for var_name in CONSTS["_output_var_names"] + var_name: bmi_model.grid_2 for var_name in BMI_MODEL["_output_var_names"] } | { var_name: bmi_model.grid_3 - for var_name in CONSTS["_output_var_names_unstructured"] + for var_name in BMI_MODEL["_output_var_names_unstructured"] } bmi_model.grid_2 = Grid( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py index b38bd561..e69a4d2e 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -8,933 +8,935 @@ weighted_average, ) -CONSTS = { - 
"geoMod": { - "GeoMeta": [ - "nodeCoords", - "centerCoords", - "inds", - "esmf_lat", - "esmf_lon", - ], - "UnstructuredGeoMeta": [ - "x_lower_bound", - "x_upper_bound", - "y_lower_bound", - "y_upper_bound", - "dx_meters", - "dy_meters", - "element_ids", - "element_ids_global", - "sina_grid", - "cosa_grid", - "esmf_lat", - "esmf_lon", - ], - "HydrofabricGeoMeta": [ - "nx_local_elem", - "ny_local_elem", - "x_lower_bound", - "x_upper_bound", - "y_lower_bound", - "y_upper_bound", - "nx_global_elem", - "ny_global_elem", - "dx_meters", - "dy_meters", - "mesh_inds_elem", - "height_elem", - "sina_grid", - "cosa_grid", - "slope_elem", - "slp_azi_elem", - "esmf_lat", - "esmf_lon", - "latitude_grid_elem", - "longitude_grid_elem", - ], - "GriddedGeoMeta": [ - "nx_local_elem", - "ny_local_elem", - "nx_global_elem", - "ny_global_elem", - "element_ids", - "element_ids_global", - "lat_bounds", - "lon_bounds", - "mesh_inds", - "mesh_inds_elem", - "height_elem", - "slope_elem", - "slp_azi_elem", - "latitude_grid_elem", - "longitude_grid_elem", + +GEOMOD= { + "GeoMeta": [ + "nodeCoords", + "centerCoords", + "inds", + "esmf_lat", + "esmf_lon", + ], + "UnstructuredGeoMeta": [ + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "dx_meters", + "dy_meters", + "element_ids", + "element_ids_global", + "sina_grid", + "cosa_grid", + "esmf_lat", + "esmf_lon", + ], + "HydrofabricGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "nx_global_elem", + "ny_global_elem", + "dx_meters", + "dy_meters", + "mesh_inds_elem", + "height_elem", + "sina_grid", + "cosa_grid", + "slope_elem", + "slp_azi_elem", + "esmf_lat", + "esmf_lon", + "latitude_grid_elem", + "longitude_grid_elem", + ], + "GriddedGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "nx_global_elem", + "ny_global_elem", + "element_ids", + "element_ids_global", + "lat_bounds", + "lon_bounds", + "mesh_inds", + "mesh_inds_elem", + 
"height_elem", + "slope_elem", + "slp_azi_elem", + "latitude_grid_elem", + "longitude_grid_elem", + ], +} +BMI_MODEL= { + "extension_map": { + "gridded": "GRIDDED", + "hydrofabric": "HYDROFABRIC", + "unstructured": "MESH", + }, + "_output_var_names": [ + "U2D_ELEMENT", + "V2D_ELEMENT", + "LWDOWN_ELEMENT", + "SWDOWN_ELEMENT", + "T2D_ELEMENT", + "Q2D_ELEMENT", + "PSFC_ELEMENT", + "RAINRATE_ELEMENT", + ], + "_output_var_names_unstructured": [ + "U2D_NODE", + "V2D_NODE", + "LWDOWN_NODE", + "SWDOWN_NODE", + "T2D_NODE", + "Q2D_NODE", + "PSFC_NODE", + "RAINRATE_NODE", + "LQFRAC_NODE", + ], + "_var_name_units_map": { + "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], + "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], + "T2D_ELEMENT": ["2-m Air Temperature", "K"], + "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_ELEMENT": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_ELEMENT": [ + "Surface downward short-wave radiation flux", + "W/m^2", ], + "PSFC_ELEMENT": ["Surface Pressure", "Pa"], + "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], }, - "bmi_model": { - "extension_map": { - "gridded": "GRIDDED", - "hydrofabric": "HYDROFABRIC", - "unstructured": "MESH", - }, - "_output_var_names": [ - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ], - "_output_var_names_unstructured": [ - "U2D_NODE", - "V2D_NODE", - "LWDOWN_NODE", - "SWDOWN_NODE", - "T2D_NODE", - "Q2D_NODE", - "PSFC_NODE", - "RAINRATE_NODE", - "LQFRAC_NODE", - ], - "_var_name_units_map": { - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - 
"PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - }, - "_var_name_units_map_unstructured": { - "U2D_NODE": ["10-m U-component of wind", "m/s"], - "V2D_NODE": ["10-m V-component of wind", "m/s"], - "T2D_NODE": ["2-m Air Temperature", "K"], - "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_NODE": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_NODE": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_NODE": ["Surface Pressure", "Pa"], - "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], - }, + "_var_name_units_map_unstructured": { + "U2D_NODE": ["10-m U-component of wind", "m/s"], + "V2D_NODE": ["10-m V-component of wind", "m/s"], + "T2D_NODE": ["2-m Air Temperature", "K"], + "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_NODE": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_NODE": [ + "Surface downward short-wave radiation flux", + "W/m^2", + ], + "PSFC_NODE": ["Surface Pressure", "Pa"], + "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], }, - "forcingInputMod": { - "InputForcings": [ - "nx_global", - "ny_global", - "nx_local", - "ny_local", - "nx_local_corner", - "ny_local_corner", - "x_lower_bound", - "x_upper_bound", - "y_lower_bound", - "y_upper_bound", - "x_lower_bound_corner", - "x_upper_bound_corner", - "y_lower_bound_corner", - "y_upper_bound_corner", - "outFreq", - "lapseGrid", - "rqiClimoGrid", - "nwmPRISM_numGrid", - "nwmPRISM_denGrid", - "esmf_lats", - "esmf_lons", - "esmf_grid_in", - "esmf_grid_in_elem", - "regridObj", - "regridObj_elem", - "esmf_field_in", - "esmf_field_in_elem", - "esmf_field_out", - "esmf_field_out_elem", - # -------------------------------- - # Only used for CFSv2 bias correction - # as bias correction needs to take - # place prior to regridding. 
- "coarse_input_forcings1", - "coarse_input_forcings2", - # -------------------------------- - "regridded_forcings1", - "regridded_forcings2", - "globalPcpRate1", - "globalPcpRate2", - "regridded_forcings1_elem", - "regridded_forcings2_elem", - "globalPcpRate1_elem", - "globalPcpRate2_elem", - "ndv", - "file_in1", - "file_in2", - "fcst_hour1", - "fcst_hour2", - "fcst_date1", - "fcst_date2", - "height_elem", - "tmpFile", - "tmpFileHeight", - "regridded_precip1", - "regridded_precip2", - "regridded_precip1_elem", - "regridded_precip2_elem", - "_grib_vars", - "_cycle_freq", - ], - "InputForcingsGridded": [ - "t2dTmp_elem", - "psfcTmp_elem", - "final_forcings_elem", - "height_elem", - "regridded_mask_elem", - "regridded_mask_elem_AORC", - ], - "InputForcingsHydrofabric": [ - "final_forcings_elem", - "height_elem", - "regridded_mask_elem", - "regridded_mask_elem_AORC", - "t2dTmp_elem", - "psfcTmp_elem", - ], - "PRODUCT_NAME": { - 1: "NLDAS2_GRIB1", - 2: "NARR_GRIB1", - 3: "GFS_Production_GRIB2", - 4: "NAM_Conus_Nest_GRIB2", - 5: "HRRR_Conus_GRIB2", - 6: "RAP_Conus_GRIB2", - 7: "CFSv2_6Hr_Global_GRIB2", - 8: "WRF_ARW_Hawaii_GRIB2", - 9: "GFS_Production_025d_GRIB2", - 10: "Custom_NetCDF_Hourly", - 11: "Custom_NetCDF_Hourly", - 12: "AORC", - 13: "NAM_Nest_3km_Hawaii", - 14: "NAM_Nest_3km_PuertoRico", - 15: "NAM_Nest_3km_Alaska", - 16: "NAM_Nest_3km_Hawaii_Radiation-Only", - 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", - 18: "WRF_ARW_PuertoRico_GRIB2", - 19: "HRRR_Alaska_GRIB2", - 20: "Alaska_AnA", - 21: "AORC_Alaska", - 22: "Alaska_ExtAnA", - 23: "ERA5", - 24: "NBM", - 25: "NDFD", - 26: "HRRR_15min", - 27: "NWM", - }, - "CYCLE_FREQ": { - 1: 60, - 2: 180, - 3: 360, - 4: 360, - 5: 60, - 6: 60, - 7: 360, - 8: 1440, - 9: 360, - 10: -9999, - 11: -9999, - 12: -9999, - 13: 360, - 14: 360, - 15: 360, - 16: 360, - 17: 360, - 18: 1440, - 19: 180, - 20: 180, - 21: -9999, - 22: 180, - 23: -9999, - 24: 60, - 25: 1440, - 26: 15, - 27: -9999, - }, - "GRIB_VARS": { - 1: ["TMP", "SPFH", 
"UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 2: None, - 3: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "PRATE", - "DSWRF", - "DLWRF", - "PRES", - "CPOFP", - ], - 4: None, - 5: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "CPOFP", - ], - 6: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "FROZR", - ], - 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 9: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 10: None, - 11: None, - 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 16: ["DSWRF", "DLWRF"], - 17: ["DSWRF", "DLWRF"], - 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 24: ["TMP", "APCP"], - 25: ["TMP", "WDIR", "WSPD", "APCP"], - 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 27: [ - "T2D", - "Q2D", - "U2D", - "V2D", - "RAINRATE", - "SWDOWN", - "LWDOWN", - "PSFC", - ], - }, - "GRIB_LEVELS": { - 1: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 2: None, - 3: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 4: None, - 5: [ - "2 m above 
ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 6: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 7: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 8: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 9: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 10: None, - 11: None, - 12: None, - 13: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 14: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 15: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 16: ["surface", "surface"], - 17: ["surface", "surface"], - 18: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 19: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 20: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 21: None, - 22: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 23: None, - 24: ["2 m above ground", "surface"], - 25: [ - "2 m above ground", 
- "10 m above ground", - "10 m above ground", - "surface", - ], - 26: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 27: None, - }, - "NET_CDF_VARS_NAMES": { - 1: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 2: None, - 3: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 4: None, - 5: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 6: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "FROZR_surface", - ], - 7: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 8: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 9: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 12: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 13: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", 
- "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 14: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 15: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 16: ["DSWRF_surface", "DLWRF_surface"], - 17: ["DSWRF_surface", "DLWRF_surface"], - 18: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 19: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 20: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 21: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 22: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], - 24: ["TMP_2maboveground", "APCP_surface"], - 25: [ - "TMP_2maboveground", - "WDIR_10maboveground", - "WIND_10maboveground", - "APCP_surface", - ], - 26: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], - }, - "GRIB_MES_IDX": { - 1: None, - 2: None, - 3: None, - 4: None, - 5: 
None, - 6: None, - 7: None, - 8: None, - 9: [33, 34, 39, 40, 43, 88, 91, 6], - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: None, - 27: None, - }, - "INPUT_MAP_OUTPUT": { - 1: [4, 5, 0, 1, 3, 7, 2, 6], - 2: None, - 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 4: None, - 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 7: [4, 5, 0, 1, 3, 7, 2, 6], - 8: [4, 5, 0, 1, 3, 6], - 9: [4, 5, 0, 1, 3, 7, 2, 6], - 10: [4, 5, 0, 1, 3, 7, 2, 6], - 11: [4, 5, 0, 1, 3, 7, 2, 6], - 12: [4, 5, 0, 1, 3, 7, 2, 6], - 13: [4, 5, 0, 1, 3, 7, 2, 6], - 14: [4, 5, 0, 1, 3, 7, 2, 6], - 15: [4, 5, 0, 1, 3, 7, 2, 6], - 16: [7, 2], - 17: [7, 2], - 18: [4, 5, 0, 1, 3, 6], - 19: [4, 5, 0, 1, 3, 7, 2, 6], - 20: [4, 5, 0, 1, 3, 7, 2, 6], - 21: [4, 5, 0, 1, 3, 7, 2, 6], - 22: [4, 5, 0, 1, 3, 7, 2, 6], - 23: [4, 5, 0, 1, 3, 7, 2, 6], - 24: [4, 3], - 25: [4, 0, 1, 3], - 26: [4, 5, 0, 1, 3, 7, 2, 6], - 27: [4, 5, 0, 1, 3, 7, 2, 6], - }, - "FORECAST_HORIZONS": { - 1: None, - 2: None, - 3: None, - 4: None, - 5: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 6: [ - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - ], - 7: None, - 8: None, - 9: None, - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 27: None, - }, - "FIND_NEIGHBOR_FILES_MAP": { - 1: time_handling.find_nldas_neighbors, - 3: time_handling.find_gfs_neighbors, - 5: 
time_handling.find_input_neighbors, - 6: time_handling.find_input_neighbors, - 7: time_handling.find_cfsv2_neighbors, - 8: time_handling.find_hourly_wrf_arw_neighbors, - 9: time_handling.find_gfs_neighbors, - 10: time_handling.find_custom_hourly_neighbors, - 11: time_handling.find_custom_hourly_neighbors, - 12: time_handling.find_aorc_neighbors, - 13: time_handling.find_nam_nest_neighbors, - 14: time_handling.find_nam_nest_neighbors, - 15: time_handling.find_nam_nest_neighbors, - 16: time_handling.find_nam_nest_neighbors, - 17: time_handling.find_nam_nest_neighbors, - 18: time_handling.find_hourly_wrf_arw_neighbors, - 19: time_handling.find_ak_hrrr_neighbors, - 20: time_handling.find_ak_hrrr_neighbors, - 21: time_handling.find_aorc_neighbors, - 22: time_handling.find_ak_hrrr_neighbors, - 23: time_handling.find_era5_neighbors, - 24: time_handling.find_hourly_nbm_neighbors, - 25: time_handling.find_ndfd_neighbors, - 26: time_handling.find_input_neighbors, - 27: time_handling.find_nwm_neighbors, - }, - "REGRID_MAP": { - 1: regrid.regrid_conus_rap, - 3: regrid.regrid_gfs, - 5: regrid.regrid_conus_hrrr, - 6: regrid.regrid_conus_rap, - 7: regrid.regrid_cfsv2, - 8: regrid.regrid_hourly_wrf_arw, - 9: regrid.regrid_gfs, - 10: regrid.regrid_custom_hourly_netcdf, - 11: regrid.regrid_custom_hourly_netcdf, - 12: regrid.regrid_custom_hourly_netcdf, - 13: regrid.regrid_nam_nest, - 14: regrid.regrid_nam_nest, - 15: regrid.regrid_nam_nest, - 16: regrid.regrid_nam_nest, - 17: regrid.regrid_nam_nest, - 18: regrid.regrid_hourly_wrf_arw, - 19: regrid.regrid_conus_hrrr, - 20: regrid.regrid_conus_hrrr, - 21: regrid.regrid_custom_hourly_netcdf, - 22: regrid.regrid_conus_hrrr, - 23: regrid.regrid_era5, - 24: regrid.regrid_hourly_nbm, - 25: regrid.regrid_ndfd, - 26: regrid.regrid_conus_hrrr, - 27: regrid.regrid_nwm, - }, - "TEMPORAL_INTERPOLATE_INPUTS_MAP": { - 0: no_interpolation, - 1: nearest_neighbor, - 2: weighted_average, - }, - "FILE_EXT": { - "GRIB1": ".grb", - "GRIB2": ".grib2", - 
"NETCDF": ".nc", - "NETCDF4": ".nc4", - "NWM": ".LDASIN_DOMAIN1", - "ZARR": ".zarr", - }, +} +FORCINGINPUTMOD= { + "InputForcings": [ + "nx_global", + "ny_global", + "nx_local", + "ny_local", + "nx_local_corner", + "ny_local_corner", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "x_lower_bound_corner", + "x_upper_bound_corner", + "y_lower_bound_corner", + "y_upper_bound_corner", + "outFreq", + "lapseGrid", + "rqiClimoGrid", + "nwmPRISM_numGrid", + "nwmPRISM_denGrid", + "esmf_lats", + "esmf_lons", + "esmf_grid_in", + "esmf_grid_in_elem", + "regridObj", + "regridObj_elem", + "esmf_field_in", + "esmf_field_in_elem", + "esmf_field_out", + "esmf_field_out_elem", + # -------------------------------- + # Only used for CFSv2 bias correction + # as bias correction needs to take + # place prior to regridding. + "coarse_input_forcings1", + "coarse_input_forcings2", + # -------------------------------- + "regridded_forcings1", + "regridded_forcings2", + "globalPcpRate1", + "globalPcpRate2", + "regridded_forcings1_elem", + "regridded_forcings2_elem", + "globalPcpRate1_elem", + "globalPcpRate2_elem", + "ndv", + "file_in1", + "file_in2", + "fcst_hour1", + "fcst_hour2", + "fcst_date1", + "fcst_date2", + "height_elem", + "tmpFile", + "tmpFileHeight", + "regridded_precip1", + "regridded_precip2", + "regridded_precip1_elem", + "regridded_precip2_elem", + "_grib_vars", + "_cycle_freq", + "_t2dTmp", + "_psfcTmp", + "_final_forcings", + ], + "InputForcingsGridded": [ + "t2dTmp_elem", + "psfcTmp_elem", + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + ], + "InputForcingsHydrofabric": [ + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + "t2dTmp_elem", + "psfcTmp_elem", + ], + "PRODUCT_NAME": { + 1: "NLDAS2_GRIB1", + 2: "NARR_GRIB1", + 3: "GFS_Production_GRIB2", + 4: "NAM_Conus_Nest_GRIB2", + 5: "HRRR_Conus_GRIB2", + 6: "RAP_Conus_GRIB2", + 7: 
"CFSv2_6Hr_Global_GRIB2", + 8: "WRF_ARW_Hawaii_GRIB2", + 9: "GFS_Production_025d_GRIB2", + 10: "Custom_NetCDF_Hourly", + 11: "Custom_NetCDF_Hourly", + 12: "AORC", + 13: "NAM_Nest_3km_Hawaii", + 14: "NAM_Nest_3km_PuertoRico", + 15: "NAM_Nest_3km_Alaska", + 16: "NAM_Nest_3km_Hawaii_Radiation-Only", + 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", + 18: "WRF_ARW_PuertoRico_GRIB2", + 19: "HRRR_Alaska_GRIB2", + 20: "Alaska_AnA", + 21: "AORC_Alaska", + 22: "Alaska_ExtAnA", + 23: "ERA5", + 24: "NBM", + 25: "NDFD", + 26: "HRRR_15min", + 27: "NWM", }, - "test_utils": { - "OLD_NEW_VAR_MAP": { - "q2dBiasCorrectOpt": "q2BiasCorrectOpt", - "paramDir": "dScaleParamDirs", - "border": "ignored_border_widths", - "regridOpt": "regrid_opt", - "userFcstHorizon": "fcst_input_horizons", - "inDir": "input_force_dirs", - "swDowscaleOpt": "swDownscaleOpt", - "t2dBiasCorrectOpt": "t2BiasCorrectOpt", - "userCycleOffset": "fcst_input_offsets", - "windBiasCorrectOpt": "windBiasCorrect", - "timeInterpOpt": "forceTemoralInterp", - "enforce": "input_force_mandatory", - "file_type": "input_force_types", - } + "CYCLE_FREQ": { + 1: 60, + 2: 180, + 3: 360, + 4: 360, + 5: 60, + 6: 60, + 7: 360, + 8: 1440, + 9: 360, + 10: -9999, + 11: -9999, + 12: -9999, + 13: 360, + 14: 360, + 15: 360, + 16: 360, + 17: 360, + 18: 1440, + 19: 180, + 20: 180, + 21: -9999, + 22: 180, + 23: -9999, + 24: 60, + 25: 1440, + 26: 15, + 27: -9999, }, -} + "GRIB_VARS": { + 1: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 2: None, + 3: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "PRATE", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 4: None, + 5: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 6: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "FROZR", + ], + 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 9: ["TMP", "SPFH", "UGRD", 
"VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 10: None, + 11: None, + 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 16: ["DSWRF", "DLWRF"], + 17: ["DSWRF", "DLWRF"], + 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 24: ["TMP", "APCP"], + 25: ["TMP", "WDIR", "WSPD", "APCP"], + 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 27: [ + "T2D", + "Q2D", + "U2D", + "V2D", + "RAINRATE", + "SWDOWN", + "LWDOWN", + "PSFC", + ], + }, + "GRIB_LEVELS": { + 1: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 2: None, + 3: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 4: None, + 5: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 6: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 7: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 8: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + 
"10 m above ground", + "surface", + "surface", + ], + 9: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 10: None, + 11: None, + 12: None, + 13: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 14: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 15: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 16: ["surface", "surface"], + 17: ["surface", "surface"], + 18: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + ], + 19: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 20: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 21: None, + 22: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 23: None, + 24: ["2 m above ground", "surface"], + 25: [ + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + ], + 26: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 27: None, + }, + "NET_CDF_VARS_NAMES": { + 1: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 2: None, + 3: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + 
"VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 4: None, + 5: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 6: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "FROZR_surface", + ], + 7: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 8: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 9: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 12: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 13: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 14: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 15: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 16: ["DSWRF_surface", "DLWRF_surface"], + 17: 
["DSWRF_surface", "DLWRF_surface"], + 18: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 19: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 20: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 21: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 22: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], + 24: ["TMP_2maboveground", "APCP_surface"], + 25: [ + "TMP_2maboveground", + "WDIR_10maboveground", + "WIND_10maboveground", + "APCP_surface", + ], + 26: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], + }, + "GRIB_MES_IDX": { + 1: None, + 2: None, + 3: None, + 4: None, + 5: None, + 6: None, + 7: None, + 8: None, + 9: [33, 34, 39, 40, 43, 88, 91, 6], + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: None, + 27: None, + }, + "INPUT_MAP_OUTPUT": { + 1: [4, 5, 0, 1, 3, 7, 2, 6], + 2: None, + 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 4: None, + 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 7: [4, 5, 0, 1, 3, 7, 2, 6], + 8: [4, 5, 0, 
1, 3, 6], + 9: [4, 5, 0, 1, 3, 7, 2, 6], + 10: [4, 5, 0, 1, 3, 7, 2, 6], + 11: [4, 5, 0, 1, 3, 7, 2, 6], + 12: [4, 5, 0, 1, 3, 7, 2, 6], + 13: [4, 5, 0, 1, 3, 7, 2, 6], + 14: [4, 5, 0, 1, 3, 7, 2, 6], + 15: [4, 5, 0, 1, 3, 7, 2, 6], + 16: [7, 2], + 17: [7, 2], + 18: [4, 5, 0, 1, 3, 6], + 19: [4, 5, 0, 1, 3, 7, 2, 6], + 20: [4, 5, 0, 1, 3, 7, 2, 6], + 21: [4, 5, 0, 1, 3, 7, 2, 6], + 22: [4, 5, 0, 1, 3, 7, 2, 6], + 23: [4, 5, 0, 1, 3, 7, 2, 6], + 24: [4, 3], + 25: [4, 0, 1, 3], + 26: [4, 5, 0, 1, 3, 7, 2, 6], + 27: [4, 5, 0, 1, 3, 7, 2, 6], + }, + "FORECAST_HORIZONS": { + 1: None, + 2: None, + 3: None, + 4: None, + 5: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 6: [ + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + ], + 7: None, + 8: None, + 9: None, + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 27: None, + }, + "FIND_NEIGHBOR_FILES_MAP": { + 1: time_handling.find_nldas_neighbors, + 3: time_handling.find_gfs_neighbors, + 5: time_handling.find_conus_hrrr_neighbors, + 6: time_handling.find_conus_rap_neighbors, + 7: time_handling.find_cfsv2_neighbors, + 8: time_handling.find_hourly_wrf_arw_neighbors, + 9: time_handling.find_gfs_neighbors, + 10: time_handling.find_custom_hourly_neighbors, + 11: time_handling.find_custom_hourly_neighbors, + 12: time_handling.find_aorc_neighbors, + 13: time_handling.find_nam_nest_neighbors, + 14: time_handling.find_nam_nest_neighbors, + 15: time_handling.find_nam_nest_neighbors, + 16: time_handling.find_nam_nest_neighbors, + 17: 
time_handling.find_nam_nest_neighbors, + 18: time_handling.find_hourly_wrf_arw_neighbors, + 19: time_handling.find_ak_hrrr_neighbors, + 20: time_handling.find_ak_hrrr_neighbors, + 21: time_handling.find_aorc_neighbors, + 22: time_handling.find_ak_hrrr_neighbors, + 23: time_handling.find_era5_neighbors, + 24: time_handling.find_hourly_nbm_neighbors, + 25: time_handling.find_ndfd_neighbors, + 26: time_handling.find_input_neighbors, + 27: time_handling.find_nwm_neighbors, + }, + "REGRID_MAP": { + 1: regrid.regrid_conus_rap, + 3: regrid.regrid_gfs, + 5: regrid.regrid_conus_hrrr, + 6: regrid.regrid_conus_rap, + 7: regrid.regrid_cfsv2, + 8: regrid.regrid_hourly_wrf_arw, + 9: regrid.regrid_gfs, + 10: regrid.regrid_custom_hourly_netcdf, + 11: regrid.regrid_custom_hourly_netcdf, + 12: regrid.regrid_custom_hourly_netcdf, + 13: regrid.regrid_nam_nest, + 14: regrid.regrid_nam_nest, + 15: regrid.regrid_nam_nest, + 16: regrid.regrid_nam_nest, + 17: regrid.regrid_nam_nest, + 18: regrid.regrid_hourly_wrf_arw, + 19: regrid.regrid_conus_hrrr, + 20: regrid.regrid_conus_hrrr, + 21: regrid.regrid_custom_hourly_netcdf, + 22: regrid.regrid_conus_hrrr, + 23: regrid.regrid_era5, + 24: regrid.regrid_hourly_nbm, + 25: regrid.regrid_ndfd, + 26: regrid.regrid_conus_hrrr, + 27: regrid.regrid_nwm, + }, + "TEMPORAL_INTERPOLATE_INPUTS_MAP": { + 0: no_interpolation, + 1: nearest_neighbor, + 2: weighted_average, + }, + "FILE_EXT": { + "GRIB1": ".grb", + "GRIB2": ".grib2", + "NETCDF": ".nc", + "NETCDF4": ".nc4", + "NWM": ".LDASIN_DOMAIN1", + "ZARR": ".zarr", + }, +} +TEST_UTILS={ + "OLD_NEW_VAR_MAP": { + "q2dBiasCorrectOpt": "q2BiasCorrectOpt", + "paramDir": "dScaleParamDirs", + "border": "ignored_border_widths", + "regridOpt": "regrid_opt", + "userFcstHorizon": "fcst_input_horizons", + "inDir": "input_force_dirs", + "swDowscaleOpt": "swDownscaleOpt", + "t2dBiasCorrectOpt": "t2BiasCorrectOpt", + "userCycleOffset": "fcst_input_offsets", + "windBiasCorrectOpt": "windBiasCorrect", + "timeInterpOpt": 
"forceTemoralInterp", + "enforce": "input_force_mandatory", + "file_type": "input_force_types", + } +} diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index aa3752ce..9a58fd02 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -13,7 +13,7 @@ import numpy as np -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import FORCINGINPUTMOD if TYPE_CHECKING: from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( @@ -28,7 +28,6 @@ from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) -CONSTS = CONSTS[Path(__file__).stem] class InputForcings: @@ -57,12 +56,12 @@ def __init__( self._keyValue = force_key self.idx = idx - for attr in CONSTS[self.__class__.__base__.__name__]: + for attr in FORCINGINPUTMOD[self.__class__.__base__.__name__]: setattr(self, attr, None) - self.find_neighbor_files_map = CONSTS["FIND_NEIGHBOR_FILES_MAP"] - self.regrid_map = CONSTS["REGRID_MAP"] - self.temporal_interpolate_inputs_map = CONSTS["TEMPORAL_INTERPOLATE_INPUTS_MAP"] + self.find_neighbor_files_map = FORCINGINPUTMOD["FIND_NEIGHBOR_FILES_MAP"] + self.regrid_map = FORCINGINPUTMOD["REGRID_MAP"] + self.temporal_interpolate_inputs_map = FORCINGINPUTMOD["TEMPORAL_INTERPOLATE_INPUTS_MAP"] self.initialize_config_options() @@ -89,7 +88,7 @@ def force_count(self) -> int: @property def product_name(self) -> str: """Map the forcing key value to the product name.""" - return CONSTS["PRODUCT_NAME"][self.keyValue] + return FORCINGINPUTMOD["PRODUCT_NAME"][self.keyValue] @property def keyValue(self) -> int: @@ -108,7 +107,7 @@ def keyValue(self, val: int) -> int: @property def file_ext(self) -> str: """Map the forcing 
file type to the file extension.""" - ext = CONSTS["FILE_EXT"].get(self.input_force_types) + ext = FORCINGINPUTMOD["FILE_EXT"].get(self.input_force_types) if ext is None: raise ValueError(f"Unexpected file_type: {self.input_force_types}") self._file_ext = ext @@ -128,7 +127,7 @@ def cycle_freq(self) -> int: """Map the forcing key value to the cycle frequency in minutes.""" if self._cycle_freq is None: # First call to getter, initialize - self._cycle_freq = CONSTS["CYCLE_FREQ"][self.keyValue] + self._cycle_freq = FORCINGINPUTMOD["CYCLE_FREQ"][self.keyValue] return self._cycle_freq @cycle_freq.setter @@ -144,7 +143,7 @@ def grib_vars(self) -> list[str] | None: """Map the forcing key value to the required GRIB variable names.""" if self._grib_vars is None: # First call to getter, initialize - self._grib_vars = CONSTS["GRIB_VARS"][self.keyValue] + self._grib_vars = FORCINGINPUTMOD["GRIB_VARS"][self.keyValue] return self._grib_vars @grib_vars.setter @@ -158,12 +157,12 @@ def grib_vars(self, val: list[str]) -> list[str] | None: @property def grib_levels(self) -> str: """Map the forcing key value to the required GRIB variable levels.""" - return CONSTS["GRIB_LEVELS"][self.keyValue] + return FORCINGINPUTMOD["GRIB_LEVELS"][self.keyValue] @property def netcdf_var_names(self) -> str: """Map the forcing key value to the required NetCDF variable names.""" - return CONSTS["NET_CDF_VARS_NAMES"][self.keyValue] + return FORCINGINPUTMOD["NET_CDF_VARS_NAMES"][self.keyValue] @property def grib_mes_idx(self) -> list[int] | None: @@ -172,17 +171,17 @@ def grib_mes_idx(self) -> list[int] | None: arrays that store the message ids of required forcing variables for each forcing type TODO fill these arrays for forcing types other than GFS """ - return CONSTS["GRIB_MES_IDX"][self.keyValue] + return FORCINGINPUTMOD["GRIB_MES_IDX"][self.keyValue] @property def input_map_output(self) -> list[int] | None: """Map the forcing key value to the input to output variable mapping.""" - return 
CONSTS["INPUT_MAP_OUTPUT"][self.keyValue] + return FORCINGINPUTMOD["INPUT_MAP_OUTPUT"][self.keyValue] @property def forecast_horizons(self) -> list[int] | None: """Map the forcing key value to the forecast horizons list.""" - return CONSTS["FORECAST_HORIZONS"][self.keyValue] + return FORCINGINPUTMOD["FORECAST_HORIZONS"][self.keyValue] def calc_neighbor_files( self, config_options: ConfigOptions, dcurrent, mpi_config: MpiConfig @@ -262,7 +261,7 @@ def __init__( ) -> None: """Initialize InputForcingsGridded with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in FORCINGINPUTMOD[self.__class__.__name__]: setattr(self, attr, None) @property @@ -402,7 +401,7 @@ def __init__( ) -> None: """Initialize InputForcingsUnstructured with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in FORCINGINPUTMOD[self.__class__.__name__]: setattr(self, attr, None) @property diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index a1ef1f4d..4620c458 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -24,12 +24,11 @@ from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( ConfigOptions, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import CONSTS +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import GEOMOD from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) -CONSTS = CONSTS[Path(__file__).stem] def 
set_none(func) -> Any: @@ -104,7 +103,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None """Initialize GeoMeta class variables.""" self.config_options = config_options self.mpi_config = mpi_config - for attr in CONSTS[self.__class__.__base__.__name__]: + for attr in GEOMOD[self.__class__.__base__.__name__]: setattr(self, attr, None) @property @@ -294,7 +293,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None :return: """ super().__init__(config_options, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in GEOMOD[self.__class__.__name__]: setattr(self, attr, None) @broadcast @@ -892,7 +891,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): :return: """ super().__init__(config_options, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in GEOMOD[self.__class__.__name__]: setattr(self, attr, None) @property @@ -1069,7 +1068,7 @@ def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None :return: """ super().__init__(config_options, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in GEOMOD[self.__class__.__name__]: setattr(self, attr, None) @broadcast From 9be38db7349b90e92e5a312d0c08065ea12dbab8 Mon Sep 17 00:00:00 2001 From: Matthew Deshotel Date: Thu, 26 Mar 2026 16:17:03 -0500 Subject: [PATCH 34/34] add setters for properties --- .../core/forcingInputMod.py | 98 ++++++++++++++++--- 1 file changed, 82 insertions(+), 16 deletions(-) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 9a58fd02..3851065c 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -268,14 +268,22 @@ def __init__( @lru_cache def final_forcings(self): """Initialize the local final 
grid of values.""" - return np.empty( - [ - self.force_count, - self.geo_meta.ny_local, - self.geo_meta.nx_local, - ], - np.float64, - ) + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty( + [ + self.force_count, + self.geo_meta.ny_local, + self.geo_meta.nx_local, + ], + np.float64, + ) + + @final_forcings.setter + def final_forcings(self, value): + "Setter for final_forcings." + self._final_forcings = value @property @lru_cache @@ -308,22 +316,36 @@ def regridded_mask_AORC(self): @lru_cache def t2dTmp(self): """Initialize temporary array for specific humidity downscaling.""" - if self.q2dDownscaleOpt > 0: + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: return np.empty( [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32, ) + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + @property @lru_cache def psfcTmp(self): """Initialize temporary array for specific humidity downscaling.""" - if self.q2dDownscaleOpt > 0: + if self._psfcTmp is not None: + return self._psfcTmp + elif self.q2dDownscaleOpt > 0: return np.empty( [self.geo_meta.ny_local, self.geo_meta.nx_local], np.float32, ) + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + class InputForcingsHydrofabric(InputForcings): """Abstract class defining parameters of a single input forcing product. 
@@ -342,14 +364,22 @@ def __init__( ) -> None: """Initialize InputForcingsHydrofabric with configuration options, geospatial metadata, and MPI configuration.""" super().__init__(force_key, idx, config_options, geo_meta, mpi_config) - for attr in CONSTS[self.__class__.__name__]: + for attr in FORCINGINPUTMOD[self.__class__.__name__]: setattr(self, attr, None) @property @lru_cache def final_forcings(self): """Initialize the local final grid of values.""" - return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + + @final_forcings.setter + def final_forcings(self, value): + "Setter for final_forcings." + self._final_forcings = value @property @lru_cache @@ -373,16 +403,30 @@ def regridded_mask_AORC(self): @lru_cache def t2dTmp(self): """Initialize temporary array for specific humidity downscaling.""" - if self.q2dDownscaleOpt > 0: + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: return np.empty([self.geo_meta.ny_local], np.float32) + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + @property @lru_cache def psfcTmp(self): """Initialize temporary array for specific humidity downscaling.""" + if self._psfcTmp is not None: + return self._psfcTmp if self.q2dDownscaleOpt > 0: return np.empty([self.geo_meta.ny_local], np.float32) + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + class InputForcingsUnstructured(InputForcings): """Abstract class defining parameters of a single input forcing product. 
@@ -408,16 +452,30 @@ def __init__( @lru_cache def t2dTmp(self): """Initialize temporary array for specific humidity downscaling.""" - if self.q2dDownscaleOpt > 0: + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: return np.empty([self.geo_meta.ny_local], np.float32) + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + @property @lru_cache def psfcTmp(self): """Initialize temporary array for specific humidity downscaling.""" - if self.q2dDownscaleOpt > 0: + if self._psfcTmp is not None: + return self._psfcTmp + elif self.q2dDownscaleOpt > 0: return np.empty([self.geo_meta.ny_local], np.float32) + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + @property @lru_cache def t2dTmp_elem(self): @@ -436,7 +494,15 @@ def psfcTmp_elem(self): @lru_cache def final_forcings(self): """Initialize the local final grid of values.""" - return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + + @final_forcings.setter + def final_forcings(self, value): + """Setter for final_forcings.""" + self._final_forcings = value @property @lru_cache