diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py index 6858056a..d7f9a8cc 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/bmi_model.py @@ -24,15 +24,22 @@ from mpi4py import MPI from NextGen_Forcings_Engine_BMI import esmf_creation, forcing_extraction +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.bmi_grid import Grid, GridType +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import BMI_MODEL +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GriddedGeoMeta, + HydrofabricGeoMeta, + UnstructuredGeoMeta, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig -from .bmi_grid import Grid, GridType from .core import ( - config, err_handler, forcingInputMod, - geoMod, ioMod, - parallel, suppPrecipMod, ) from .model import NWMv3ForcingEngineModel @@ -117,7 +124,7 @@ def __init__(self): self.cfg_bmi = None self._job_meta = None self._mpi_meta = None - self._wrf_hydro_geo_meta = None + self.geo_meta = None self._grid_type = None self._grids = None self._grid_map = None @@ -203,11 +210,11 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: with bmi_cfg_file.open("r") as fp: cfg = yaml.safe_load(fp) - self.cfg_bmi = self._parse_config(cfg) + self.cfg_bmi = parse_config(cfg) # If _job_meta was not set by initialize_with_params(), create a default one if self._job_meta is None: - self._job_meta = config.ConfigOptions(self.cfg_bmi) + self._job_meta = ConfigOptions(self.cfg_bmi) # Parse the configuration options try: @@ -231,7 +238,10 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: self._job_meta.nwmConfig = self.cfg_bmi["NWM_CONFIG"] # 
Initialize MPI communication - self._mpi_meta = parallel.MpiConfig() + self._mpi_meta = MpiConfig() + + self.geo_meta = HydrofabricGeoMeta(self._job_meta, self._mpi_meta) + try: comm = MPI.Comm.f2py(self._comm) if self._comm is not None else None self._mpi_meta.initialize_comm(self._job_meta, comm=comm) @@ -248,502 +258,9 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: if self._job_meta.nwmConfig not in ["AORC", "NWM"]: forcing_extraction.retrieve_forcing(self._job_meta) - # Initialize our WRF-Hydro geospatial object, which contains - # information about the modeling domain, local processor - # grid boundaries, and ESMF grid objects/fields to be used - # in regridding. - self._wrf_hydro_geo_meta = geoMod.GeoMetaWrfHydro() - - if self._job_meta.grid_type == "gridded": - self._wrf_hydro_geo_meta.initialize_destination_geo_gridded( - self._job_meta, self._mpi_meta - ) - elif self._job_meta.grid_type == "unstructured": - self._wrf_hydro_geo_meta.initialize_destination_geo_unstructured( - self._job_meta, self._mpi_meta - ) - elif self._job_meta.grid_type == "hydrofabric": - self._wrf_hydro_geo_meta.initialize_destination_geo_hydrofabric( - self._job_meta, self._mpi_meta - ) - else: - self._job_meta.errMsg = "You must specify a proper grid_type (gridded, unstructured, hydrofabric) in the config." 
- err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - # Assign grid type to BMI class for grid information self._grid_type = self._job_meta.grid_type.lower() - - # Set output var names based on grid type - if self._grid_type == "gridded": - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - self._output_var_names = [ - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_1: Grid = Grid( - 1, 2, GridType.uniform_rectilinear - ) # Grid 1 is a 2-dimensional grid - self.grid_1._grid_y = self._wrf_hydro_geo_meta.latitude_grid.flatten() - self.grid_1._grid_x = self._wrf_hydro_geo_meta.longitude_grid.flatten() - self.grid_1._shape = self._wrf_hydro_geo_meta.latitude_grid.shape - self.grid_1._size = len( - self._wrf_hydro_geo_meta.latitude_grid.flatten() - ) - self.grid_1._spacing = ( - self._wrf_hydro_geo_meta.dx_meters, - self._wrf_hydro_geo_meta.dy_meters, - ) - self.grid_1._units = "m" - self.grid_1._origin = None - - self._grids = [self.grid_1] - - self._grid_map = { - "U2D_ELEMENT": self.grid_1, - "V2D_ELEMENT": self.grid_1, - "LWDOWN_ELEMENT": self.grid_1, - "SWDOWN_ELEMENT": self.grid_1, - "T2D_ELEMENT": self.grid_1, - "Q2D_ELEMENT": self.grid_1, - "PSFC_ELEMENT": self.grid_1, - "RAINRATE_ELEMENT": self.grid_1, - "LQFRAC_ELEMENT": self.grid_1, - } - - else: - self._output_var_names = [ - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. 
- # This is going to get long, - # since the input variable names could come from any forcing... - # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_1: Grid = Grid( - 1, 2, GridType.uniform_rectilinear - ) # Grid 1 is a 2-dimensional grid - self.grid_1._grid_y = self._wrf_hydro_geo_meta.latitude_grid.flatten() - self.grid_1._grid_x = self._wrf_hydro_geo_meta.longitude_grid.flatten() - self.grid_1._shape = self._wrf_hydro_geo_meta.latitude_grid.shape - self.grid_1._size = len( - self._wrf_hydro_geo_meta.latitude_grid.flatten() - ) - self.grid_1._spacing = ( - self._wrf_hydro_geo_meta.dx_meters, - self._wrf_hydro_geo_meta.dy_meters, - ) - self.grid_1._units = "m" - self.grid_1._origin = None - - self._grids = [self.grid_1] - - self._grid_map = { - "U2D_ELEMENT": self.grid_1, - "V2D_ELEMENT": self.grid_1, - "LWDOWN_ELEMENT": self.grid_1, - "SWDOWN_ELEMENT": self.grid_1, - "T2D_ELEMENT": self.grid_1, - "Q2D_ELEMENT": self.grid_1, - "PSFC_ELEMENT": self.grid_1, - "RAINRATE_ELEMENT": self.grid_1, - } - - elif self._grid_type == "unstructured": - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "U2D_NODE", - "V2D_NODE", - 
"LWDOWN_NODE", - "SWDOWN_NODE", - "T2D_NODE", - "Q2D_NODE", - "PSFC_NODE", - "RAINRATE_NODE", - "LQFRAC_NODE", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... - # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_NODE": ["10-m U-component of wind", "m/s"], - "V2D_NODE": ["10-m V-component of wind", "m/s"], - "T2D_NODE": ["2-m Air Temperature", "K"], - "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_NODE": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_NODE": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_NODE": ["Surface Pressure", "Pa"], - "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_NODE": ["Liquid Fraction of Precipitation", "%"], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_2: Grid = Grid( - 2, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - self.grid_3: Grid = Grid( - 3, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_2._grid_y = self._wrf_hydro_geo_meta.latitude_grid_elem - self.grid_2._grid_x = 
self._wrf_hydro_geo_meta.longitude_grid_elem - - self.grid_3._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_3._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) - self.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_2, self.grid_3] - - self._grid_map = { - "U2D_ELEMENT": self.grid_2, - "V2D_ELEMENT": self.grid_2, - "LWDOWN_ELEMENT": self.grid_2, - "SWDOWN_ELEMENT": self.grid_2, - "T2D_ELEMENT": self.grid_2, - "Q2D_ELEMENT": self.grid_2, - "PSFC_ELEMENT": self.grid_2, - "RAINRATE_ELEMENT": self.grid_2, - "LQFRAC_ELEMENT": self.grid_2, - "U2D_NODE": self.grid_3, - "V2D_NODE": self.grid_3, - "LWDOWN_NODE": self.grid_3, - "SWDOWN_NODE": self.grid_3, - "T2D_NODE": self.grid_3, - "Q2D_NODE": self.grid_3, - "PSFC_NODE": self.grid_3, - "RAINRATE_NODE": self.grid_3, - "LQFRAC_NODE": self.grid_3, - } - else: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "U2D_NODE", - "V2D_NODE", - "LWDOWN_NODE", - "SWDOWN_NODE", - "T2D_NODE", - "Q2D_NODE", - "PSFC_NODE", - "RAINRATE_NODE", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "U2D_NODE": ["10-m U-component of wind", "m/s"], - "V2D_NODE": ["10-m V-component of wind", "m/s"], - "T2D_NODE": ["2-m Air Temperature", "K"], - "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_NODE": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_NODE": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_NODE": ["Surface Pressure", "Pa"], - "RAINRATE_NODE": ["Surface Precipitation Rate", "mm/s"], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_2: Grid = Grid( - 2, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - self.grid_3: Grid = Grid( - 3, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_2._grid_y = self._wrf_hydro_geo_meta.latitude_grid_elem - self.grid_2._grid_x = self._wrf_hydro_geo_meta.longitude_grid_elem - - self.grid_3._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_3._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_2._size = len(self._wrf_hydro_geo_meta.latitude_grid_elem) - self.grid_3._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_2, self.grid_3] - - self._grid_map = { - "U2D_ELEMENT": self.grid_2, - "V2D_ELEMENT": self.grid_2, - "LWDOWN_ELEMENT": self.grid_2, - "SWDOWN_ELEMENT": self.grid_2, - "T2D_ELEMENT": self.grid_2, - "Q2D_ELEMENT": self.grid_2, - "PSFC_ELEMENT": self.grid_2, - "RAINRATE_ELEMENT": self.grid_2, - "U2D_NODE": self.grid_3, - 
"V2D_NODE": self.grid_3, - "LWDOWN_NODE": self.grid_3, - "SWDOWN_NODE": self.grid_3, - "T2D_NODE": self.grid_3, - "Q2D_NODE": self.grid_3, - "PSFC_NODE": self.grid_3, - "RAINRATE_NODE": self.grid_3, - } - - elif self._grid_type == "hydrofabric": - # Flag here to indicate whether or not the NWM operational configuration - # will support a BMI field for liquid fraction of precipitation - if self._job_meta.include_lqfrac == 1: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "CAT-ID", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - "LQFRAC_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "CAT-ID": ["Catchment ID", ""], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], - } - - self.grid_4: Grid = Grid( - 4, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_4._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_4._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_4._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_4] - - self._grid_map = { - "CAT-ID": self.grid_4, - "U2D_ELEMENT": self.grid_4, - "V2D_ELEMENT": self.grid_4, - "LWDOWN_ELEMENT": self.grid_4, - "SWDOWN_ELEMENT": self.grid_4, - "T2D_ELEMENT": self.grid_4, - "Q2D_ELEMENT": self.grid_4, - "PSFC_ELEMENT": self.grid_4, - "RAINRATE_ELEMENT": self.grid_4, - "LQFRAC_ELEMENT": self.grid_4, - } - else: - # --------------------------------------------- - # Output variable names (CSDMS standard names) - # --------------------------------------------- - self._output_var_names = [ - "CAT-ID", - "U2D_ELEMENT", - "V2D_ELEMENT", - "LWDOWN_ELEMENT", - "SWDOWN_ELEMENT", - "T2D_ELEMENT", - "Q2D_ELEMENT", - "PSFC_ELEMENT", - "RAINRATE_ELEMENT", - ] - - # ------------------------------------------------------ - # Create a Python dictionary that maps CSDMS Standard - # Names to the model's internal variable names. - # This is going to get long, - # since the input variable names could come from any forcing... 
- # ------------------------------------------------------ - self._var_name_units_map = { - "CAT-ID": ["Catchment ID", ""], - "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], - "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], - "T2D_ELEMENT": ["2-m Air Temperature", "K"], - "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], - "LWDOWN_ELEMENT": [ - "Surface downward long-wave radiation flux", - "W/m^2", - ], - "SWDOWN_ELEMENT": [ - "Surface downward short-wave radiation flux", - "W/m^2", - ], - "PSFC_ELEMENT": ["Surface Pressure", "Pa"], - "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], - } - - self.grid_4: Grid = Grid( - 4, 2, GridType.unstructured - ) # Grid 1 is a 2-dimensional grid - - self.grid_4._grid_y = self._wrf_hydro_geo_meta.latitude_grid - self.grid_4._grid_x = self._wrf_hydro_geo_meta.longitude_grid - - self.grid_4._size = len(self._wrf_hydro_geo_meta.latitude_grid) - - self._grids = [self.grid_4] - - self._grid_map = { - "CAT-ID": self.grid_4, - "U2D_ELEMENT": self.grid_4, - "V2D_ELEMENT": self.grid_4, - "LWDOWN_ELEMENT": self.grid_4, - "SWDOWN_ELEMENT": self.grid_4, - "T2D_ELEMENT": self.grid_4, - "Q2D_ELEMENT": self.grid_4, - "PSFC_ELEMENT": self.grid_4, - "RAINRATE_ELEMENT": self.grid_4, - } + self.set_var_names(self) # ----- Create some lookup tabels from the long variable names --------# self._var_name_map_long_first = { @@ -759,21 +276,9 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: for long_name in self._var_name_units_map.keys() } - if self._job_meta.spatial_meta is not None: - try: - self._wrf_hydro_geo_meta.initialize_geospatial_metadata( - self._job_meta, self._mpi_meta - ) - except Exception as e: - err_handler.err_out_screen_para(self._job_meta.errMsg, self._mpi_meta) - err_handler.check_program_status(self._job_meta, self._mpi_meta) - # Check to make sure we have enough dimensionality to run regridding. ESMF requires both grids # to have a size of at least 2. 
- if ( - self._wrf_hydro_geo_meta.nx_local < 2 - or self._wrf_hydro_geo_meta.ny_local < 2 - ): + if self.geo_meta.nx_local < 2 or self.geo_meta.ny_local < 2: self._job_meta.errMsg = ( "You have specified too many cores for your WRF-Hydro grid. " "Local grid Must have x/y dimension size of 2." @@ -783,7 +288,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Initialize our output object, which includes local slabs from the output grid. try: - self._output_obj = ioMod.OutputObj(self._job_meta, self._wrf_hydro_geo_meta) + self._output_obj = ioMod.OutputObj(self._job_meta, self.geo_meta) except Exception as e: err_handler.err_out_screen_para(self._job_meta, self._mpi_meta) err_handler.check_program_status(self._job_meta, self._mpi_meta) @@ -795,7 +300,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # downscaling and regridding purposes. try: self._input_forcing_mod = forcingInputMod.init_dict( - self._job_meta, self._wrf_hydro_geo_meta, self._mpi_meta + self._job_meta, self.geo_meta, self._mpi_meta ) except Exception as e: err_handler.err_out_screen_para(self._job_meta, self._mpi_meta) @@ -804,9 +309,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # If we have specified supplemental precipitation products, initialize # the supp class. 
if self._job_meta.number_supp_pcp > 0: - self._supp_pcp_mod = suppPrecipMod.initDict( - self._job_meta, self._wrf_hydro_geo_meta - ) + self._supp_pcp_mod = suppPrecipMod.initDict(self._job_meta, self.geo_meta) else: self._supp_pcp_mod = None err_handler.check_program_status(self._job_meta, self._mpi_meta) @@ -815,42 +318,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: for parm in self._model_parameters_list: self._values[self._var_name_map_short_first[parm]] = self.cfg_bmi[parm] - if self._job_meta.grid_type == "gridded": - # -----------------------------------------------------------------------# - # Get the size of the flattened 2D arrays from the gridded domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - - for model_output in self.get_output_var_names(): - self._values[model_output] = np.zeros(self._varsize, dtype=float) - - elif self._job_meta.grid_type == "unstructured": - # -----------------------------------------------------------------------# - # Get the size of the flattened 1D arrays from the unstructured domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - self._varsize_elem = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid_elem.shape).flatten() - ) - - for model_output in self.get_output_var_names(): - if "ELEMENT" in model_output: - self._values[model_output] = np.zeros( - self._varsize_elem, dtype=float - ) - else: - self._values[model_output] = np.zeros(self._varsize, dtype=float) - - elif self._job_meta.grid_type == "hydrofabric": - # -----------------------------------------------------------------------# - # Get the size of the flattened 1D arrays from the hydrofabric domain - self._varsize = len( - np.zeros(self._wrf_hydro_geo_meta.latitude_grid.shape).flatten() - ) - for model_output in self.get_output_var_names(): - self._values[model_output] = np.zeros(self._varsize, dtype=float) + 
self.get_size_of_arrays(self) # for model_input in self.get_input_var_names(): # self._values[model_input] = np.zeros(self._varsize, dtype=float) @@ -864,7 +332,7 @@ def initialize(self, config_file: str, output_path: str | None = None) -> None: # Set catchment ids if using hydrofabric if self._grid_type == "hydrofabric": - self._values["CAT-ID"] = self._wrf_hydro_geo_meta.element_ids_global + self._values["CAT-ID"] = self.geo_meta.element_ids_global self._configure_output_path(output_path) @@ -897,9 +365,7 @@ def initialize_with_params( :raises ValueError: If an invalid grid type is specified, an exception is raised. """ # Set the job metadata parameters (b_date, geogrid) using config_options - self._job_meta = config.ConfigOptions( - self.cfg_bmi, b_date=b_date, geogrid_arg=geogrid - ) + self._job_meta = ConfigOptions(self.cfg_bmi, b_date=b_date, geogrid_arg=geogrid) # Now that _job_meta is set, call initialize() to set up the core model self.initialize(config_file, output_path=output_path) @@ -920,11 +386,7 @@ def _configure_output_path(self, output_path: str | None = None) -> None: return # Already configured or no output object to configure if self._job_meta.forcing_output == 1: - ext = { - "gridded": "GRIDDED", - "hydrofabric": "HYDROFABRIC", - "unstructured": "MESH", - }.get(self._job_meta.grid_type) + ext = BMI_MODEL["extension_map"].get(self._job_meta.grid_type) if ext is None: raise ValueError(f"Invalid grid_type: {self._job_meta.grid_type}") @@ -942,7 +404,7 @@ def _configure_output_path(self, output_path: str | None = None) -> None: ) self._output_obj.init_forcing_file( - self._job_meta, self._wrf_hydro_geo_meta, self._mpi_meta + self._job_meta, self.geo_meta, self._mpi_meta ) self._output_configured = True @@ -986,7 +448,7 @@ def update_until(self, future_time: float): self._values, future_time, self._job_meta, - self._wrf_hydro_geo_meta, + self.geo_meta, self._input_forcing_mod, self._supp_pcp_mod, self._mpi_meta, @@ -1003,7 +465,7 @@ def 
update_until(self, future_time: float): self._values, self._values["current_model_time"], self._job_meta, - self._wrf_hydro_geo_meta, + self.geo_meta, self._input_forcing_mod, self._supp_pcp_mod, self._mpi_meta, @@ -1023,7 +485,7 @@ def finalize(self): """ # Force destruction of ESMF objects - self._wrf_hydro_geo_meta = None + self.geo_meta = None self._input_forcing_mod = None self._supp_pcp_mod = None self._model = None @@ -1150,22 +612,13 @@ def get_value(self, var_name: str, dest: NDArray[Any]) -> NDArray[Any]: LOG.debug( f"[BMI get_value] Special case: 'grid:ids', grid_type: {self._job_meta.grid_type}" ) - if self._job_meta.grid_type == "gridded": - dest[:] = [self.grid_1.id] - elif self._job_meta.grid_type == "unstructured": - dest[:] = [self.grid_2.id, self.grid_3.id] - elif self._job_meta.grid_type == "hydrofabric": - dest[:] = [self.grid_4.id] + dest[:] = self.grid_ids(self) + elif var_name == "grid:ranks": LOG.debug( f"[BMI get_value] Special case: 'grid:ranks', grid_type: {self._job_meta.grid_type}" ) - if self._job_meta.grid_type == "gridded": - dest[:] = [self.grid_1.rank] - elif self._job_meta.grid_type == "unstructured": - dest[:] = [self.grid_2.rank, self.grid_3.rank] - elif self._job_meta.grid_type == "hydrofabric": - dest[:] = [self.grid_4.rank] + dest[:] = self.grid_ranks(self) else: src = self.get_value_ptr(var_name) LOG.debug( @@ -2003,97 +1456,366 @@ def get_grid_z(self, grid_id: int, z: NDArray[np.float64]) -> NDArray[np.float64 # ------------------------------------------------------------ # ------------------------------------------------------------ - def _parse_config(self, cfg: dict) -> dict: - """Parse the provided configuration dictionary (`cfg`) and modifies it based on certain rules. - This function processes specific keys in the configuration dictionary: - - Converts path-like strings to `PosixPath` objects. - - Converts date strings to `pandas` datetime objects. 
- - Configures lists of integers or strings for specific variables in the configuration. +def parse_config(cfg: dict) -> dict: + """Parse the provided configuration dictionary (`cfg`) and modifies it based on certain rules. - The function updates the `cfg` dictionary directly, modifying values as needed to match expected formats and types. + This function processes specific keys in the configuration dictionary: + - Converts path-like strings to `PosixPath` objects. + - Converts date strings to `pandas` datetime objects. + - Configures lists of integers or strings for specific variables in the configuration. - :param cfg: A dictionary containing the configuration settings. The dictionary may include paths, dates, and lists of values. - :return: The updated configuration dictionary with appropriately parsed values. - """ - # LOG.debug(f"Entering _parse_config with cfg type: {type(cfg)}") - if isinstance(cfg, str): - LOG.error( - f"Received string data (raw CSV) instead of dictionary: {cfg[:200]}..." - ) - raise TypeError( - "Expected dictionary in _parse_config, but got a raw CSV string." - ) + The function updates the `cfg` dictionary directly, modifying values as needed to match expected formats and types. - # if not isinstance(cfg, dict): - # raise TypeError(f"[ERROR] Expected dictionary in _parse_config, got {type(cfg)} with contents: {cfg}") - - for key, val in cfg.items(): - # LOG.debug(f"Processing key: {key}, value type: {type(val)}, value: {val}") - # Convert all path strings to PosixPath objects - if any([key.endswith(x) for x in ["_dir", "_path", "_file", "_files"]]): - if (val is not None) and (val != "None"): - if isinstance(val, list): - temp_list = [] - for element in val: - temp_list.append(Path(element)) - cfg[key] = temp_list - else: - cfg[key] = Path(val) - else: - cfg[key] = None + :param cfg: A dictionary containing the configuration settings. The dictionary may include paths, dates, and lists of values. 
+ :return: The updated configuration dictionary with appropriately parsed values. + """ + # LOG.debug(f"Entering _parse_config with cfg type: {type(cfg)}") + if isinstance(cfg, str): + LOG.error( + f"Received string data (raw CSV) instead of dictionary: {cfg[:200]}..." + ) + raise TypeError( + "Expected dictionary in _parse_config, but got a raw CSV string." + ) - # Convert Dates to pandas Datetime indices - elif key.endswith("_date"): + # if not isinstance(cfg, dict): + # raise TypeError(f"[ERROR] Expected dictionary in _parse_config, got {type(cfg)} with contents: {cfg}") + + for key, val in cfg.items(): + # LOG.debug(f"Processing key: {key}, value type: {type(val)}, value: {val}") + # Convert all path strings to PosixPath objects + if any([key.endswith(x) for x in ["_dir", "_path", "_file", "_files"]]): + if (val is not None) and (val != "None"): if isinstance(val, list): temp_list = [] - for elem in val: - temp_list.append(pd.to_datetime(elem, format="%d/%m/%Y")) + for element in val: + temp_list.append(Path(element)) cfg[key] = temp_list else: - cfg[key] = pd.to_datetime(val, format="%d/%m/%Y") - - # Configure NWMv3.0 input configurations to what the ConfigClass expects - # Flag for variables that need a list of integers - elif key in [ - "InputForcings", - "InputMandatory", - "ForecastInputHorizons", - "ForecastInputOffsets", - "IgnoredBorderWidths", - "RegridOpt", - "TemperatureDownscaling", - "ShortwaveDownscaling", - "PressureDownscaling", - "PrecipDownscaling", - "HumidityDownscaling", - "TemperatureBiasCorrection", - "PressureBiasCorrection", - "HumidityBiasCorrection", - "WindBiasCorrection", - "SwBiasCorrection", - "LwBiasCorrection", - "PrecipBiasCorrection", - "SuppPcp", - "RegridOptSuppPcp", - "SuppPcpTemporalInterpolation", - "SuppPcpMandatory", - "SuppPcpInputOffsets", - "custom_input_fcst_freq", - ]: - cfg[key] = val - - # Flag for variables that need to be a list of strings - elif key in [ - "InputForcingDirectories", - "InputForcingTypes", - 
"DownscalingParamDirs", - "SuppPcpForcingTypes", - "SuppPcpDirectories", - ]: - cfg[key] = val + cfg[key] = Path(val) + else: + cfg[key] = None + + # Convert Dates to pandas Datetime indices + elif key.endswith("_date"): + if isinstance(val, list): + temp_list = [] + for elem in val: + temp_list.append(pd.to_datetime(elem, format="%d/%m/%Y")) + cfg[key] = temp_list + else: + cfg[key] = pd.to_datetime(val, format="%d/%m/%Y") + + # Configure NWMv3.0 input configurations to what the ConfigClass expects + # Flag for variables that need a list of integers + elif key in [ + "InputForcings", + "InputMandatory", + "ForecastInputHorizons", + "ForecastInputOffsets", + "IgnoredBorderWidths", + "RegridOpt", + "TemperatureDownscaling", + "ShortwaveDownscaling", + "PressureDownscaling", + "PrecipDownscaling", + "HumidityDownscaling", + "TemperatureBiasCorrection", + "PressureBiasCorrection", + "HumidityBiasCorrection", + "WindBiasCorrection", + "SwBiasCorrection", + "LwBiasCorrection", + "PrecipBiasCorrection", + "SuppPcp", + "RegridOptSuppPcp", + "SuppPcpTemporalInterpolation", + "SuppPcpMandatory", + "SuppPcpInputOffsets", + "custom_input_fcst_freq", + ]: + cfg[key] = val + + # Flag for variables that need to be a list of strings + elif key in [ + "InputForcingDirectories", + "InputForcingTypes", + "DownscalingParamDirs", + "SuppPcpForcingTypes", + "SuppPcpDirectories", + ]: + cfg[key] = val + else: + pass + + # Add more config parsing if necessary + return cfg + + +class NWMv3_Forcing_Engine_BMI_model_Gridded(NWMv3_Forcing_Engine_BMI_model): + """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model. + + It includes methods for initializing the model, updating it, accessing model variables, + and managing model configuration. This class is responsible for interacting with + geospatial data and forcing inputs for the model simulation. + """ + + def __init__(self): + """Create a model that is ready for initialization. 
+ + Initializes the model with default values for time, variables, and grid types. + """ + super().__init__() + self.GeoMeta = GriddedGeoMeta + + def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid ranks for the gridded domain.""" + return [bmi_model.grid_4.rank] + + def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid IDs for the gridded domain.""" + return [bmi_model.grid_1.id] + + def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Get the size of the flattened 2D arrays from the gridded domain.""" + bmi_model._varsize = len( + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() + ) + + for model_output in bmi_model.get_output_var_names(): + bmi_model._values[model_output] = np.zeros(bmi_model._varsize, dtype=float) + + def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Set the variable names for the BMI model based on the geospatial metadata. + + Create a Python dictionary that maps CSDMS Standard + Names to the model's internal variable names. + This is going to get long, + since the input variable names could come from any forcing... 
+ """ + # Flag here to indicate whether or not the NWM operational configuration + # will support a BMI field for liquid fraction of precipitation + if bmi_model.config_options.include_lqfrac == 1: + bmi_model._output_var_names = BMI_MODEL["_output_var_names"].append( + "LQFRAC_ELEMENT" + ) + bmi_model._var_name_units_map = BMI_MODEL["_var_name_units_map"] | { + "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"] + } + else: + bmi_model._output_var_names = BMI_MODEL["_output_var_names"] + bmi_model._var_name_units_map = BMI_MODEL["_var_name_units_map"] + + bmi_model.grid_1 = Grid( + 1, 2, GridType.uniform_rectilinear + ) # Grid 1 is a 2-dimensional grid + bmi_model.grid_1._grid_y = bmi_model.geo_meta.latitude_grid.flatten() + bmi_model.grid_1._grid_x = bmi_model.geo_meta.longitude_grid.flatten() + bmi_model.grid_1._shape = bmi_model.geo_meta.latitude_grid.shape + bmi_model.grid_1._size = len(bmi_model.geo_meta.latitude_grid.flatten()) + bmi_model.grid_1._spacing = ( + bmi_model.geo_meta.dx_meters, + bmi_model.geo_meta.dy_meters, + ) + bmi_model.grid_1._units = "m" + bmi_model.grid_1._origin = None + + bmi_model._grids = [bmi_model.grid_1] + bmi_model._grid_map = { + var_name: bmi_model.grid_1 for var_name in bmi_model._output_var_names + } + + +class NWMv3_Forcing_Engine_BMI_model_HydroFabric(NWMv3_Forcing_Engine_BMI_model): + """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model. + + It includes methods for initializing the model, updating it, accessing model variables, + and managing model configuration. This class is responsible for interacting with + geospatial data and forcing inputs for the model simulation. + """ + + def __init__(self): + """Create a model that is ready for initialization. + + Initializes the model with default values for time, variables, and grid types. 
+ """ + super().__init__() + self.GeoMeta = HydrofabricGeoMeta + + def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid ranks for the hydrofabric domain.""" + return [bmi_model.grid_4.rank] + + def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid IDs for the hydrofabric domain.""" + return [bmi_model.grid_4.id] + + def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): + """Get the size of the flattened 1D arrays from the hydrofabric domain.""" + bmi_model._varsize = len( + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() + ) + for model_output in bmi_model.get_output_var_names(): + bmi_model._values[model_output] = np.zeros(bmi_model._varsize, dtype=float) + + def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model): + """Set the variables for the hydrofabric geospatial metadata. + + Create a Python dictionary that maps CSDMS Standard + Names to the model's internal variable names. + This is going to get long, + since the input variable names could come from any forcing... 
+ """ + # Flag here to indicate whether or not the NWM operational configuration + # will support a BMI field for liquid fraction of precipitation + if bmi_model._job_meta.include_lqfrac == 1: + bmi_model._output_var_names = ( + ["CAT-ID"] + BMI_MODEL["_output_var_names"] + ["LQFRAC_ELEMENT"] + ) + bmi_model._var_name_units_map = ( + {"CAT-ID": ["Catchment ID", ""]} + | BMI_MODEL["_var_name_units_map"] + | { + "LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"], + } + ) + else: + bmi_model._output_var_names = ["CAT-ID"] + BMI_MODEL["_output_var_names"] + bmi_model._var_name_units_map = {"CAT-ID": ["Catchment ID", ""]} | BMI_MODEL[ + "_var_name_units_map" + ] + + bmi_model.grid_4 = Grid( + 4, 2, GridType.unstructured + ) # Grid 1 is a 2-dimensional grid + + bmi_model.grid_4._grid_y = bmi_model.geo_meta.latitude_grid + bmi_model.grid_4._grid_x = bmi_model.geo_meta.longitude_grid + bmi_model.grid_4._size = len(bmi_model.geo_meta.latitude_grid) + bmi_model._grids = [bmi_model.grid_4] + bmi_model._grid_map = { + var_name: bmi_model.grid_4 for var_name in bmi_model._output_var_names + } + + +class NWMv3_Forcing_Engine_BMI_model_Unstructured(NWMv3_Forcing_Engine_BMI_model): + """Defines the BMI (Basic Model Interface) for the NWMv3.0 Forcings Engine model. + + It includes methods for initializing the model, updating it, accessing model variables, + and managing model configuration. This class is responsible for interacting with + geospatial data and forcing inputs for the model simulation. + """ + + def __init__(self): + """Create a model that is ready for initialization. + + Initializes the model with default values for time, variables, and grid types. 
+ """ + super().__init__() + self.GeoMeta = UnstructuredGeoMeta + + def grid_ranks(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid ranks for the unstructured domain.""" + return [bmi_model.grid_2.rank, bmi_model.grid_3.rank] + + def grid_ids(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> list[int]: + """Get the grid IDs for the unstructured domain. + + From bmi_model.py. + """ + return [bmi_model.grid_2.id, bmi_model.grid_3.id] + + def get_size_of_arrays(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Get the size of the flattened 1D arrays for the unstructured domain.""" + bmi_model._varsize = len( + np.zeros(bmi_model.geo_meta.latitude_grid.shape).flatten() + ) + bmi_model._varsize_elem = len( + np.zeros(bmi_model.geo_meta.latitude_grid_elem.shape).flatten() + ) + + for model_output in bmi_model.get_output_var_names(): + if "ELEMENT" in model_output: + bmi_model._values[model_output] = np.zeros( + bmi_model._varsize_elem, dtype=float + ) else: - pass + bmi_model._values[model_output] = np.zeros( + bmi_model._varsize, dtype=float + ) + + def set_var_names(self, bmi_model: NWMv3_Forcing_Engine_BMI_model) -> None: + """Set the variable names for the unstructured domain. + + Create a Python dictionary that maps CSDMS Standard + Names to the model's internal variable names. + This is going to get long, + since the input variable names could come from any forcing... 
+ """ + # Flag here to indicate whether or not the NWM operational configuration + # will support a BMI field for liquid fraction of precipitation + if bmi_model._job_meta.include_lqfrac == 1: + bmi_model._output_var_names = ( + BMI_MODEL["_output_var_names"] + + ["LQFRAC_ELEMENT"] + + BMI_MODEL["_output_var_names_unstructured"] + ) + +["LQFRAC_NODE"] + + bmi_model._var_name_units_map = ( + BMI_MODEL["_var_name_units_map"] + | {"LQFRAC_ELEMENT": ["Liquid Fraction of Precipitation", "%"]} + | BMI_MODEL["_var_name_units_map_unstructured"] + | {"LQFRAC_NODE": ["Liquid Fraction of Precipitation", "%"]} + ) + + bmi_model._grid_map = ( + {var_name: bmi_model.grid_2 for var_name in BMI_MODEL["_output_var_names"]} + | {"LQFRAC_ELEMENT": bmi_model.grid_2} + | { + var_name: bmi_model.grid_3 + for var_name in BMI_MODEL["_output_var_names_unstructured"] + } + | {"LQFRAC_NODE": bmi_model.grid_3} + ) + else: + bmi_model._output_var_names = ( + BMI_MODEL["_output_var_names"] + BMI_MODEL["_output_var_names_unstructured"] + ) + + bmi_model._var_name_units_map = ( + BMI_MODEL["_var_name_units_map"] + | BMI_MODEL["_var_name_units_map_unstructured"] + ) + + bmi_model._grid_map = { + var_name: bmi_model.grid_2 for var_name in BMI_MODEL["_output_var_names"] + } | { + var_name: bmi_model.grid_3 + for var_name in BMI_MODEL["_output_var_names_unstructured"] + } + + bmi_model.grid_2 = Grid( + 2, 2, GridType.unstructured + ) # Grid 1 is a 2-dimensional grid + bmi_model.grid_3 = Grid( + 3, 2, GridType.unstructured + ) # Grid 1 is a 2-dimensional grid + + bmi_model.grid_2._grid_y = self.geo_meta.latitude_grid_elem + bmi_model.grid_2._grid_x = self.geo_meta.longitude_grid_elem + + bmi_model.grid_3._grid_y = self.geo_meta.latitude_grid + bmi_model.grid_3._grid_x = self.geo_meta.longitude_grid + + bmi_model.grid_2._size = len(self.geo_meta.latitude_grid_elem) + bmi_model.grid_3._size = len(self.geo_meta.latitude_grid) + bmi_model._grids = [bmi_model.grid_2, bmi_model.grid_3] + - # Add more config 
parsing if necessary - return cfg +BMIMODEL = { + "gridded": NWMv3_Forcing_Engine_BMI_model_Gridded, + "unstructured": NWMv3_Forcing_Engine_BMI_model_Unstructured, + "hydrofabric": NWMv3_Forcing_Engine_BMI_model_HydroFabric, +} diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py index ae96b86f..49d5972e 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/bias_correction.py @@ -54,7 +54,7 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 3: ncar_temp_gfs_bias_correct, 4: ncar_temp_hrrr_bias_correct, } - bias_correct_temperature[input_forcings.t2dBiasCorrectOpt]( + bias_correct_temperature[input_forcings.t2BiasCorrectOpt]( input_forcings, config_options, mpi_config, 0 ) err_handler.check_program_status(config_options, mpi_config) @@ -65,7 +65,7 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 1: cfsv2_nldas_nwm_bias_correct, 2: ncar_tbl_correction, } - bias_correct_humidity[input_forcings.q2dBiasCorrectOpt]( + bias_correct_humidity[input_forcings.q2BiasCorrectOpt]( input_forcings, config_options, mpi_config, 1 ) err_handler.check_program_status(config_options, mpi_config) @@ -114,12 +114,12 @@ def run_bias_correction(input_forcings, config_options, geo_meta_wrf_hydro, mpi_ 4: ncar_wspd_hrrr_bias_correct, } # Run for U-Wind - bias_correct_wind[input_forcings.windBiasCorrectOpt]( + bias_correct_wind[input_forcings.windBiasCorrect]( input_forcings, config_options, mpi_config, 2 ) err_handler.check_program_status(config_options, mpi_config) # Run for V-Wind - bias_correct_wind[input_forcings.windBiasCorrectOpt]( + bias_correct_wind[input_forcings.windBiasCorrect]( input_forcings, config_options, mpi_config, 3 ) err_handler.check_program_status(config_options, mpi_config) @@ -2473,7 +2473,7 @@ 
def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for id_nldas_param = nldas_param_file = None if mpi_config.rank == 0: nldas_param_file = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/NLDAS_Climo/nldas2_" + config_options.current_output_date.strftime("%m%d%H") + "_dist_params.nc" @@ -2700,7 +2700,7 @@ def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for if mpi_config.rank == 0: # Read in the CFSv2 parameter files, based on the previous CFSv2 dates cfs_param_path1 = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/CFSv2_Climo/cfs_" + cfs_param_path_vars[force_num] + "_" @@ -2711,7 +2711,7 @@ def cfsv2_nldas_nwm_bias_correct(input_forcings, config_options, mpi_config, for ) cfs_param_path2 = ( - input_forcings.paramDir + input_forcings.dScaleParamDirs + "/CFSv2_Climo/cfs_" + cfs_param_path_vars[force_num] + "_" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py new file mode 100644 index 00000000..e69a4d2e --- /dev/null +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/consts.py @@ -0,0 +1,942 @@ +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core import ( + regrid, + time_handling, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.timeInterpMod import ( + nearest_neighbor, + no_interpolation, + weighted_average, +) + + +GEOMOD= { + "GeoMeta": [ + "nodeCoords", + "centerCoords", + "inds", + "esmf_lat", + "esmf_lon", + ], + "UnstructuredGeoMeta": [ + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "dx_meters", + "dy_meters", + "element_ids", + "element_ids_global", + "sina_grid", + "cosa_grid", + "esmf_lat", + "esmf_lon", + ], + "HydrofabricGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "nx_global_elem", + "ny_global_elem", + 
"dx_meters", + "dy_meters", + "mesh_inds_elem", + "height_elem", + "sina_grid", + "cosa_grid", + "slope_elem", + "slp_azi_elem", + "esmf_lat", + "esmf_lon", + "latitude_grid_elem", + "longitude_grid_elem", + ], + "GriddedGeoMeta": [ + "nx_local_elem", + "ny_local_elem", + "nx_global_elem", + "ny_global_elem", + "element_ids", + "element_ids_global", + "lat_bounds", + "lon_bounds", + "mesh_inds", + "mesh_inds_elem", + "height_elem", + "slope_elem", + "slp_azi_elem", + "latitude_grid_elem", + "longitude_grid_elem", + ], +} +BMI_MODEL= { + "extension_map": { + "gridded": "GRIDDED", + "hydrofabric": "HYDROFABRIC", + "unstructured": "MESH", + }, + "_output_var_names": [ + "U2D_ELEMENT", + "V2D_ELEMENT", + "LWDOWN_ELEMENT", + "SWDOWN_ELEMENT", + "T2D_ELEMENT", + "Q2D_ELEMENT", + "PSFC_ELEMENT", + "RAINRATE_ELEMENT", + ], + "_output_var_names_unstructured": [ + "U2D_NODE", + "V2D_NODE", + "LWDOWN_NODE", + "SWDOWN_NODE", + "T2D_NODE", + "Q2D_NODE", + "PSFC_NODE", + "RAINRATE_NODE", + "LQFRAC_NODE", + ], + "_var_name_units_map": { + "U2D_ELEMENT": ["10-m U-component of wind", "m/s"], + "V2D_ELEMENT": ["10-m V-component of wind", "m/s"], + "T2D_ELEMENT": ["2-m Air Temperature", "K"], + "Q2D_ELEMENT": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_ELEMENT": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_ELEMENT": [ + "Surface downward short-wave radiation flux", + "W/m^2", + ], + "PSFC_ELEMENT": ["Surface Pressure", "Pa"], + "RAINRATE_ELEMENT": ["Surface Precipitation Rate", "mm/s"], + }, + "_var_name_units_map_unstructured": { + "U2D_NODE": ["10-m U-component of wind", "m/s"], + "V2D_NODE": ["10-m V-component of wind", "m/s"], + "T2D_NODE": ["2-m Air Temperature", "K"], + "Q2D_NODE": ["2-m Specific Humidity", "kg/kg"], + "LWDOWN_NODE": [ + "Surface downward long-wave radiation flux", + "W/m^2", + ], + "SWDOWN_NODE": [ + "Surface downward short-wave radiation flux", + "W/m^2", + ], + "PSFC_NODE": ["Surface Pressure", "Pa"], + "RAINRATE_NODE": 
["Surface Precipitation Rate", "mm/s"], + }, +} +FORCINGINPUTMOD= { + "InputForcings": [ + "nx_global", + "ny_global", + "nx_local", + "ny_local", + "nx_local_corner", + "ny_local_corner", + "x_lower_bound", + "x_upper_bound", + "y_lower_bound", + "y_upper_bound", + "x_lower_bound_corner", + "x_upper_bound_corner", + "y_lower_bound_corner", + "y_upper_bound_corner", + "outFreq", + "lapseGrid", + "rqiClimoGrid", + "nwmPRISM_numGrid", + "nwmPRISM_denGrid", + "esmf_lats", + "esmf_lons", + "esmf_grid_in", + "esmf_grid_in_elem", + "regridObj", + "regridObj_elem", + "esmf_field_in", + "esmf_field_in_elem", + "esmf_field_out", + "esmf_field_out_elem", + # -------------------------------- + # Only used for CFSv2 bias correction + # as bias correction needs to take + # place prior to regridding. + "coarse_input_forcings1", + "coarse_input_forcings2", + # -------------------------------- + "regridded_forcings1", + "regridded_forcings2", + "globalPcpRate1", + "globalPcpRate2", + "regridded_forcings1_elem", + "regridded_forcings2_elem", + "globalPcpRate1_elem", + "globalPcpRate2_elem", + "ndv", + "file_in1", + "file_in2", + "fcst_hour1", + "fcst_hour2", + "fcst_date1", + "fcst_date2", + "height_elem", + "tmpFile", + "tmpFileHeight", + "regridded_precip1", + "regridded_precip2", + "regridded_precip1_elem", + "regridded_precip2_elem", + "_grib_vars", + "_cycle_freq", + "_t2dTmp", + "_psfcTmp", + "_final_forcings", + ], + "InputForcingsGridded": [ + "t2dTmp_elem", + "psfcTmp_elem", + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + ], + "InputForcingsHydrofabric": [ + "final_forcings_elem", + "height_elem", + "regridded_mask_elem", + "regridded_mask_elem_AORC", + "t2dTmp_elem", + "psfcTmp_elem", + ], + "PRODUCT_NAME": { + 1: "NLDAS2_GRIB1", + 2: "NARR_GRIB1", + 3: "GFS_Production_GRIB2", + 4: "NAM_Conus_Nest_GRIB2", + 5: "HRRR_Conus_GRIB2", + 6: "RAP_Conus_GRIB2", + 7: "CFSv2_6Hr_Global_GRIB2", + 8: "WRF_ARW_Hawaii_GRIB2", + 9: 
"GFS_Production_025d_GRIB2", + 10: "Custom_NetCDF_Hourly", + 11: "Custom_NetCDF_Hourly", + 12: "AORC", + 13: "NAM_Nest_3km_Hawaii", + 14: "NAM_Nest_3km_PuertoRico", + 15: "NAM_Nest_3km_Alaska", + 16: "NAM_Nest_3km_Hawaii_Radiation-Only", + 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", + 18: "WRF_ARW_PuertoRico_GRIB2", + 19: "HRRR_Alaska_GRIB2", + 20: "Alaska_AnA", + 21: "AORC_Alaska", + 22: "Alaska_ExtAnA", + 23: "ERA5", + 24: "NBM", + 25: "NDFD", + 26: "HRRR_15min", + 27: "NWM", + }, + "CYCLE_FREQ": { + 1: 60, + 2: 180, + 3: 360, + 4: 360, + 5: 60, + 6: 60, + 7: 360, + 8: 1440, + 9: 360, + 10: -9999, + 11: -9999, + 12: -9999, + 13: 360, + 14: 360, + 15: 360, + 16: 360, + 17: 360, + 18: 1440, + 19: 180, + 20: 180, + 21: -9999, + 22: 180, + 23: -9999, + 24: 60, + 25: 1440, + 26: 15, + 27: -9999, + }, + "GRIB_VARS": { + 1: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 2: None, + 3: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "PRATE", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 4: None, + 5: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "CPOFP", + ], + 6: [ + "TMP", + "SPFH", + "UGRD", + "VGRD", + "APCP", + "DSWRF", + "DLWRF", + "PRES", + "FROZR", + ], + 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 9: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 10: None, + 11: None, + 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], + 16: ["DSWRF", "DLWRF"], + 17: ["DSWRF", "DLWRF"], + 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], + 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", 
"PRES"], + 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 24: ["TMP", "APCP"], + 25: ["TMP", "WDIR", "WSPD", "APCP"], + 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], + 27: [ + "T2D", + "Q2D", + "U2D", + "V2D", + "RAINRATE", + "SWDOWN", + "LWDOWN", + "PSFC", + ], + }, + "GRIB_LEVELS": { + 1: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 2: None, + 3: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 4: None, + 5: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 6: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + "surface", + ], + 7: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 8: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + ], + 9: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 10: None, + 11: None, + 12: None, + 13: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 14: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 15: [ + "2 m above ground", + "2 m above ground", + 
"10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 16: ["surface", "surface"], + 17: ["surface", "surface"], + 18: [ + "80 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + ], + 19: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 20: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 21: None, + 22: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 23: None, + 24: ["2 m above ground", "surface"], + 25: [ + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + ], + 26: [ + "2 m above ground", + "2 m above ground", + "10 m above ground", + "10 m above ground", + "surface", + "surface", + "surface", + "surface", + ], + 27: None, + }, + "NET_CDF_VARS_NAMES": { + 1: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 2: None, + 3: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 4: None, + 5: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "CPOFP_surface", + ], + 6: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + "FROZR_surface", + ], + 7: [ + "TMP_2maboveground", + "SPFH_2maboveground", + 
"UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 8: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 9: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], + 12: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 13: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 14: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 15: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "PRATE_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 16: ["DSWRF_surface", "DLWRF_surface"], + 17: ["DSWRF_surface", "DLWRF_surface"], + 18: [ + "TMP_80maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "PRES_surface", + ], + 19: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 20: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 21: [ + "TMP_2maboveground", + "SPFH_2maboveground", 
+ "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 22: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], + 24: ["TMP_2maboveground", "APCP_surface"], + 25: [ + "TMP_2maboveground", + "WDIR_10maboveground", + "WIND_10maboveground", + "APCP_surface", + ], + 26: [ + "TMP_2maboveground", + "SPFH_2maboveground", + "UGRD_10maboveground", + "VGRD_10maboveground", + "APCP_surface", + "DSWRF_surface", + "DLWRF_surface", + "PRES_surface", + ], + 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], + }, + "GRIB_MES_IDX": { + 1: None, + 2: None, + 3: None, + 4: None, + 5: None, + 6: None, + 7: None, + 8: None, + 9: [33, 34, 39, 40, 43, 88, 91, 6], + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: None, + 27: None, + }, + "INPUT_MAP_OUTPUT": { + 1: [4, 5, 0, 1, 3, 7, 2, 6], + 2: None, + 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 4: None, + 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], + 7: [4, 5, 0, 1, 3, 7, 2, 6], + 8: [4, 5, 0, 1, 3, 6], + 9: [4, 5, 0, 1, 3, 7, 2, 6], + 10: [4, 5, 0, 1, 3, 7, 2, 6], + 11: [4, 5, 0, 1, 3, 7, 2, 6], + 12: [4, 5, 0, 1, 3, 7, 2, 6], + 13: [4, 5, 0, 1, 3, 7, 2, 6], + 14: [4, 5, 0, 1, 3, 7, 2, 6], + 15: [4, 5, 0, 1, 3, 7, 2, 6], + 16: [7, 2], + 17: [7, 2], + 18: [4, 5, 0, 1, 3, 6], + 19: [4, 5, 0, 1, 3, 7, 2, 6], + 20: [4, 5, 0, 1, 3, 7, 2, 6], + 21: [4, 5, 0, 1, 3, 7, 2, 6], + 22: [4, 5, 0, 1, 3, 7, 2, 6], + 23: [4, 5, 0, 1, 3, 7, 2, 6], + 24: [4, 3], + 25: [4, 0, 1, 3], + 26: [4, 5, 0, 1, 3, 7, 2, 6], + 27: [4, 5, 0, 1, 3, 7, 2, 6], + }, + "FORECAST_HORIZONS": { + 1: None, + 2: None, + 3: 
None, + 4: None, + 5: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 6: [ + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + 21, + 21, + 21, + 39, + 21, + 21, + ], + 7: None, + 8: None, + 9: None, + 10: None, + 11: None, + 12: None, + 13: None, + 14: None, + 15: None, + 16: None, + 17: None, + 18: None, + 19: None, + 20: None, + 21: None, + 22: None, + 23: None, + 24: None, + 25: None, + 26: [ + 18, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + 36, + 18, + 18, + 18, + 18, + 18, + ], + 27: None, + }, + "FIND_NEIGHBOR_FILES_MAP": { + 1: time_handling.find_nldas_neighbors, + 3: time_handling.find_gfs_neighbors, + 5: time_handling.find_conus_hrrr_neighbors, + 6: time_handling.find_conus_rap_neighbors, + 7: time_handling.find_cfsv2_neighbors, + 8: time_handling.find_hourly_wrf_arw_neighbors, + 9: time_handling.find_gfs_neighbors, + 10: time_handling.find_custom_hourly_neighbors, + 11: time_handling.find_custom_hourly_neighbors, + 12: time_handling.find_aorc_neighbors, + 13: time_handling.find_nam_nest_neighbors, + 14: time_handling.find_nam_nest_neighbors, + 15: time_handling.find_nam_nest_neighbors, + 16: time_handling.find_nam_nest_neighbors, + 17: time_handling.find_nam_nest_neighbors, + 18: time_handling.find_hourly_wrf_arw_neighbors, + 19: time_handling.find_ak_hrrr_neighbors, + 20: time_handling.find_ak_hrrr_neighbors, + 21: time_handling.find_aorc_neighbors, + 22: time_handling.find_ak_hrrr_neighbors, + 23: time_handling.find_era5_neighbors, + 24: time_handling.find_hourly_nbm_neighbors, + 25: time_handling.find_ndfd_neighbors, + 26: time_handling.find_input_neighbors, + 27: time_handling.find_nwm_neighbors, + }, + "REGRID_MAP": { + 1: regrid.regrid_conus_rap, + 3: regrid.regrid_gfs, + 5: regrid.regrid_conus_hrrr, + 6: regrid.regrid_conus_rap, 
+ 7: regrid.regrid_cfsv2, + 8: regrid.regrid_hourly_wrf_arw, + 9: regrid.regrid_gfs, + 10: regrid.regrid_custom_hourly_netcdf, + 11: regrid.regrid_custom_hourly_netcdf, + 12: regrid.regrid_custom_hourly_netcdf, + 13: regrid.regrid_nam_nest, + 14: regrid.regrid_nam_nest, + 15: regrid.regrid_nam_nest, + 16: regrid.regrid_nam_nest, + 17: regrid.regrid_nam_nest, + 18: regrid.regrid_hourly_wrf_arw, + 19: regrid.regrid_conus_hrrr, + 20: regrid.regrid_conus_hrrr, + 21: regrid.regrid_custom_hourly_netcdf, + 22: regrid.regrid_conus_hrrr, + 23: regrid.regrid_era5, + 24: regrid.regrid_hourly_nbm, + 25: regrid.regrid_ndfd, + 26: regrid.regrid_conus_hrrr, + 27: regrid.regrid_nwm, + }, + "TEMPORAL_INTERPOLATE_INPUTS_MAP": { + 0: no_interpolation, + 1: nearest_neighbor, + 2: weighted_average, + }, + "FILE_EXT": { + "GRIB1": ".grb", + "GRIB2": ".grib2", + "NETCDF": ".nc", + "NETCDF4": ".nc4", + "NWM": ".LDASIN_DOMAIN1", + "ZARR": ".zarr", + }, +}, +TEST_UTILS={ + "OLD_NEW_VAR_MAP": { + "q2dBiasCorrectOpt": "q2BiasCorrectOpt", + "paramDir": "dScaleParamDirs", + "border": "ignored_border_widths", + "regridOpt": "regrid_opt", + "userFcstHorizon": "fcst_input_horizons", + "inDir": "input_force_dirs", + "swDowscaleOpt": "swDownscaleOpt", + "t2dBiasCorrectOpt": "t2BiasCorrectOpt", + "userCycleOffset": "fcst_input_offsets", + "windBiasCorrectOpt": "windBiasCorrect", + "timeInterpOpt": "forceTemoralInterp", + "enforce": "input_force_mandatory", + "file_type": "input_force_types", + } +}, diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py index 2b5812ac..d7a3d473 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/disaggregateMod.py @@ -128,7 +128,7 @@ def ak_ext_ana_disaggregate( # (begin_date,end_date] date_iter += timedelta(hours=1) while date_iter <= end_date: - tmp_file = 
f"{input_forcings.inDir}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1" + tmp_file = f"{input_forcings.input_force_dirs}/{date_iter.strftime('%Y%m%d%H')}/{date_iter.strftime('%Y%m%d%H')}00.LDASIN_DOMAIN1" if os.path.exists(tmp_file): config_options.statusMsg = f"Reading {input_forcings.netcdf_var_names[3]} from {tmp_file} for disaggregation" err_handler.log_msg(config_options, mpi_config) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py index bdfbaeb2..4b521568 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/downscale.py @@ -39,7 +39,7 @@ def run_downscaling(input_forcings, config_options, geo_meta_wrf_hydro, mpi_conf # Dictionary mapping to shortwave radiation downscaling downscale_sw = {0: no_downscale, 1: ncar_topo_adj} - downscale_sw[input_forcings.swDowscaleOpt]( + downscale_sw[input_forcings.swDownscaleOpt]( input_forcings, config_options, geo_meta_wrf_hydro, mpi_config ) err_handler.check_program_status(config_options, mpi_config) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py index 9931100b..3851065c 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/forcingInputMod.py @@ -4,175 +4,101 @@ initializing ESMF grids and regrid objects), etc """ +from __future__ import annotations + import logging +from functools import lru_cache +from pathlib import Path +from typing import TYPE_CHECKING import numpy as np -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, -) -from 
NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import FORCINGINPUTMOD + +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import ( + MpiConfig, + ) from nextgen_forcings_ewts import MODULE_NAME -from . import regrid, time_handling, timeInterpMod - LOG = logging.getLogger(MODULE_NAME) class InputForcings: - """Abstract class defining parameters of a single input forcing product. + """Class defining parameters of a single input forcing product. - This is an abstract class that will define all the parameters + This is a class that will define all the parameters of a single input forcing product. """ - def __init__(self): - """Initialize all attributes and objects to None.""" - self.inDir = None - self.enforce = None - self.paramDir = None - self.userFcstHorizon = None - self.userCycleOffset = None - self.file_type = None - self.nx_global = None - self.ny_global = None - self.nx_local = None - self.ny_local = None - self.nx_local_corner = None - self.ny_local_corner = None - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None - self.x_lower_bound_corner = None - self.x_upper_bound_corner = None - self.y_lower_bound_corner = None - self.y_upper_bound_corner = None - self.outFreq = None - self.regridOpt = None - self.timeInterpOpt = None - self.t2dDownscaleOpt = None - self.lapseGrid = None - self.rqiClimoGrid = None - self.swDowscaleOpt = None - self.precipDownscaleOpt = None - self.nwmPRISM_numGrid = None - self.nwmPRISM_denGrid = None - self.q2dDownscaleOpt = None - self.psfcDownscaleOpt = None - self.t2dBiasCorrectOpt = None - self.swBiasCorrectOpt = None - 
self.precipBiasCorrectOpt = None - self.q2dBiasCorrectOpt = None - self.windBiasCorrectOpt = None - self.psfcBiasCorrectOpt = None - self.lwBiasCorrectOpt = None - self.esmf_lats = None - self.esmf_lons = None - self.esmf_grid_in = None - self.esmf_grid_in_elem = None + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcings with configuration options, geospatial metadata, and MPI configuration.""" + self.config_options = config_options + self.geo_meta = geo_meta + self.mpi_config = mpi_config + self.regridComplete = False self.regridComplete = False - self.regridObj = None - self.regridObj_elem = None - self.esmf_field_in = None - self.esmf_field_in_elem = None - self.esmf_field_out = None - self.esmf_field_out_elem = None - # -------------------------------- - # Only used for CFSv2 bias correction - # as bias correction needs to take - # place prior to regridding. 
- self.coarse_input_forcings1 = None - self.coarse_input_forcings2 = None - # -------------------------------- - self.regridded_forcings1 = None - self.regridded_forcings2 = None - self.globalPcpRate1 = None - self.globalPcpRate2 = None - self.regridded_mask = None - self.regridded_mask_AORC = None - self.final_forcings = None - self.regridded_forcings1_elem = None - self.regridded_forcings2_elem = None - self.globalPcpRate1_elem = None - self.globalPcpRate2_elem = None - self.regridded_mask_elem = None - self.regridded_mask_elem_AORC = None - self.final_forcings_elem = None - self.ndv = None - self.file_in1 = None - self.file_in2 = None - self.fcst_hour1 = None - self.fcst_hour2 = None - self.fcst_date1 = None - self.fcst_date2 = None - self.height = None - self.height_elem = None - self.tmpFile = None - self.tmpFileHeight = None - self.psfcTmp = None - self.t2dTmp = None - self.psfcTmp_elem = None - self.t2dTmp_elem = None self.rstFlag = 0 - self.regridded_precip1 = None - self.regridded_precip2 = None - self.regridded_precip1_elem = None - self.regridded_precip2_elem = None - self.border = None self.skip = False + self._keyValue = force_key + self.idx = idx + + for attr in FORCINGINPUTMOD[self.__class__.__base__.__name__]: + setattr(self, attr, None) - # Private attrs that have associated @property setter/getter - self._keyValue = None - self._file_ext = None - self._cycle_freq = None - self._grib_vars = None + self.find_neighbor_files_map = FORCINGINPUTMOD["FIND_NEIGHBOR_FILES_MAP"] + self.regrid_map = FORCINGINPUTMOD["REGRID_MAP"] + self.temporal_interpolate_inputs_map = FORCINGINPUTMOD["TEMPORAL_INTERPOLATE_INPUTS_MAP"] + + self.initialize_config_options() + + if self.force_count == 8 and 8 in self.input_map_output: + # TODO: this assumes that LQFRAC (8) is always the last grib var + self.grib_vars = self.grib_vars[:-1] + + # Obtain custom input cycle frequencies + if self.keyValue == 10 or self.keyValue == 11: + self.cycle_freq = 
self.config_options.customFcstFreq[self.custom_count] + + def initialize_config_options(self) -> None: + """Initialize configuration options from the config_options attribute.""" + for key, val in list(vars(self.config_options).items()): + if isinstance(val, list) and len(val) > 0: + setattr(self, key, val[self.idx]) + LOG.info(key) @property - def product_name(self): + def force_count(self) -> int: + """Force count.""" + return 9 if self.config_options.include_lqfrac else 8 + + @property + def product_name(self) -> str: """Map the forcing key value to the product name.""" - return { - 1: "NLDAS2_GRIB1", - 2: "NARR_GRIB1", - 3: "GFS_Production_GRIB2", - 4: "NAM_Conus_Nest_GRIB2", - 5: "HRRR_Conus_GRIB2", - 6: "RAP_Conus_GRIB2", - 7: "CFSv2_6Hr_Global_GRIB2", - 8: "WRF_ARW_Hawaii_GRIB2", - 9: "GFS_Production_025d_GRIB2", - 10: "Custom_NetCDF_Hourly", - 11: "Custom_NetCDF_Hourly", - 12: "AORC", - 13: "NAM_Nest_3km_Hawaii", - 14: "NAM_Nest_3km_PuertoRico", - 15: "NAM_Nest_3km_Alaska", - 16: "NAM_Nest_3km_Hawaii_Radiation-Only", - 17: "NAM_Nest_3km_PuertoRico_Radiation-Only", - 18: "WRF_ARW_PuertoRico_GRIB2", - 19: "HRRR_Alaska_GRIB2", - 20: "Alaska_AnA", - 21: "AORC_Alaska", - 22: "Alaska_ExtAnA", - 23: "ERA5", - 24: "NBM", - 25: "NDFD", - 26: "HRRR_15min", - 27: "NWM", - }[self.keyValue] + return FORCINGINPUTMOD["PRODUCT_NAME"][self.keyValue] @property - def keyValue(self): + def keyValue(self) -> int: """Get the forcing key value.""" if self._keyValue is None: raise RuntimeError("keyValue has not yet been set") return self._keyValue @keyValue.setter - def keyValue(self, val): + def keyValue(self, val: int) -> int: """Set the forcing key value.""" if self._keyValue is not None: raise RuntimeError(f"keyValue has already been set (to {self._keyValue}).") @@ -181,28 +107,15 @@ def keyValue(self, val): @property def file_ext(self) -> str: """Map the forcing file type to the file extension.""" - if self._file_ext is None: - # First call to getter, initialize - if 
self.file_type == "GRIB1": - ext = ".grb" - elif self.file_type == "GRIB2": - ext = ".grib2" - elif self.file_type == "NETCDF": - ext = ".nc" - elif self.file_type == "NETCDF4": - ext = ".nc4" - elif self.file_type == "NWM": - ext = ".LDASIN_DOMAIN1" - elif self.file_type == "ZARR": - ext = ".zarr" - else: - raise ValueError(f"Unexpected file_type: {self.file_type}") - self._file_ext = ext + ext = FORCINGINPUTMOD["FILE_EXT"].get(self.input_force_types) + if ext is None: + raise ValueError(f"Unexpected file_type: {self.input_force_types}") + self._file_ext = ext return self._file_ext @file_ext.setter - def file_ext(self, val): + def file_ext(self, val: str) -> str: if val is None: raise TypeError( "Cannot set file_ext to None since that value indicates an uninitialized state" @@ -214,39 +127,11 @@ def cycle_freq(self) -> int: """Map the forcing key value to the cycle frequency in minutes.""" if self._cycle_freq is None: # First call to getter, initialize - self._cycle_freq = { - 1: 60, - 2: 180, - 3: 360, - 4: 360, - 5: 60, - 6: 60, - 7: 360, - 8: 1440, - 9: 360, - 10: -9999, - 11: -9999, - 12: -9999, - 13: 360, - 14: 360, - 15: 360, - 16: 360, - 17: 360, - 18: 1440, - 19: 180, - 20: 180, - 21: -9999, - 22: 180, - 23: -9999, - 24: 60, - 25: 1440, - 26: 15, - 27: -9999, - }[self.keyValue] + self._cycle_freq = FORCINGINPUTMOD["CYCLE_FREQ"][self.keyValue] return self._cycle_freq @cycle_freq.setter - def cycle_freq(self, val): + def cycle_freq(self, val: int) -> int: if val is None: raise TypeError( "Cannot set cycle_freq to None since that value indicates an uninitialized state" @@ -258,78 +143,11 @@ def grib_vars(self) -> list[str] | None: """Map the forcing key value to the required GRIB variable names.""" if self._grib_vars is None: # First call to getter, initialize - self._grib_vars = { - 1: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 2: None, - 3: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "PRATE", - "DSWRF", - "DLWRF", - "PRES", - 
"CPOFP", - ], - 4: None, - 5: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "CPOFP", - ], - 6: [ - "TMP", - "SPFH", - "UGRD", - "VGRD", - "APCP", - "DSWRF", - "DLWRF", - "PRES", - "FROZR", - ], - 7: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 8: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 9: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 10: None, - 11: None, - 12: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 13: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 14: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 15: ["TMP", "SPFH", "UGRD", "VGRD", "PRATE", "DSWRF", "DLWRF", "PRES"], - 16: ["DSWRF", "DLWRF"], - 17: ["DSWRF", "DLWRF"], - 18: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "PRES"], - 19: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 20: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 21: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 22: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 23: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 24: ["TMP", "APCP"], - 25: ["TMP", "WDIR", "WSPD", "APCP"], - 26: ["TMP", "SPFH", "UGRD", "VGRD", "APCP", "DSWRF", "DLWRF", "PRES"], - 27: [ - "T2D", - "Q2D", - "U2D", - "V2D", - "RAINRATE", - "SWDOWN", - "LWDOWN", - "PSFC", - ], - }[self.keyValue] + self._grib_vars = FORCINGINPUTMOD["GRIB_VARS"][self.keyValue] return self._grib_vars @grib_vars.setter - def grib_vars(self, val): + def grib_vars(self, val: list[str]) -> list[str] | None: if val is None: raise TypeError( "Cannot set grib_vars to None since that value indicates an uninitialized state" @@ -337,579 +155,37 @@ def grib_vars(self, val): self._grib_vars = val @property - def grib_levels(self): + def grib_levels(self) -> str: """Map the forcing key value to the required GRIB variable levels.""" - return { 
- 1: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 2: None, - 3: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 4: None, - 5: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 6: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - "surface", - ], - 7: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 8: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 9: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 10: None, - 11: None, - 12: None, - 13: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 14: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 15: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 16: ["surface", "surface"], - 17: ["surface", "surface"], - 18: [ - "80 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - ], - 19: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 20: [ - "2 m above ground", - "2 m 
above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 21: None, - 22: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 23: None, - 24: ["2 m above ground", "surface"], - 25: [ - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - ], - 26: [ - "2 m above ground", - "2 m above ground", - "10 m above ground", - "10 m above ground", - "surface", - "surface", - "surface", - "surface", - ], - 27: None, - }[self.keyValue] + return FORCINGINPUTMOD["GRIB_LEVELS"][self.keyValue] @property - def netcdf_var_names(self): + def netcdf_var_names(self) -> str: """Map the forcing key value to the required NetCDF variable names.""" - return { - 1: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 2: None, - 3: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 4: None, - 5: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "CPOFP_surface", - ], - 6: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - "FROZR_surface", - ], - 7: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 8: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 9: [ - 
"TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 10: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 11: ["T2D", "Q2D", "U10", "V10", "RAINRATE", "DSWRF", "DLWRF", "PRES"], - 12: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 13: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 14: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 15: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "PRATE_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 16: ["DSWRF_surface", "DLWRF_surface"], - 17: ["DSWRF_surface", "DLWRF_surface"], - 18: [ - "TMP_80maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "PRES_surface", - ], - 19: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 20: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 21: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 22: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - 
"DLWRF_surface", - "PRES_surface", - ], - 23: ["t2m", "d2m", "u10", "v10", "mtpr", "msdwswrf", "msdwlwrf", "sp"], - 24: ["TMP_2maboveground", "APCP_surface"], - 25: [ - "TMP_2maboveground", - "WDIR_10maboveground", - "WIND_10maboveground", - "APCP_surface", - ], - 26: [ - "TMP_2maboveground", - "SPFH_2maboveground", - "UGRD_10maboveground", - "VGRD_10maboveground", - "APCP_surface", - "DSWRF_surface", - "DLWRF_surface", - "PRES_surface", - ], - 27: ["T2D", "Q2D", "U2D", "V2D", "RAINRATE", "SWDOWN", "LWDOWN", "PSFC"], - }[self.keyValue] + return FORCINGINPUTMOD["NET_CDF_VARS_NAMES"][self.keyValue] @property - def grib_mes_idx(self): + def grib_mes_idx(self) -> list[int] | None: """Map the forcing key value to the required GRIB message ids. arrays that store the message ids of required forcing variables for each forcing type TODO fill these arrays for forcing types other than GFS """ - return { - 1: None, - 2: None, - 3: None, - 4: None, - 5: None, - 6: None, - 7: None, - 8: None, - 9: [33, 34, 39, 40, 43, 88, 91, 6], - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: None, - 27: None, - }[self.keyValue] + return FORCINGINPUTMOD["GRIB_MES_IDX"][self.keyValue] @property - def input_map_output(self): + def input_map_output(self) -> list[int] | None: """Map the forcing key value to the input to output variable mapping.""" - return { - 1: [4, 5, 0, 1, 3, 7, 2, 6], - 2: None, - 3: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 4: None, - 5: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 6: [4, 5, 0, 1, 3, 7, 2, 6, 8], - 7: [4, 5, 0, 1, 3, 7, 2, 6], - 8: [4, 5, 0, 1, 3, 6], - 9: [4, 5, 0, 1, 3, 7, 2, 6], - 10: [4, 5, 0, 1, 3, 7, 2, 6], - 11: [4, 5, 0, 1, 3, 7, 2, 6], - 12: [4, 5, 0, 1, 3, 7, 2, 6], - 13: [4, 5, 0, 1, 3, 7, 2, 6], - 14: [4, 5, 0, 1, 3, 7, 2, 6], - 15: [4, 5, 0, 1, 3, 7, 2, 6], - 16: [7, 2], - 17: [7, 2], - 18: [4, 5, 0, 1, 3, 6], - 19: [4, 5, 
0, 1, 3, 7, 2, 6], - 20: [4, 5, 0, 1, 3, 7, 2, 6], - 21: [4, 5, 0, 1, 3, 7, 2, 6], - 22: [4, 5, 0, 1, 3, 7, 2, 6], - 23: [4, 5, 0, 1, 3, 7, 2, 6], - 24: [4, 3], - 25: [4, 0, 1, 3], - 26: [4, 5, 0, 1, 3, 7, 2, 6], - 27: [4, 5, 0, 1, 3, 7, 2, 6], - }[self.keyValue] + return FORCINGINPUTMOD["INPUT_MAP_OUTPUT"][self.keyValue] @property - def forecast_horizons(self): + def forecast_horizons(self) -> list[int] | None: """Map the forcing key value to the forecast horizons list.""" - return { - 1: None, - 2: None, - 3: None, - 4: None, - 5: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 6: [ - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - 21, - 21, - 21, - 39, - 21, - 21, - ], - 7: None, - 8: None, - 9: None, - 10: None, - 11: None, - 12: None, - 13: None, - 14: None, - 15: None, - 16: None, - 17: None, - 18: None, - 19: None, - 20: None, - 21: None, - 22: None, - 23: None, - 24: None, - 25: None, - 26: [ - 18, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - 36, - 18, - 18, - 18, - 18, - 18, - ], - 27: None, - }[self.keyValue] - - @property - def find_neighbor_files_map(self): - """Map the forcing key value to the neighbor file finding function.""" - return { - 1: time_handling.find_nldas_neighbors, - 3: time_handling.find_gfs_neighbors, - 5: time_handling.find_input_neighbors, - 6: time_handling.find_input_neighbors, - 7: time_handling.find_cfsv2_neighbors, - 8: time_handling.find_hourly_wrf_arw_neighbors, - 9: time_handling.find_gfs_neighbors, - 10: time_handling.find_custom_hourly_neighbors, - 11: time_handling.find_custom_hourly_neighbors, - 12: time_handling.find_aorc_neighbors, - 13: time_handling.find_nam_nest_neighbors, - 14: time_handling.find_nam_nest_neighbors, - 15: time_handling.find_nam_nest_neighbors, - 16: 
time_handling.find_nam_nest_neighbors, - 17: time_handling.find_nam_nest_neighbors, - 18: time_handling.find_hourly_wrf_arw_neighbors, - 19: time_handling.find_ak_hrrr_neighbors, - 20: time_handling.find_ak_hrrr_neighbors, - 21: time_handling.find_aorc_neighbors, - 22: time_handling.find_ak_hrrr_neighbors, - 23: time_handling.find_era5_neighbors, - 24: time_handling.find_hourly_nbm_neighbors, - 25: time_handling.find_ndfd_neighbors, - 26: time_handling.find_input_neighbors, - 27: time_handling.find_nwm_neighbors, - } + return FORCINGINPUTMOD["FORECAST_HORIZONS"][self.keyValue] def calc_neighbor_files( self, config_options: ConfigOptions, dcurrent, mpi_config: MpiConfig - ): + ) -> None: """Calculate the last/next expected input forcing file based on the current time step. Function that will calculate the last/next expected @@ -929,43 +205,12 @@ def calc_neighbor_files( self, config_options, dcurrent, mpi_config ) - @property - def regrid_map(self): - """Map the forcing key value to the regridding function.""" - return { - 1: regrid.regrid_conus_rap, - 3: regrid.regrid_gfs, - 5: regrid.regrid_conus_hrrr, - 6: regrid.regrid_conus_rap, - 7: regrid.regrid_cfsv2, - 8: regrid.regrid_hourly_wrf_arw, - 9: regrid.regrid_gfs, - 10: regrid.regrid_custom_hourly_netcdf, - 11: regrid.regrid_custom_hourly_netcdf, - 12: regrid.regrid_custom_hourly_netcdf, - 13: regrid.regrid_nam_nest, - 14: regrid.regrid_nam_nest, - 15: regrid.regrid_nam_nest, - 16: regrid.regrid_nam_nest, - 17: regrid.regrid_nam_nest, - 18: regrid.regrid_hourly_wrf_arw, - 19: regrid.regrid_conus_hrrr, - 20: regrid.regrid_conus_hrrr, - 21: regrid.regrid_custom_hourly_netcdf, - 22: regrid.regrid_conus_hrrr, - 23: regrid.regrid_era5, - 24: regrid.regrid_hourly_nbm, - 25: regrid.regrid_ndfd, - 26: regrid.regrid_conus_hrrr, - 27: regrid.regrid_nwm, - } - def regrid_inputs( self, config_options: ConfigOptions, - wrf_hyro_geo_meta: GeoMetaWrfHydro, + geo_meta: GeoMeta, mpi_config: MpiConfig, - ): + ) -> None: """Regrid 
input forcings to the final output grids for this timestep. Polymorphic function that will regrid input forcings to the @@ -978,22 +223,11 @@ def regrid_inputs( """ # Establish a mapping dictionary that will point the # code to the functions to that will regrid the data. - self.regrid_map[self.keyValue]( - self, config_options, wrf_hyro_geo_meta, mpi_config - ) - - @property - def temporal_interpolate_inputs_map(self): - """Map the temporal interpolation options to the functions.""" - return { - 0: timeInterpMod.no_interpolation, - 1: timeInterpMod.nearest_neighbor, - 2: timeInterpMod.weighted_average, - } + self.regrid_map[self.keyValue](self, config_options, geo_meta, mpi_config) def temporal_interpolate_inputs( self, config_options: ConfigOptions, mpi_config: MpiConfig - ): + ) -> None: """Run temporal interpolation of the input forcing grids that have been regridded. Polymorphic function that will run temporal interpolation of @@ -1005,14 +239,327 @@ def temporal_interpolate_inputs( :param mpi_config: :return: """ - self.temporal_interpolate_inputs_map[self.timeInterpOpt]( + self.temporal_interpolate_inputs_map[self.forceTemoralInterp]( self, config_options, mpi_config ) +class InputForcingsGridded(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single gridded input forcing product. 
+ """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsGridded with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in FORCINGINPUTMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty( + [ + self.force_count, + self.geo_meta.ny_local, + self.geo_meta.nx_local, + ], + np.float64, + ) + + @final_forcings.setter + def final_forcings(self, value): + "Setter for final_forcings." + self._final_forcings = value + + @property + @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + + @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + + @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._psfcTmp is not None: + return 
self._psfcTmp + elif self.q2dDownscaleOpt > 0: + return np.empty( + [self.geo_meta.ny_local, self.geo_meta.nx_local], + np.float32, + ) + + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + + +class InputForcingsHydrofabric(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single hydrofabric input forcing product. + """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsHydrofabric with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in FORCINGINPUTMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + + @final_forcings.setter + def final_forcings(self, value): + "Setter for final_forcings." 
+ self._final_forcings = value + + @property + @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) + + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._psfcTmp is not None: + return self._psfcTmp + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) + + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + + +class InputForcingsUnstructured(InputForcings): + """Abstract class defining parameters of a single input forcing product. + + This is an abstract class that will define all the parameters + of a single unstructured input forcing product. 
+ """ + + def __init__( + self, + force_key: int, + idx: int = None, + config_options: ConfigOptions = None, + geo_meta: GeoMeta = None, + mpi_config: MpiConfig = None, + ) -> None: + """Initialize InputForcingsUnstructured with configuration options, geospatial metadata, and MPI configuration.""" + super().__init__(force_key, idx, config_options, geo_meta, mpi_config) + for attr in FORCINGINPUTMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def t2dTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._t2dTmp is not None: + return self._t2dTmp + elif self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) + + @t2dTmp.setter + def t2dTmp(self, value): + """Setter for t2dTmp""" + self._t2dTmp = value + + @property + @lru_cache + def psfcTmp(self): + """Initialize temporary array for specific humidity downscaling.""" + if self._psfcTmp is not None: + return self._psfcTmp + elif self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local], np.float32) + + @psfcTmp.setter + def psfcTmp(self, value): + """Setter for psfcTmp""" + self._psfcTmp = value + + @property + @lru_cache + def t2dTmp_elem(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def psfcTmp_elem(self): + """Initialize temporary array for specific humidity downscaling.""" + if self.q2dDownscaleOpt > 0: + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def final_forcings(self): + """Initialize the local final grid of values.""" + if self._final_forcings is not None: + return self._final_forcings + else: + return np.empty([self.force_count, self.geo_meta.ny_local], np.float64) + + @final_forcings.setter + def final_forcings(self, value): + """Setter for final_forcings.""" + self._final_forcings = value + + @property 
+ @lru_cache + def height(self): + """Initialize the local height grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask(self): + """Initialize the local regridded mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def regridded_mask_AORC(self): + """Initialize the local regridded AORC mask grid.""" + return np.empty([self.geo_meta.ny_local], np.float32) + + @property + @lru_cache + def final_forcings_elem(self): + """Initialize the local final grid of values on elements.""" + return np.empty( + [self.force_count, self.geo_meta.ny_local_elem], + np.float64, + ) + + @property + @lru_cache + def height_elem(self): + """Initialize the local height grid on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def regridded_mask_elem(self): + """Initialize the local regridded mask grid on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + @property + @lru_cache + def regridded_mask_elem_AORC(self): + """Initialize the local regridded AORC mask grid on elements.""" + return np.empty([self.geo_meta.ny_local_elem], np.float32) + + +INPUTFORCINGS = { + "gridded": InputForcingsGridded, + "unstructured": InputForcingsUnstructured, + "hydrofabric": InputForcingsHydrofabric, +} + + def init_dict( config_options: ConfigOptions, - geo_meta_wrf_hydro: GeoMetaWrfHydro, + geo_meta: GeoMeta, mpi_config: MpiConfig, ) -> dict: """Initialize the input forcing dictionary. @@ -1023,178 +570,27 @@ def init_dict( :param config_options: :return: input_dict - A dictionary defining our inputs. """ - # Initialize an empty dictionary input_dict = {} - if config_options.precip_only_flag: return input_dict # Loop through and initialize the empty class for each product. 
custom_count = 0 - for force_tmp in range(0, config_options.number_inputs): - force_key = config_options.input_forcings[force_tmp] - input_dict[force_key] = InputForcings() - input_dict[force_key].keyValue = force_key - input_dict[force_key].regridOpt = config_options.regrid_opt[force_tmp] - input_dict[force_key].enforce = config_options.input_force_mandatory[force_tmp] - input_dict[force_key].timeInterpOpt = config_options.forceTemoralInterp[ - force_tmp - ] - input_dict[force_key].q2dDownscaleOpt = config_options.q2dDownscaleOpt[ - force_tmp - ] - input_dict[force_key].t2dDownscaleOpt = config_options.t2dDownscaleOpt[ - force_tmp - ] - input_dict[force_key].precipDownscaleOpt = config_options.precipDownscaleOpt[ - force_tmp - ] - input_dict[force_key].swDowscaleOpt = config_options.swDownscaleOpt[force_tmp] - input_dict[force_key].psfcDownscaleOpt = config_options.psfcDownscaleOpt[ - force_tmp - ] - # Check to make sure the necessary input files for downscaling are present. - # if input_dict[force_key].t2dDownscaleOpt == 2: - # # We are using a pre-calculated lapse rate on the WRF-Hydro grid. - # pathCheck = config_options.downscaleParamDir = "/T2M_Lapse_Rate_" + \ - # input_dict[force_key].product_name + ".nc" - # if not os.path.isfile(pathCheck): - # config_options.errMsg = "Expected temperature lapse rate grid: " + \ - # pathCheck + " not found." 
- # raise Exception - - input_dict[force_key].t2dBiasCorrectOpt = config_options.t2BiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].q2dBiasCorrectOpt = config_options.q2BiasCorrectOpt[ - force_tmp - ] - input_dict[ - force_key - ].precipBiasCorrectOpt = config_options.precipBiasCorrectOpt[force_tmp] - input_dict[force_key].swBiasCorrectOpt = config_options.swBiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].lwBiasCorrectOpt = config_options.lwBiasCorrectOpt[ - force_tmp - ] - input_dict[force_key].windBiasCorrectOpt = config_options.windBiasCorrect[ - force_tmp - ] - input_dict[force_key].psfcBiasCorrectOpt = config_options.psfcBiasCorrectOpt[ - force_tmp - ] - - input_dict[force_key].inDir = config_options.input_force_dirs[force_tmp] - input_dict[force_key].paramDir = config_options.dScaleParamDirs[force_tmp] - input_dict[force_key].file_type = config_options.input_force_types[force_tmp] - input_dict[force_key].userFcstHorizon = config_options.fcst_input_horizons[ - force_tmp - ] - input_dict[force_key].userCycleOffset = config_options.fcst_input_offsets[ - force_tmp - ] - - input_dict[force_key].border = config_options.ignored_border_widths[force_tmp] - - # If we have specified specific humidity downscaling, establish arrays to hold - # temporary temperature arrays that are un-downscaled. 
- if input_dict[force_key].q2dDownscaleOpt > 0: - if config_options.grid_type == "gridded": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float32, - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float32, - ) - elif config_options.grid_type == "unstructured": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].t2dTmp_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].psfcTmp_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - elif config_options.grid_type == "hydrofabric": - input_dict[force_key].t2dTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].psfcTmp = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - # Initialize the local final grid of values. This is represntative - # of the local grid for this forcing, for a specific output timesetp. - # This grid will be updated from one output timestep to another, and - # also through downscaling and bias correction. 
- force_count = 9 if config_options.include_lqfrac else 8 - if force_count == 8 and 8 in input_dict[force_key].input_map_output: - # TODO: this assumes that LQFRAC (8) is always the last grib var - input_dict[force_key].grib_vars = input_dict[force_key].grib_vars[:-1] + for idx in range(0, config_options.number_inputs): + force_key = config_options.input_forcings[idx] - if config_options.grid_type == "gridded": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], - np.float64, - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local, geo_meta_wrf_hydro.nx_local], np.float32 - ) - elif config_options.grid_type == "unstructured": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local], np.float64 - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].final_forcings_elem = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local_elem], np.float64 - ) - input_dict[force_key].height_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].regridded_mask_elem = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - input_dict[force_key].regridded_mask_elem_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local_elem], np.float32 - ) - elif config_options.grid_type == "hydrofabric": - input_dict[force_key].final_forcings = np.empty( - [force_count, geo_meta_wrf_hydro.ny_local], 
np.float64 - ) - input_dict[force_key].height = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 - ) - input_dict[force_key].regridded_mask_AORC = np.empty( - [geo_meta_wrf_hydro.ny_local], np.float32 + if config_options.grid_type not in INPUTFORCINGS: + raise TypeError( + f"Invalid grid type specified: {config_options.grid_type}. Valid options are: {list(INPUTFORCINGS.keys())}" ) + + input_dict[force_key] = INPUTFORCINGS[config_options.grid_type]( + force_key, idx, config_options, geo_meta, mpi_config + ) + # input_dict[force_key].keyValue = force_key + # Obtain custom input cycle frequencies if force_key == 10 or force_key == 11: - input_dict[force_key].cycle_freq = config_options.customFcstFreq[ - custom_count - ] custom_count = custom_count + 1 return input_dict diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py index 8902ebbe..4620c458 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/geoMod.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import math -from time import time +from pathlib import Path -import netCDF4 import numpy as np # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: @@ -9,1360 +10,1453 @@ import shapely from scipy import spatial -from .. import esmf_utils, nc_utils -from . 
import err_handler - try: import esmpy as ESMF except ImportError: import ESMF import logging +from functools import lru_cache, wraps +from typing import Any + +import xarray as xr +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, +) +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.consts import GEOMOD +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME LOG = logging.getLogger(MODULE_NAME) -class GeoMetaWrfHydro: - """Abstract class for handling information about the WRF-Hydro domain we are processing forcings too.""" - - def __init__(self): - """Initialize GeoMetaWrfHydro class variables.""" - self.nx_global = None - self.ny_global = None - self.nx_global_elem = None - self.ny_global_elem = None - self.dx_meters = None - self.dy_meters = None - self.nx_local = None - self.ny_local = None - self.nx_local_elem = None - self.ny_local_elem = None - self.x_lower_bound = None - self.x_upper_bound = None - self.y_lower_bound = None - self.y_upper_bound = None - self.latitude_grid = None - self.longitude_grid = None - self.element_ids = None - self.element_ids_global = None - self.latitude_grid_elem = None - self.longitude_grid_elem = None - self.lat_bounds = None - self.lon_bounds = None - self.mesh_inds = None - self.mesh_inds_elem = None - self.height = None - self.height_elem = None - self.sina_grid = None - self.cosa_grid = None - self.nodeCoords = None - self.centerCoords = None - self.inds = None - self.slope = None - self.slp_azi = None - self.slope_elem = None - self.slp_azi_elem = None - self.esmf_grid = None - self.esmf_lat = None - self.esmf_lon = None - self.crs_atts = None - self.x_coord_atts = None - self.x_coords = None - self.y_coord_atts = None - self.y_coords = None - self.spatial_global_atts = None - - def get_processor_bounds(self, config_options): - """Calculate the local grid boundaries for this processor. 
- - ESMF operates under the hood and the boundary values - are calculated within the ESMF software. - :return: - """ - if config_options.grid_type == "gridded": - self.x_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] - self.x_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] - self.y_lower_bound = self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] - self.y_upper_bound = self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] - self.nx_local = self.x_upper_bound - self.x_lower_bound - self.ny_local = self.y_upper_bound - self.y_lower_bound - elif config_options.grid_type == "unstructured": - self.nx_local = len(self.esmf_grid.coords[0][1]) - self.ny_local = len(self.esmf_grid.coords[0][1]) - self.nx_local_elem = len(self.esmf_grid.coords[1][1]) - self.ny_local_elem = len(self.esmf_grid.coords[1][1]) - # LOG.debug("ESMF Mesh nx local node is " + str(self.nx_local)) - # LOG.debug("ESMF Mesh nx local elem is " + str(self.nx_local_elem)) - elif config_options.grid_type == "hydrofabric": - self.nx_local = len(self.esmf_grid.coords[1][1]) - self.ny_local = len(self.esmf_grid.coords[1][1]) - # self.nx_local_poly = len(self.esmf_poly_coords) - # self.ny_local_poly = len(self.esmf_poly_coords) - # LOG.debug("ESMF Mesh nx local elem is " + str(self.nx_local)) - # LOG.debug("ESMF Mesh nx local poly is " + str(self.nx_local_poly)) - # LOG.debug("WRF-HYDRO LOCAL X BOUND 1 = " + str(self.x_lower_bound)) - # LOG.debug("WRF-HYDRO LOCAL X BOUND 2 = " + str(self.x_upper_bound)) - # LOG.debug("WRF-HYDRO LOCAL Y BOUND 1 = " + str(self.y_lower_bound)) - # LOG.debug("WRF-HYDRO LOCAL Y BOUND 2 = " + str(self.y_upper_bound)) - - def initialize_destination_geo_gridded(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. 
+def set_none(func) -> Any: + """Set the output of a function to None if an exception is raised.""" - Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid - being processed to, along with the local parameters - for this particular processor. - :return: - """ - # Open the geogrid file and extract necessary information - # to create ESMF fields. - if mpi_config.rank == 0: - try: - idTmp = netCDF4.Dataset(config_options.geogrid, "r") - except Exception as e: - config_options.errMsg = ( - "Unable to open the WRF-Hydro geogrid file: " - + config_options.geogrid - ) - raise Exception - if idTmp.variables[config_options.lat_var].ndim == 3: - try: - self.nx_global = idTmp.variables[config_options.lat_var].shape[2] - except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size from latitude variable in: " - + config_options.geogrid - ) - raise Exception + @wraps(func) + def wrapper(self) -> Any: + """Set the output of a function to None if an exception is raised.""" + if self.spatial_metadata_exists: + return func(self) + else: + return None - try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[1] - except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid - ) - raise Exception + return wrapper - try: - self.dx_meters = idTmp.DX - except Exception as e: - config_options.errMsg = ( - "Unable to extract DX global attribute in: " - + config_options.geogrid - ) - raise Exception - try: - self.dy_meters = idTmp.DY - except Exception as e: - config_options.errMsg = ( - "Unable to extract DY global attribute in: " - + config_options.geogrid - ) - raise Exception - elif idTmp.variables[config_options.lat_var].ndim == 2: - try: - self.nx_global = idTmp.variables[config_options.lat_var].shape[1] - except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size from latitude 
variable in: " - + config_options.geogrid - ) - raise Exception +def broadcast(prop) -> Any: + """Broadcast the output of a function to all processors.""" - try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid - ) - raise Exception + @wraps(prop) + def wrapper(self) -> Any: + """Broadcast the output of a function to all processors.""" + result = prop.fget(self) + return self.mpi_config.comm.bcast(result, root=0) - try: - self.dx_meters = idTmp.variables[config_options.lon_var].dx - except Exception as e: - config_options.errMsg = ( - "Unable to extract DX global attribute in: " - + config_options.geogrid - ) - raise Exception + return property(wrapper) - try: - self.dy_meters = idTmp.variables[config_options.lat_var].dy - except Exception as e: - config_options.errMsg = ( - "Unable to extract DY global attribute in: " - + config_options.geogrid - ) - raise Exception - else: - try: - self.nx_global = idTmp.variables[config_options.lon_var].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size from longitude variable in: " - + config_options.geogrid - ) - raise Exception +def barrier(prop) -> Any: + """Synchronize all processors at a barrier.""" - try: - self.ny_global = idTmp.variables[config_options.lat_var].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size from latitude in: " - + config_options.geogrid - ) - raise Exception - if config_options.input_forcings[0] != 23: - try: - self.dx_meters = idTmp.variables[config_options.lon_var].dx - except Exception as e: - config_options.errMsg = ( - "Unable to extract dx metadata attribute in: " - + config_options.geogrid - ) - raise Exception - - try: - self.dy_meters = idTmp.variables[config_options.lat_var].dy - except Exception as e: - config_options.errMsg = ( - "Unable 
to extract dy metadata attribute in: " - + config_options.geogrid - ) - raise Exception - else: - # Manually input the grid spacing since ERA5-Interim does not - # internally have this geospatial information within the netcdf file - self.dx_meters = 31000 - self.dy_meters = 31000 + @wraps(prop) + def wrapper(self) -> Any: + """Synchronize all processors at a barrier.""" + result = prop.fget(self) + self.mpi_config.comm.barrier() + return result - # mpi_config.comm.barrier() + return property(wrapper) - # Broadcast global dimensions to the other processors. - self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int - ) - self.dx_meters = mpi_config.broadcast_parameter( - self.dx_meters, config_options, param_type=float - ) - self.dy_meters = mpi_config.broadcast_parameter( - self.dy_meters, config_options, param_type=float - ) - # mpi_config.comm.barrier() +def scatter(prop) -> Any: + """Scatter the output of a function to all processors.""" + @wraps(prop) + def wrapper(self) -> Any: + """Scatter the output of a function to all processors.""" try: - self.esmf_grid = ESMF.Grid( - np.array([self.ny_global, self.nx_global]), - staggerloc=ESMF.StaggerLoc.CENTER, - coord_sys=ESMF.CoordSys.SPH_DEG, - ) + var, name, config_options, post_slice = prop.fget(self) + var = self.mpi_config.scatter_array(self, var, config_options) + if post_slice: + return var[:, :] + else: + return var except Exception as e: - config_options.errMsg = ( - "Unable to create ESMF grid for WRF-Hydro geogrid: " - + config_options.geogrid + self.config_options.errMsg = ( + f"Unable to subset {name} from geogrid file into ESMF object" ) - raise Exception - - # mpi_config.comm.barrier() - - self.esmf_lat = self.esmf_grid.get_coords(1) - self.esmf_lon = self.esmf_grid.get_coords(0) + raise e - # mpi_config.comm.barrier() + return property(wrapper) - # Obtain the 
- local boundaries for this processor. - self.get_processor_bounds(config_options) - # Scatter global XLAT_M grid to processors.. - if mpi_config.rank == 0: - if idTmp.variables[config_options.lat_var].ndim == 3: - varTmp = idTmp.variables[config_options.lat_var][0, :, :] - elif idTmp.variables[config_options.lat_var].ndim == 2: - varTmp = idTmp.variables[config_options.lat_var][:, :] - elif idTmp.variables[config_options.lat_var].ndim == 1: - lat = idTmp.variables[config_options.lat_var][:] - lon = idTmp.variables[config_options.lon_var][:] - varTmp = np.meshgrid(lon, lat)[1] - lat = None - lon = None - # Flag to grab entire array for AWS slicing - if config_options.aws: - self.lat_bounds = varTmp - else: - varTmp = None +class GeoMeta: + """GeoMeta class for handling information about the geometry metadata. - # mpi_config.comm.barrier() + Extract names of variable attributes from each of the input geospatial variables. These + can change, so we are making this as flexible as possible to accommodate future changes. + """ - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize GeoMeta class variables.""" + self.config_options = config_options + self.mpi_config = mpi_config + for attr in GEOMOD[self.__class__.__base__.__name__]: + setattr(self, attr, None) - # mpi_config.comm.barrier() + @property + @lru_cache + def spatial_metadata_exists(self) -> bool: + """Check to make sure the geospatial metadata file exists.""" + if self.config_options.spatial_meta is None: + return False + else: + return True - # Place the local lat/lon grid slices from the parent geogrid file into - # the ESMF lat/lon grids.
+ @property + @lru_cache + def geogrid_ds(self) -> xr.Dataset: + """Get the geogrid file path.""" + try: + with xr.open_dataset(self.config_options.geogrid) as ds: + return ds + except Exception as e: + self.config_options.errMsg = "Unable to open geogrid file with xarray" + raise e + + @property + @lru_cache + @set_none + def esmf_ds(self) -> xr.Dataset: + """Open the geospatial metadata file and return the xarray dataset object.""" try: - self.esmf_lat[:, :] = varSubTmp - self.latitude_grid = varSubTmp - varSubTmp = None - varTmp = None + with xr.open_dataset(self.config_options.spatial_meta) as ds: + esmf_ds = ds.load() except Exception as e: - config_options.errMsg = ( - "Unable to subset latitude from geogrid file into ESMF object" + self.config_options.errMsg = ( + f"Unable to open esmf file: {self.config_options.spatial_meta}" ) - raise Exception + raise e + self._check_variables_exist(esmf_ds) + return esmf_ds + + def _check_variables_exist(self, esmf_ds: xr.Dataset): + """Check to make sure the expected variables are present in the geospatial metadata file.""" + if self.mpi_config.rank == 0: + for var in ["crs", "x", "y"]: + if var not in esmf_ds.variables.keys(): + self.config_options.errMsg = f"Unable to locate {var} variable in: {self.config_options.spatial_meta}" + raise Exception - # mpi_config.comm.barrier() + def ncattrs(self, var: str) -> list: + """Extract variable attribute names from the geospatial metadata file.""" + return self.get_esmf_var(var).ncattrs() + + def get_var(self, ds: xr.Dataset, var: str) -> xr.DataArray: + """Get a variable from a xr.Dataset.""" + if var is not None: + try: + return ds.variables[var] + except Exception as e: + self.config_options.errMsg = f"Unable to extract {var} variable from: {self.config_options.spatial_meta} due to {str(e)}" + raise e + + def get_geogrid_var(self, var: str) -> xr.DataArray: + """Get a variable from the geogrid file.""" + return self.get_var(self.geogrid_ds, var) + + def get_esmf_var(self, 
var: str) -> xr.DataArray: + """Get a variable from the geospatial metadata file.""" + return self.get_var(self.esmf_ds, var) + + @property + @lru_cache + @set_none + def _crs_att_names(self) -> list: + """Extract crs attribute names from the geospatial metadata file.""" + return self.ncattrs("crs") + + @property + @lru_cache + @set_none + def _x_coord_att_names(self) -> list: + """Extract x coordinate attribute names from the geospatial metadata file.""" + return self.ncattrs("x") + + @property + @lru_cache + @set_none + def _y_coord_att_names(self) -> list: + """Extract y coordinate attribute names from the geospatial metadata file.""" + return self.ncattrs("y") + + def getncattr(self, var: str) -> dict: + """Extract variable attribute values from the geospatial metadata file.""" + return { + item: self.get_esmf_var(var).getncattr(item) for item in self.ncattrs(var) + } + + @property + @lru_cache + @set_none + def x_coord_atts(self) -> dict: + """Extract x coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("x") + + @property + @lru_cache + @set_none + def y_coord_atts(self) -> dict: + """Extract y coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("y") + + @property + @lru_cache + @set_none + def crs_atts(self) -> dict: + """Extract crs coordinate attribute values from the geospatial metadata file.""" + return self.getncattr("crs") + + @property + @lru_cache + @set_none + def _global_att_names(self) -> list: + """Extract global attribute values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + try: + return self.esmf_ds.ncattrs() + except Exception as e: + self.config_options.errMsg = f"Unable to extract global attribute names from: {self.config_options.spatial_meta}" + raise e + + @property + @lru_cache + @set_none + def spatial_global_atts(self) -> dict: + """Extract global attribute values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + 
try: + return { + item: self.esmf_ds.getncattr(item) + for item in self._global_att_names + } + except Exception as e: + self.config_options.errMsg = f"Unable to extract global attributes from: {self.config_options.spatial_meta}" + raise e + + def extract_coords(self, dimension: str) -> np.ndarray: + """Extract coordinate values from the geospatial metadata file.""" + if self.mpi_config.rank == 0: + if len(self.get_esmf_var(dimension).shape) == 1: + return self.get_esmf_var(dimension)[:].data + elif len(self.get_esmf_var(dimension).shape) == 2: + return self.get_esmf_var(dimension)[:, :].data + + @property + @lru_cache + @set_none + def x_coords(self) -> np.ndarray: + """Extract x coordinate values from the geospatial metadata file.""" + return self.extract_coords("x") + + @property + @lru_cache + @set_none + def y_coords(self) -> np.ndarray: + """Extract y coordinate values from the geospatial metadata file. + + Check to see if the Y coordinates are North-South. If so, flip them. + """ + if self.mpi_config.rank == 0: + y_coords = self.extract_coords("y") + if len(self.get_esmf_var("y").shape) == 1: + if y_coords[1] < y_coords[0]: + y_coords[:] = np.flip(y_coords[:], axis=0) + elif len(self.get_esmf_var("y").shape) == 2: + if y_coords[1, 0] > y_coords[0, 0]: + y_coords[:, :] = np.flipud(y_coords[:, :]) + return y_coords - # Scatter global XLONG_M grid to processors.. 
- if mpi_config.rank == 0: - if idTmp.variables[config_options.lat_var].ndim == 3: - varTmp = idTmp.variables[config_options.lon_var][0, :, :] - elif idTmp.variables[config_options.lon_var].ndim == 2: - varTmp = idTmp.variables[config_options.lon_var][:, :] - elif idTmp.variables[config_options.lon_var].ndim == 1: - lat = idTmp.variables[config_options.lat_var][:] - lon = idTmp.variables[config_options.lon_var][:] - varTmp = np.meshgrid(lon, lat)[0] - lat = None - lon = None - # Flag to grab entire array for AWS slicing - if config_options.aws: - self.lon_bounds = varTmp - else: - varTmp = None - # mpi_config.comm.barrier() +class GriddedGeoMeta(GeoMeta): + """Class for handling information about the gridded domains for forcing.""" - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize GriddedGeoMeta class variables. - # mpi_config.comm.barrier() + Initialization function to initialize ESMF through ESMPy, + calculate the global parameters of the WRF-Hydro grid + being processed to, along with the local parameters + for this particular processor. + :return: + """ + super().__init__(config_options, mpi_config) + for attr in GEOMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the gridded domain.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.lat_var.shape[2] + elif self.ndim_lat == 2: + return self.lat_var.shape[1] + else: + # NOTE Is this correct? 
using lon_var + return self.lon_var.shape[0] + except Exception as e: + self.config_options.errMsg = f"Unable to extract X dimension size from longitude variable in: {self.config_options.geogrid}" + raise e + + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the gridded domain.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.lat_var.shape[1] + else: + return self.lat_var.shape[0] + except Exception as e: + self.config_options.errMsg = f"Unable to extract Y dimension size from latitude in: {self.config_options.geogrid}" + raise e + + @property + @lru_cache + def ndim_lat(self) -> int: + """Get the number of dimensions for the latitude variable.""" + return self.lat_var.ndim + + @property + @lru_cache + def ndim_lon(self) -> int: + """Get the number of dimensions for the longitude variable.""" + return self.lon_var.ndim + + @broadcast + @property + @lru_cache + def dy_meters(self) -> float: + """Get the DY distance in meters for the latitude variable.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.geogrid_ds.DY + elif self.ndim_lat == 2: + return self.lat_var.dy + else: + if self.config_options.input_forcings[0] != 23: + return self.lat_var.dy + else: + # Manually input the grid spacing since ERA5-Interim does not + # internally have this geospatial information within the netcdf file + return 31000 + except Exception as e: + self.config_options.errMsg = f"Unable to extract DY global attribute in: {self.config_options.geogrid}" + raise e + + @broadcast + @property + @lru_cache + def dx_meters(self) -> float: + """Get the DX distance in meters for the longitude variable.""" + if self.mpi_config.rank == 0: + try: + if self.ndim_lat == 3: + return self.geogrid_ds.DX + elif self.ndim_lat == 2: + return self.lon_var.dx + else: + if self.config_options.input_forcings[0] != 23: + return self.lon_var.dx + else: + # Manually input the grid spacing since 
ERA5-Interim does not + # internally have this geospatial information within the netcdf file + return 31000 + except Exception as e: + self.config_options.errMsg = f"Unable to extract dx metadata attribute in: {self.config_options.geogrid}" + raise e + @property + @lru_cache + def esmf_grid(self) -> ESMF.Grid: + """Create the ESMF grid object for the gridded domain.""" try: - self.esmf_lon[:, :] = varSubTmp - self.longitude_grid = varSubTmp - varSubTmp = None - varTmp = None - except Exception as e: - config_options.errMsg = ( - "Unable to subset longitude from geogrid file into ESMF object" + return ESMF.Grid( + np.array([self.ny_global, self.nx_global]), + staggerloc=ESMF.StaggerLoc.CENTER, + coord_sys=ESMF.CoordSys.SPH_DEG, ) - raise Exception + except Exception as e: + self.config_options.errMsg = f"Unable to create ESMF grid for WRF-Hydro geogrid: {self.config_options.geogrid}" + raise e + + @property + @lru_cache + def esmf_lat(self) -> np.ndarray: + """Get the ESMF latitude grid.""" + esmf_lat = self.esmf_grid.get_coords(1) + esmf_lat[:, :] = self.latitude_grid + return esmf_lat + + @property + @lru_cache + def esmf_lon(self) -> np.ndarray: + """Get the ESMF longitude grid.""" + esmf_lon = self.esmf_grid.get_coords(0) + esmf_lon[:, :] = self.longitude_grid + return esmf_lon + + @scatter + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the gridded domain.""" + # Scatter global XLAT_M grid to processors.. 
+ if self.mpi_config.rank == 0: + if self.ndim_lat == 3: + var_tmp = self.lat_var[0, :, :] + elif self.ndim_lat == 2: + var_tmp = self.lat_var[:, :] + elif self.ndim_lat == 1: + lat = self.lat_var[:] + lon = self.lon_var[:] + var_tmp = np.meshgrid(lon, lat)[1] - # mpi_config.comm.barrier() + # Flag to grab entire array for AWS slicing + if self.config_options.aws: + self.lat_bounds = var_tmp + else: + var_tmp = None + return var_tmp, "latitude_grid", self.config_options, False + + @property + @lru_cache + def lon_var(self) -> xr.DataArray: + """Get the longitude variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.lon_var) + + @property + @lru_cache + def lat_var(self) -> xr.DataArray: + """Get the latitude variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.lat_var) + + @scatter + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the gridded domain.""" + # Scatter global XLONG_M grid to processors.. 
+ if self.mpi_config.rank == 0: + if self.ndim_lat == 3: + var_tmp = self.lon_var[0, :, :] + elif self.ndim_lat == 2: + var_tmp = self.lon_var[:, :] + elif self.ndim_lat == 1: + lat = self.lat_var[:] + lon = self.lon_var[:] + var_tmp = np.meshgrid(lon, lat)[0] + # Flag to grab entire array for AWS slicing + if self.config_options.aws: + self.lon_bounds = var_tmp + else: + var_tmp = None + + return var_tmp, "longitude_grid", self.config_options, False + + @property + @lru_cache + def cosalpha_var(self) -> xr.DataArray: + """Get the COSALPHA variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.cosalpha_var) + + @scatter + @property + @lru_cache + def cosa_grid(self) -> np.ndarray: + """Get the COSALPHA grid for the gridded domain.""" if ( - config_options.cosalpha_var is not None - and config_options.sinalpha_var is not None + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None ): # Scatter the COSALPHA,SINALPHA grids to the processors. 
- if mpi_config.rank == 0: - if idTmp.variables[config_options.cosalpha_var].ndim == 3: - varTmp = idTmp.variables[config_options.cosalpha_var][0, :, :] + if self.mpi_config.rank == 0: + if self.cosalpha_var.ndim == 3: + cosa = self.cosa_grid_from_geogrid_n3 else: - varTmp = idTmp.variables[config_options.cosalpha_var][:, :] + cosa = self.cosalpha_var[:, :] else: - varTmp = None - # mpi_config.comm.barrier() - - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) - # mpi_config.comm.barrier() + cosa = None - self.cosa_grid = varSubTmp[:, :] - varSubTmp = None - varTmp = None + return cosa, "cosa", self.config_options, True - if mpi_config.rank == 0: - if idTmp.variables[config_options.sinalpha_var].ndim == 3: - varTmp = idTmp.variables[config_options.sinalpha_var][0, :, :] - else: - varTmp = idTmp.variables[config_options.sinalpha_var][:, :] - else: - varTmp = None - # mpi_config.comm.barrier() - - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) - # mpi_config.comm.barrier() - self.sina_grid = varSubTmp[:, :] - varSubTmp = None - varTmp = None - - if config_options.hgt_var is not None: - # Read in a scatter the WRF-Hydro elevation, which is used for downscaling - # purposes. 
- if mpi_config.rank == 0: - if idTmp.variables[config_options.hgt_var].ndim == 3: - varTmp = idTmp.variables[config_options.hgt_var][0, :, :] - else: - varTmp = idTmp.variables[config_options.hgt_var][:, :] - else: - varTmp = None - # mpi_config.comm.barrier() - - varSubTmp = mpi_config.scatter_array(self, varTmp, config_options) - # mpi_config.comm.barrier() - self.height = varSubTmp - varSubTmp = None - varTmp = None + @property + @lru_cache + def sinalpha_var(self) -> xr.DataArray: + """Get the SINALPHA variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.sinalpha_var) + @property + @lru_cache + def sina_grid(self) -> np.ndarray: + """Get the SINALPHA grid for the gridded domain.""" if ( - config_options.cosalpha_var is not None - and config_options.sinalpha_var is not None + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None ): - # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will - # be used for downscaling purposes. 
- if mpi_config.rank == 0: - try: - slopeTmp, slp_azi_tmp = self.calc_slope(idTmp, config_options) - except Exception: - raise Exception + if self.mpi_config.rank == 0: + if self.sinalpha_var.ndim == 3: + sina = self.sina_grid_from_geogrid_n3 + else: + sina = self.sinalpha_var[:, :] else: - slopeTmp = None - slp_azi_tmp = None - # mpi_config.comm.barrier() - - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, config_options) - self.slope = slopeSubTmp[:, :] - slopeSubTmp = None + sina = None - slp_azi_sub = mpi_config.scatter_array(self, slp_azi_tmp, config_options) - self.slp_azi = slp_azi_sub[:, :] - slp_azi_tmp = None + return sina, "sina", self.config_options, True - elif ( - config_options.slope_var is not None - and config_options.slope_azimuth_var is not None - ): - if mpi_config.rank == 0: - if idTmp.variables[config_options.slope_var].ndim == 3: - varTmp = idTmp.variables[config_options.slope_var][0, :, :] - else: - varTmp = idTmp.variables[config_options.slope_var][:, :] - else: - varTmp = None + @property + @lru_cache + def hgt_var(self) -> xr.DataArray: + """Get the HGT variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.hgt_var) - slopeSubTmp = mpi_config.scatter_array(self, varTmp, config_options) - self.slope = slopeSubTmp - varTmp = None + @scatter + @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the gridded domain. - if mpi_config.rank == 0: - if idTmp.variables[config_options.slope_azimuth_var].ndim == 3: - varTmp = idTmp.variables[config_options.slope_azimuth_var][0, :, :] + Used for downscaling purposes. 
+ """ + if self.config_options.hgt_var is not None: + if self.mpi_config.rank == 0: + if self.hgt_var.ndim == 3: + height = self.hgt_grid_from_geogrid_n3 else: - varTmp = idTmp.variables[config_options.slope_azimuth_var][:, :] - else: - varTmp = None - - slp_azi_sub = mpi_config.scatter_array(self, varTmp, config_options) - self.slp_azi = slp_azi_sub[:, :] - varTmp = None - - elif config_options.hgt_var is not None: - # Calculate the slope from the domain using elevation of the gridded model and other approximations - if mpi_config.rank == 0: - try: - slopeTmp, slp_azi_tmp = self.calc_slope_gridded( - idTmp, config_options - ) - except Exception: - raise Exception + height = self.hgt_var[:, :] else: - slopeTmp = None - slp_azi_tmp = None - # mpi_config.comm.barrier() - - slopeSubTmp = mpi_config.scatter_array(self, slopeTmp, config_options) - self.slope = slopeSubTmp[:, :] - slopeSubTmp = None - - slp_azi_sub = mpi_config.scatter_array(self, slp_azi_tmp, config_options) - self.slp_azi = slp_azi_sub[:, :] - slp_azi_tmp = None + height = None + + return height, "height", self.config_options, False + + @property + @lru_cache + def slope_var(self) -> xr.DataArray: + """Get the slope variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.slope_var) + + @property + @lru_cache + def slope_azimuth_var(self) -> xr.DataArray: + """Get the slope azimuth variable from the geospatial metadata file.""" + return self.get_geogrid_var(self.config_options.slope_azimuth_var) + + @property + @lru_cache + def dx(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable.""" + dx = np.empty( + ( + self.lat_var.shape[0], + self.lon_var.shape[0], + ), + dtype=float, + ) + dx[:] = self.lon_var.dx + return dx + + @property + @lru_cache + def dy(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable.""" + dy = np.empty( + ( + self.lat_var.shape[0], + self.lon_var.shape[0], + ), + 
dtype=float, + ) + dy[:] = self.lat_var.dy + return dy + + @property + @lru_cache + def dz(self) -> np.ndarray: + """Calculate the dz distance in meters for the height variable.""" + dz_init = np.diff(self.hgt_var, axis=0) + dz = np.empty(self.dx.shape, dtype=float) + dz[0 : dz_init.shape[0], 0 : dz_init.shape[1]] = dz_init + dz[dz_init.shape[0] :, :] = dz_init[-1, :] + return dz - if mpi_config.rank == 0: - # Close the geogrid file - try: - idTmp.close() - except Exception as e: - config_options.errMsg = ( - "Unable to close geogrid file: " + config_options.geogrid - ) - raise Exception + @scatter + @property + @lru_cache + def slope(self) -> np.ndarray: + """Calculate slope grids needed for incoming shortwave radiation downscaling. - # Reset temporary variables to free up memory - slopeTmp = None - slp_azi_tmp = None - varTmp = None + Calculate slope from sina_grid, cosa_grid, and height variables if they are + present in the geogrid file, otherwise calculate slope from slope and slope + azimuth variables, and if those are not present, calculate slope from height variable. - def initialize_geospatial_metadata(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. - Function that will read in crs/x/y geospatial metadata and coordinates - from the optional geospatial metadata file IF it was specified by the user in - the configuration file. - :param config_options: - :return: + Calculate grid coordinates dx distances in meters + based on general geospatial formula approximations + on a spherical grid. """ - # We will only read information on processor 0. This data is not necessary for the - # other processors, and is only used in the output routines. - if mpi_config.rank == 0: - # Open the geospatial metadata file. 
- try: - idTmp = netCDF4.Dataset(config_options.spatial_meta, "r") - except Exception as e: - config_options.errMsg = ( - "Unable to open spatial metadata file: " - + config_options.spatial_meta - ) - raise Exception - - # Make sure the expected variables are present in the file. - if "crs" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate crs variable in: " + config_options.spatial_meta - ) - raise Exception - if "x" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate x variable in: " + config_options.spatial_meta - ) - raise Exception - if "y" not in idTmp.variables.keys(): - config_options.errMsg = ( - "Unable to locate y variable in: " + config_options.spatial_meta - ) - raise Exception - # Extract names of variable attributes from each of the input geospatial variables. These - # can change, so we are making this as flexible as possible to accomodate future changes. - try: - crs_att_names = idTmp.variables["crs"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract crs attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - x_coord_att_names = idTmp.variables["x"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract x attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - y_coord_att_names = idTmp.variables["y"].ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract y attribute names from: " - + config_options.spatial_meta - ) - raise Exception - # Extract attribute values - try: - self.x_coord_atts = { - item: idTmp.variables["x"].getncattr(item) - for item in x_coord_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coord_atts = { - item: idTmp.variables["y"].getncattr(item) - for item in y_coord_att_names - } - 
except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.crs_atts = { - item: idTmp.variables["crs"].getncattr(item) - for item in crs_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract crs coordinate attributes from: " - + config_options.spatial_meta - ) - raise Exception - - # Extract global attributes - try: - global_att_names = idTmp.ncattrs() - except Exception as e: - config_options.errMsg = ( - "Unable to extract global attribute names from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.spatial_global_atts = { - item: idTmp.getncattr(item) for item in global_att_names - } - except Exception as e: - config_options.errMsg = ( - "Unable to extract global attributes from: " - + config_options.spatial_meta - ) - raise Exception - - # Extract x/y coordinate values - if len(idTmp.variables["x"].shape) == 1: - try: - self.x_coords = idTmp.variables["x"][:].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coords = idTmp.variables["y"][:].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - # Check to see if the Y coordinates are North-South. If so, flip them. 
- if self.y_coords[1] < self.y_coords[0]: - self.y_coords[:] = np.flip(self.y_coords[:], axis=0) - - if len(idTmp.variables["x"].shape) == 2: - try: - self.x_coords = idTmp.variables["x"][:, :].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract x coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - try: - self.y_coords = idTmp.variables["y"][:, :].data - except Exception as e: - config_options.errMsg = ( - "Unable to extract y coordinate values from: " - + config_options.spatial_meta - ) - raise Exception - # Check to see if the Y coordinates are North-South. If so, flip them. - if self.y_coords[1, 0] > self.y_coords[0, 0]: - self.y_coords[:, :] = np.flipud(self.y_coords[:, :]) - - # Close the geospatial metadata file. - try: - idTmp.close() - except Exception as e: - config_options.errMsg = ( - "Unable to close spatial metadata file: " - + config_options.spatial_meta - ) - raise Exception - - # mpi_config.comm.barrier() + if ( + self.config_options.cosalpha_var is not None + and self.config_options.sinalpha_var is not None + ): + slope = self.slope_from_cosalpha_sinalpha + elif ( + self.config_options.slope_var is not None + and self.config_options.slope_azimuth_var is not None + ): + slope = self.slope_from_slope_azimuth + elif self.config_options.hgt_var is not None: + slope = self.slope_from_height + else: + raise Exception( + "Unable to calculate slope grid for incoming shortwave radiation downscaling. No geospatial metadata variables provided to calculate slope." + ) + return slope, "slope", self.config_options, True - def calc_slope(self, idTmp, config_options): - """Calculate slope grids needed for incoming shortwave radiation downscaling. + @scatter + @property + @lru_cache + def slp_azi(self) -> np.ndarray: + """Calculate slope azimuth grids needed for incoming shortwave radiation downscaling. 
 -        Function to calculate slope grids needed for incoming shortwave radiation downscaling -        later during the program. -        :param idTmp: -        :param config_options: -        :return: +        Calculate slp_azi from sina_grid, cosa_grid, and height variables if they are +        present in the geogrid file, otherwise calculate slope azimuth from slope and slope +        azimuth variables, and if those are not present, calculate slope azimuth from height variable. """ -        # First extract the sina,cosa, and elevation variables from the geogrid file. -        try: -            sinaGrid = idTmp.variables[config_options.sinalpha_var][0, :, :] -        except Exception as e: -            config_options.errMsg = ( -                "Unable to extract SINALPHA from: " + config_options.geogrid -            ) -            raise +        if ( +            self.config_options.cosalpha_var is not None +            and self.config_options.sinalpha_var is not None +        ): +            slp_azi = self.slp_azi_from_cosalpha_sinalpha +        elif ( +            self.config_options.slope_var is not None +            and self.config_options.slope_azimuth_var is not None +        ): +            slp_azi = self.slp_azi_from_slope_azimuth -        try: -            cosaGrid = idTmp.variables[config_options.cosalpha_var][0, :, :] -        except Exception as e: -            config_options.errMsg = ( -                "Unable to extract COSALPHA from: " + config_options.geogrid +        elif self.config_options.hgt_var is not None: +            slp_azi = self.slp_azi_from_height +        else: +            raise Exception( +                "Unable to calculate slope azimuth grid for incoming shortwave radiation downscaling. No geospatial metadata variables provided to calculate slope azimuth."
) - raise - try: - heightDest = idTmp.variables[config_options.hgt_var][0, :, :] - except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M from: " + config_options.geogrid - ) - raise + return slp_azi, "slp_azi", self.config_options, True - # Ensure cosa/sina are correct dimensions - if sinaGrid.shape[0] != self.ny_global or sinaGrid.shape[1] != self.nx_global: - config_options.errMsg = ( - "SINALPHA dimensions mismatch in: " + config_options.geogrid + @property + @lru_cache + def slp_azi_from_slope_azimuth(self) -> np.ndarray: + """Calculate slope azimuth from slope and slope azimuth variables.""" + if self.mpi_config.rank == 0: + if self.slope_azimuth_var.ndim == 3: + return self.slope_azimuth_var[0, :, :] + else: + return self.slope_azimuth_var[:, :] + + @property + @lru_cache + def slp_azi_from_height(self) -> np.ndarray: + """Calculate slope azimuth from height variable.""" + if self.mpi_config.rank == 0: + return (180 / np.pi) * np.arctan(self.dx / self.dy) + + @property + @lru_cache + def slope_from_height(self) -> np.ndarray: + """Calculate slope from height variable.""" + if self.mpi_config.rank == 0: + return self.dz / np.sqrt((self.dx**2) + (self.dy**2)) + + @property + @lru_cache + def slope_from_slope_azimuth(self) -> np.ndarray: + """Calculate slope from slope and slope azimuth variables.""" + if self.mpi_config.rank == 0: + if self.slope_var.ndim == 3: + return self.slope_var[0, :, :] + else: + return self.slope_var[:, :] + + @property + @lru_cache + def slope_from_cosalpha_sinalpha(self) -> np.ndarray: + """Calculate slope from COSALPHA and SINALPHA variables.""" + if self.mpi_config.rank == 0: + slope_tmp = np.arctan( + (self.hx[self.ind_orig] ** 2 + self.hy[self.ind_orig] ** 2) ** 0.5 ) - raise Exception - if cosaGrid.shape[0] != self.ny_global or cosaGrid.shape[1] != self.nx_global: - config_options.errMsg = ( - "COSALPHA dimensions mismatch in: " + config_options.geogrid + slope_tmp[np.where(slope_tmp < 1e-4)] = 0.0 + 
return slope_tmp + + @property + @lru_cache + def slp_azi_from_cosalpha_sinalpha(self) -> np.ndarray: + """Calculate slope azimuth from COSALPHA and SINALPHA variables.""" + if self.mpi_config.rank == 0: + slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) + slp_azi[np.where(self.slope_from_cosalpha_sinalpha < 1e-4)] = 0.0 + ind_valesmf_ds = np.where(self.slope_from_cosalpha_sinalpha >= 1e-4) + slp_azi[ind_valesmf_ds] = ( + np.arctan2(self.hx[ind_valesmf_ds], self.hy[ind_valesmf_ds]) + math.pi ) - raise Exception - if ( - heightDest.shape[0] != self.ny_global - or heightDest.shape[1] != self.nx_global - ): - config_options.errMsg = ( - "HGT_M dimension mismatch in: " + config_options.geogrid + ind_valesmf_ds = np.where(self.cosa_grid_from_geogrid_n3 >= 0.0) + slp_azi[ind_valesmf_ds] = slp_azi[ind_valesmf_ds] - np.arcsin( + self.sina_grid_from_geogrid_n3[ind_valesmf_ds] ) - raise Exception - - # Establish constants + ind_valesmf_ds = np.where(self.cosa_grid_from_geogrid_n3 < 0.0) + slp_azi[ind_valesmf_ds] = slp_azi[ind_valesmf_ds] - ( + math.pi - np.arcsin(self.sina_grid_from_geogrid_n3[ind_valesmf_ds]) + ) + return slp_azi + + @property + @lru_cache + def ind_orig(self) -> tuple[np.ndarray, np.ndarray]: + """Calculate the indices of the original grid points for the height variable.""" + return np.where(self.hgt_grid_from_geogrid_n3 == self.hgt_grid_from_geogrid_n3) + + @property + @lru_cache + def hx(self) -> np.ndarray: + """Calculate the slope in the x direction from the height variable.""" rdx = 1.0 / self.dx_meters - rdy = 1.0 / self.dy_meters msftx = 1.0 - msfty = 1.0 - - slopeOut = np.empty([self.ny_global, self.nx_global], np.float32) toposlpx = np.empty([self.ny_global, self.nx_global], np.float32) - toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) - slp_azi = np.empty([self.ny_global, self.nx_global], np.float32) - ipDiff = np.empty([self.ny_global, self.nx_global], np.int32) - jpDiff = np.empty([self.ny_global, 
self.nx_global], np.int32) + ip_diff = np.empty([self.ny_global, self.nx_global], np.int32) hx = np.empty([self.ny_global, self.nx_global], np.float32) - hy = np.empty([self.ny_global, self.nx_global], np.float32) # Create index arrays that will be used to calculate slope. - xTmp = np.arange(self.nx_global) - yTmp = np.arange(self.ny_global) - xGrid = np.tile(xTmp[:], (self.ny_global, 1)) - yGrid = np.repeat(yTmp[:, np.newaxis], self.nx_global, axis=1) - indOrig = np.where(heightDest == heightDest) - indIp1 = ((indOrig[0]), (indOrig[1] + 1)) - indIm1 = ((indOrig[0]), (indOrig[1] - 1)) - indJp1 = ((indOrig[0] + 1), (indOrig[1])) - indJm1 = ((indOrig[0] - 1), (indOrig[1])) - indIp1[1][np.where(indIp1[1] >= self.nx_global)] = self.nx_global - 1 - indJp1[0][np.where(indJp1[0] >= self.ny_global)] = self.ny_global - 1 - indIm1[1][np.where(indIm1[1] < 0)] = 0 - indJm1[0][np.where(indJm1[0] < 0)] = 0 - - ipDiff[indOrig] = xGrid[indIp1] - xGrid[indIm1] - jpDiff[indOrig] = yGrid[indJp1] - yGrid[indJm1] - - toposlpx[indOrig] = ( - (heightDest[indIp1] - heightDest[indIm1]) * msftx * rdx - ) / ipDiff[indOrig] - toposlpy[indOrig] = ( - (heightDest[indJp1] - heightDest[indJm1]) * msfty * rdy - ) / jpDiff[indOrig] - hx[indOrig] = toposlpx[indOrig] - hy[indOrig] = toposlpy[indOrig] - slopeOut[indOrig] = np.arctan((hx[indOrig] ** 2 + hy[indOrig] ** 2) ** 0.5) - slopeOut[np.where(slopeOut < 1e-4)] = 0.0 - slp_azi[np.where(slopeOut < 1e-4)] = 0.0 - indValidTmp = np.where(slopeOut >= 1e-4) - slp_azi[indValidTmp] = np.arctan2(hx[indValidTmp], hy[indValidTmp]) + math.pi - indValidTmp = np.where(cosaGrid >= 0.0) - slp_azi[indValidTmp] = slp_azi[indValidTmp] - np.arcsin(sinaGrid[indValidTmp]) - indValidTmp = np.where(cosaGrid < 0.0) - slp_azi[indValidTmp] = slp_azi[indValidTmp] - ( - math.pi - np.arcsin(sinaGrid[indValidTmp]) - ) - - # Reset temporary arrays to None to free up memory - toposlpx = None - toposlpy = None - heightDest = None - sinaGrid = None - cosaGrid = None - indValidTmp = 
None - xTmp = None - yTmp = None - xGrid = None - ipDiff = None - jpDiff = None - indOrig = None - indJm1 = None - indJp1 = None - indIm1 = None - indIp1 = None - hx = None - hy = None - - return slopeOut, slp_azi - - def calc_slope_gridded(self, idTmp, config_options): - """Calculate slope grids needed for incoming shortwave radiation downscaling. + x_tmp = np.arange(self.nx_global) + x_grid = np.tile(x_tmp[:], (self.ny_global, 1)) + ind_ip1 = ((self.ind_orig[0]), (self.ind_orig[1] + 1)) + ind_im1 = ((self.ind_orig[0]), (self.ind_orig[1] - 1)) + ind_ip1[1][np.where(ind_ip1[1] >= self.nx_global)] = self.nx_global - 1 + ind_im1[1][np.where(ind_im1[1] < 0)] = 0 + + ip_diff[self.ind_orig] = x_grid[ind_ip1] - x_grid[ind_im1] + toposlpx[self.ind_orig] = ( + ( + self.hgt_grid_from_geogrid_n3[ind_ip1] + - self.hgt_grid_from_geogrid_n3[ind_im1] + ) + * msftx + * rdx + ) / ip_diff[self.ind_orig] + hx = np.empty([self.ny_global, self.nx_global], np.float32) + hx[self.ind_orig] = toposlpx[self.ind_orig] + return hx - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. This calculates the slopes for grid cells - :param idTmp: - :param config_options: - :return: - """ - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + @property + @lru_cache + def hy(self) -> np.ndarray: + """Calculate the slope in the y direction from the height variable.""" + rdy = 1.0 / self.dy_meters + msfty = 1.0 + toposlpy = np.empty([self.ny_global, self.nx_global], np.float32) + jp_diff = np.empty([self.ny_global, self.nx_global], np.int32) + hy = np.empty([self.ny_global, self.nx_global], np.float32) + # Create index arrays that will be used to calculate slope. 
+ y_tmp = np.arange(self.ny_global) + y_grid = np.repeat(y_tmp[:, np.newaxis], self.nx_global, axis=1) + ind_jp1 = ((self.ind_orig[0] + 1), (self.ind_orig[1])) + ind_jm1 = ((self.ind_orig[0] - 1), (self.ind_orig[1])) + ind_jp1[0][np.where(ind_jp1[0] >= self.ny_global)] = self.ny_global - 1 + ind_jm1[0][np.where(ind_jm1[0] < 0)] = 0 + + jp_diff[self.ind_orig] = y_grid[ind_jp1] - y_grid[ind_jm1] + toposlpy[self.ind_orig] = ( + ( + self.hgt_grid_from_geogrid_n3[ind_jp1] + - self.hgt_grid_from_geogrid_n3[ind_jm1] + ) + * msfty + * rdy + ) / jp_diff[self.ind_orig] + hy[self.ind_orig] = toposlpy[self.ind_orig] + return hy + + @property + @lru_cache + def x_lower_bound(self) -> float: + """Get the local x lower bound for this processor.""" + return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][1] + + @property + @lru_cache + def x_upper_bound(self) -> float: + """Get the local x upper bound for this processor.""" + return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][1] + + @property + @lru_cache + def y_lower_bound(self) -> float: + """Get the local y lower bound for this processor.""" + return self.esmf_grid.lower_bounds[ESMF.StaggerLoc.CENTER][0] + + @property + @lru_cache + def y_upper_bound(self) -> float: + """Get the local y upper bound for this processor.""" + return self.esmf_grid.upper_bounds[ESMF.StaggerLoc.CENTER][0] + + @property + @lru_cache + def nx_local(self) -> int: + """Get the local x dimension size for this processor.""" + return self.x_upper_bound - self.x_lower_bound + + @property + @lru_cache + def ny_local(self) -> int: + """Get the local y dimension size for this processor.""" + return self.y_upper_bound - self.y_lower_bound + + @property + @lru_cache + def sina_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the SINALPHA grid for the gridded domain directly from the geogrid file.""" try: - lons = idTmp.variables[config_options.lon_var][:] - lats = idTmp.variables[config_options.lat_var][:] + return 
self.check_grid(self.sinalpha_var[0, :, :]) except Exception as e: - config_options.errMsg = ( - "Unable to extract gridded coordinates in " + config_options.geogrid + self.config_options.errMsg = ( + f"Unable to extract SINALPHA from: {self.config_options.geogrid}" + ) + raise e + + def check_grid(self, grid: np.ndarray) -> np.ndarray: + """Check to make sure the grid dimensions match the expected dimensions for the gridded domain.""" + if grid.shape[0] != self.ny_global or grid.shape[1] != self.nx_global: + self.config_options.errMsg = ( + f"Grid dimensions mismatch in: {self.config_options.geogrid}" ) raise Exception + return grid + + @property + @lru_cache + def cosa_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the COSALPHA grid for the gridded domain directly from the geogrid file.""" try: - dx = np.empty( - ( - idTmp.variables[config_options.lat_var].shape[0], - idTmp.variables[config_options.lon_var].shape[0], - ), - dtype=float, - ) - dy = np.empty( - ( - idTmp.variables[config_options.lat_var].shape[0], - idTmp.variables[config_options.lon_var].shape[0], - ), - dtype=float, - ) - dx[:] = idTmp.variables[config_options.lon_var].dx - dy[:] = idTmp.variables[config_options.lat_var].dy + return self.check_grid(self.cosalpha_var[0, :, :]) except Exception as e: - config_options.errMsg = ( - "Unable to extract dx and dy distances in " + config_options.geogrid + self.config_options.errMsg = ( + f"Unable to extract COSALPHA from: {self.config_options.geogrid}" ) - raise Exception + raise e + + @property + @lru_cache + def hgt_grid_from_geogrid_n3(self) -> np.ndarray: + """Get the HGT_M grid for the gridded domain directly from the geogrid file.""" try: - heights = idTmp.variables[config_options.hgt_var][:] + return self.check_grid(self.hgt_var[0, :, :]) except Exception as e: - config_options.errMsg = ( - "Unable to extract heights of grid cells in " + config_options.geogrid + self.config_options.errMsg = ( + f"Unable to extract HGT_M from: 
{self.config_options.geogrid}" ) - raise Exception + raise e - idTmp.close() - # calculate grid coordinates dx distances in meters - # based on general geospatial formula approximations - # on a spherical grid - dz_init = np.diff(heights, axis=0) - dz = np.empty(dx.shape, dtype=float) - dz[0 : dz_init.shape[0], 0 : dz_init.shape[1]] = dz_init - dz[dz_init.shape[0] :, :] = dz_init[-1, :] +class HydrofabricGeoMeta(GeoMeta): + """Class for handling information about the hydrofabric domain forcing.""" - slope = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi = (180 / np.pi) * np.arctan(dx / dy) - - # Reset temporary arrays to None to free up memory - lons = None - lats = None - heights = None - dx = None - dy = None - dz = None - - return slope, slp_azi - - def initialize_destination_geo_unstructured(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig): + """Initialize HydrofabricGeoMeta class variables. Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid + calculate the global parameters of the hydrofabric being processed to, along with the local parameters for this particular processor. :return: """ - # Open the geogrid file and extract necessary information - # to create ESMF fields. 
- if mpi_config.rank == 0: - try: - idTmp = netCDF4.Dataset(config_options.geogrid, "r") - except Exception as e: - config_options.errMsg = ( - "Unable to open the unstructured mesh file: " - + config_options.geogrid - ) - raise Exception - - try: - self.nx_global = idTmp.variables[config_options.nodecoords_var].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size in " + config_options.geogrid - ) - raise Exception - - try: - self.ny_global = idTmp.variables[config_options.nodecoords_var].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size in " + config_options.geogrid - ) - raise Exception - - try: - self.nx_global_elem = idTmp.variables[ - config_options.elemcoords_var - ].shape[0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract X dimension size in " + config_options.geogrid - ) - raise Exception + super().__init__(config_options, mpi_config) + for attr in GEOMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @property + @lru_cache + def lat_bounds(self) -> np.ndarray: + """Get the latitude bounds for the unstructured domain.""" + return self.get_bound(1).values + + @property + @lru_cache + def lon_bounds(self) -> np.ndarray: + """Get the longitude bounds for the unstructured domain.""" + return self.get_bound(0).values + + def get_bound(self, dim: int) -> np.ndarray: + """Get the longitude or latitude bounds for the unstructured domain.""" + if self.config_options.aws: + return self.get_geogrid_var(self.config_options.nodecoords_var)[:, dim] + + @broadcast + @property + @lru_cache + def elementcoords_global(self) -> np.ndarray: + """Get the global element coordinates for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.elemcoords_var).values + + @barrier + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the unstructured domain.""" + return 
self.elementcoords_global.shape[0] + + @barrier + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the unstructured domain. + + Same as nx_global. + """ + return self.nx_global + @property + @lru_cache + def esmf_grid(self) -> ESMF.Mesh: + """Create the ESMF Mesh object for the unstructured domain.""" + try: + return ESMF.Mesh( + filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH + ) + except Exception as e: + LOG.critical( + f"Unable to create ESMF Mesh: {self.config_options.geogrid} " + f"due to {str(e)}" + ) + raise e + + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain.""" + return self.esmf_grid.coords[1][1] + + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain.""" + return self.esmf_grid.coords[1][0] + + @property + @lru_cache + def pet_element_inds(self) -> np.ndarray: + """Get the PET element indices for the unstructured domain.""" + if self.mpi_config.rank == 0: try: - self.ny_global_elem = idTmp.variables[ - config_options.elemcoords_var - ].shape[0] + tree = spatial.KDTree(self.elementcoords_global) + return tree.query( + np.column_stack([self.longitude_grid, self.latitude_grid]) + )[1] except Exception as e: - config_options.errMsg = ( - "Unable to extract Y dimension size in " + config_options.geogrid + LOG.critical( + f"Failed to open mesh file: {self.config_options.geogrid} " + f"due to {str(e)}" ) - raise Exception + raise e + + @property + @lru_cache + def element_ids(self) -> np.ndarray: + """Get the element IDs for the unstructured domain.""" + return self.element_ids_global[self.pet_element_inds] + + @broadcast + @property + @lru_cache + def element_ids_global(self) -> np.ndarray: + """Get the global element IDs for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.element_id_var).values + + @broadcast 
+ @property + @lru_cache + def heights_global(self) -> np.ndarray: + """Get the global heights for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.hgt_var) + + @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the unstructured domain.""" + if self.mpi_config.rank == 0: + if self.config_options.hgt_var is not None: + return self.heights_global[self.pet_element_inds] + + @property + @lru_cache + def slope(self) -> np.ndarray: + """Get the slopes for the unstructured domain.""" + if self.slopes_global is not None: + return self.slopes_global[self.pet_element_inds] + + @property + @lru_cache + def slp_azi(self) -> np.ndarray: + """Get the slope azimuths for the unstructured domain.""" + if self.slp_azi_global is not None: + return self.slp_azi_global[self.pet_element_inds] + + @property + @lru_cache + def mesh_inds(self) -> np.ndarray: + """Get the mesh indices for the unstructured domain.""" + return self.pet_element_inds + + @broadcast + @property + @lru_cache + def slopes_global(self) -> np.ndarray: + """Get the global slopes for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.slope_var) + + @property + @lru_cache + def slp_azi_global(self) -> np.ndarray: + """Get the global slope azimuths for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.slope_azimuth_var) + + @property + @lru_cache + def nx_local(self) -> int: + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + @property + @lru_cache + def ny_local(self) -> int: + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + +class UnstructuredGeoMeta(GeoMeta): + """Class for handling information about the hydrofabric domain forcing.""" + + def __init__(self, config_options: ConfigOptions, mpi_config: MpiConfig) -> None: + """Initialize HydrofabricGeoMeta class variables. 
+ Initialization function to initialize ESMF through ESMPy, + calculate the global parameters of the unstructured mesh + being processed to, along with the local parameters + for this particular processor. + :return: + """ + super().__init__(config_options, mpi_config) + for attr in GEOMOD[self.__class__.__name__]: + setattr(self, attr, None) + + @broadcast + @property + @lru_cache + def nx_global(self) -> int: + """Get the global x dimension size for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.nodecoords_var).shape[0] + + @broadcast + @property + @lru_cache + def ny_global(self) -> int: + """Get the global y dimension size for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.nodecoords_var).shape[0] + + @broadcast + @property + @lru_cache + def nx_global_elem(self) -> int: + """Get the global x dimension size for the unstructured domain elements.""" + return self.get_esmf_var(self.config_options.elemcoords_var).shape[0] + + @broadcast + @property + @lru_cache + def ny_global_elem(self) -> int: + """Get the global y dimension size for the unstructured domain elements.""" + return self.get_esmf_var(self.config_options.elemcoords_var).shape[0] + + @property + @lru_cache + def lon_bounds(self) -> np.ndarray: + """Get the longitude bounds for the unstructured domain.""" + return self.get_bound(0) + + @property + @lru_cache + def lat_bounds(self) -> np.ndarray: + """Get the latitude bounds for the unstructured domain.""" + return self.get_bound(1) + + def get_bound(self, dim: int) -> np.ndarray: + """Get the longitude or latitude bounds for the unstructured domain.""" + if self.mpi_config.rank == 0: # Flag to grab entire array for AWS slicing - if config_options.aws: - self.lat_bounds = idTmp.variables[config_options.nodecoords_var][:][ - :, 1 - ] - self.lon_bounds = idTmp.variables[config_options.nodecoords_var][:][ - :, 0 - ] - - # mpi_config.comm.barrier() - - # Broadcast global dimensions to the other 
processors. - self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int - ) - self.nx_global_elem = mpi_config.broadcast_parameter( - self.nx_global_elem, config_options, param_type=int - ) - self.ny_global_elem = mpi_config.broadcast_parameter( - self.ny_global_elem, config_options, param_type=int - ) - - # mpi_config.comm.barrier() + if self.config_options.aws: + return self.get_esmf_var(self.config_options.nodecoords_var)[:][:, dim] - if mpi_config.rank == 0: - # Close the geogrid file - try: - idTmp.close() - except Exception as e: - config_options.errMsg = ( - "Unable to close geogrid Mesh file: " + config_options.geogrid - ) - raise Exception + @property + @lru_cache + def esmf_grid(self) -> ESMF.Mesh: + """Create the ESMF grid object for the unstructured domain. + Removed argument coord_sys=ESMF.CoordSys.SPH_DEG since we are always reading from a file + From ESMF documentation + If you create a mesh from a file (like NetCDF/ESMF-Mesh), coord_sys is ignored. The mesh’s coordinate system should be embedded in the file or inferred. + """ try: - # Removed argument coord_sys=ESMF.CoordSys.SPH_DEG since we are always reading from a file - # From ESMF documentation - # If you create a mesh from a file (like NetCDF/ESMF-Mesh), coord_sys is ignored. The mesh’s coordinate system should be embedded in the file or inferred. 
- self.esmf_grid = ESMF.Mesh( - filename=config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH + return ESMF.Mesh( + filename=self.config_options.geogrid, filetype=ESMF.FileFormat.ESMFMESH ) except Exception as e: - config_options.errMsg = ( - "Unable to create ESMF Mesh from geogrid file: " - + config_options.geogrid - ) - raise Exception + self.config_options.errMsg = f"Unable to create ESMF Mesh from geogrid file: {self.config_options.geogrid}" + raise e - # mpi_config.comm.barrier() + @property + @lru_cache + def latitude_grid(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain. - # Obtain the local boundaries for this processor. - self.get_processor_bounds(config_options) + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[0][1] - # Place the local lat/lon grid slices from the parent geogrid file into - # the ESMF lat/lon grids that have already been seperated by processors. - try: - self.latitude_grid = self.esmf_grid.coords[0][1] - self.latitude_grid_elem = self.esmf_grid.coords[1][1] - varSubTmp = None - varTmp = None - except Exception as e: - config_options.errMsg = ( - "Unable to subset node latitudes from ESMF Mesh object" - ) - raise Exception - try: - self.longitude_grid = self.esmf_grid.coords[0][0] - self.longitude_grid_elem = self.esmf_grid.coords[1][0] - varSubTmp = None - varTmp = None - except Exception as e: - config_options.errMsg = ( - "Unable to subset XLONG_M from geogrid file into ESMF Mesh object" - ) - raise Exception + @property + @lru_cache + def latitude_grid_elem(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain elements. - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. 
+ """ + return self.esmf_grid.coords[1][1] - # Get lat and lon global variables for pet extraction of indices - nodecoords_global = idTmp.variables[config_options.nodecoords_var][:].data - elementcoords_global = idTmp.variables[config_options.elemcoords_var][:].data + @property + @lru_cache + def longitude_grid(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain. + + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[0][0] + @property + @lru_cache + def longitude_grid_elem(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain elements. + + Place the local lat/lon grid slices from the parent geogrid file into + the ESMF lat/lon grids that have already been seperated by processors. + """ + return self.esmf_grid.coords[1][0] + + @property + @lru_cache + def pet_element_inds(self) -> np.ndarray: + """Get the local node indices for the unstructured domain elements.""" + # Get lat and lon global variables for pet extraction of indices + elementcoords_global = self.get_var( + self.geogrid_ds, self.config_options.elemcoords_var + )[:].data # Find the corresponding local indices to slice global heights and slope # variables that are based on the partitioning on the unstructured mesh - pet_nodecoords = np.empty((len(self.latitude_grid), 2), dtype=float) pet_elementcoords = np.empty((len(self.latitude_grid_elem), 2), dtype=float) - pet_nodecoords[:, 0] = self.longitude_grid - pet_nodecoords[:, 1] = self.latitude_grid pet_elementcoords[:, 0] = self.longitude_grid_elem pet_elementcoords[:, 1] = self.latitude_grid_elem + return spatial.KDTree(elementcoords_global).query(pet_elementcoords)[1] - distance, pet_node_inds = spatial.KDTree(nodecoords_global).query( - pet_nodecoords - ) - distance, pet_element_inds = spatial.KDTree(elementcoords_global).query( - pet_elementcoords - ) + @property + @lru_cache + 
def pet_node_inds(self) -> np.ndarray: + """Get the local node indices for the unstructured domain nodes.""" + # Get lat and lon global variables for pet extraction of indices + nodecoords_global = self.get_var( + self.geogrid_ds, self.config_options.nodecoords_var + )[:].data + # Find the corresponding local indices to slice global heights and slope + # variables that are based on the partitioning on the unstructured mesh + pet_nodecoords = np.empty((len(self.latitude_grid), 2), dtype=float) + pet_nodecoords[:, 0] = self.longitude_grid + pet_nodecoords[:, 1] = self.latitude_grid - # reset variables to free up memory - nodecoords_global = None - elementcoords_global = None - pet_nodecoords = None - pet_elementcoords = None - distance = None - - # Not accepting cosalpha and sinalpha at this time for unstructured meshes, only - # accepting the pre-calculated slope and slope azmiuth variables if available, - # otherwise calculate slope from height estimates - # if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): - # self.cosa_grid = idTmp.variables[config_options.cosalpha_var][:].data[pet_node_inds] - # self.sina_grid = idTmp.variables[config_options.sinalpha_var][:].data[pet_node_inds] - # slopeTmp, slp_azi_tmp = self.calc_slope(idTmp,config_options) - # self.slope = slope_node_Tmp[pet_node_inds] - # self.slp_azi = slp_azi_node_tmp[pet_node_inds] + return spatial.KDTree(nodecoords_global).query(pet_nodecoords)[1] + + # NOTE this is a note/commented out code from before refactor on 2/19/2026. 
+ # Not accepting cosalpha and sinalpha at this time for unstructured meshes, only + # accepting the pre-calculated slope and slope azimuth variables if available, + # otherwise calculate slope from height estimates + # if(config_options.cosalpha_var != None and config_options.sinalpha_var != None): + # self.cosa_grid = esmf_ds.variables[config_options.cosalpha_var][:].data[pet_node_inds] + # self.sina_grid = esmf_ds.variables[config_options.sinalpha_var][:].data[pet_node_inds] + # slope_tmp, slp_azi_tmp = self.calc_slope(esmf_ds,config_options) + # self.slope = slope_node_tmp[pet_node_inds] + # self.slp_azi = slp_azi_node_tmp[pet_node_inds] + + @property + @lru_cache + def slope(self) -> np.ndarray: + """Get the slope grid for the unstructured domain.""" if ( - config_options.slope_var is not None - and config_options.slp_azi_var is not None + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None ): - self.slope = idTmp.variables[config_options.slope_var][:].data[ - pet_node_inds - ] - self.slp_azi = idTmp.variables[config_options.slope_azimuth_var][:].data[ - pet_node_inds + return self.get_geogrid_var(self.config_options.slope_var)[ + self.pet_node_inds ] - self.slope_elem = idTmp.variables[config_options.slope_var_elem][:].data[ - pet_element_inds + elif self.config_options.hgt_var is not None: + return ( + self.dz_node + / np.sqrt((self.dx_node**2) + (self.dy_node**2)) + )[self.pet_node_inds] + + @property + @lru_cache + def slp_azi(self) -> np.ndarray: + """Get the slope azimuth grid for the unstructured domain.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.slope_azimuth_var)[ + self.pet_node_inds ] - self.slp_azi_elem = idTmp.variables[config_options.slope_azimuth_var_elem][ - : - ].data[pet_element_inds] - - # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = 
idTmp.variables[config_options.hgt_var][:].data[pet_node_inds] - # Read in a scatter the mesh element elevation, which is used for downscaling purposes. - self.height_elem = idTmp.variables[config_options.hgt_elem_var][:].data[ - pet_element_inds + elif self.config_options.hgt_var is not None: + return (180 / np.pi) * np.arctan(self.dx_node / self.dy_node)[ + self.pet_node_inds ] - elif config_options.hgt_var is not None: - # Read in a scatter the mesh node elevation, which is used for downscaling purposes - self.height = idTmp.variables[config_options.hgt_var][:].data[pet_node_inds] - - # Read in a scatter the mesh element elevation, which is used for downscaling purposes. - self.height_elem = idTmp.variables[config_options.hgt_elem_var][:].data[ - pet_element_inds + @property + @lru_cache + def slope_elem(self) -> np.ndarray: + """Get the slope grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.slope_var_elem)[:].data[ + self.pet_element_inds ] - - # Calculate the slope from the domain using elevation on the WRF-Hydro domain. This will - # be used for downscaling purposes. 
- slope_node_Tmp, slp_azi_node_tmp, slope_elem_Tmp, slp_azi_elem_tmp = ( - self.calc_slope_unstructured(idTmp, config_options) + elif self.config_options.hgt_var is not None: + return ( + self.dz_elem + / np.sqrt((self.dx_elem**2) + (self.dy_elem**2)) + )[self.pet_element_inds] - self.slope = slope_node_Tmp[pet_node_inds] - slope_node_Tmp = None - - self.slp_azi = slp_azi_node_tmp[pet_node_inds] - slp_azi__node_tmp = None - - self.slope_elem = slope_elem_Tmp[pet_element_inds] - slope_elem_Tmp = None - - self.slp_azi_elem = slp_azi_elem_tmp[pet_element_inds] - slp_azi_elem_tmp = None - - # save indices where mesh was partition for future scatter functions - self.mesh_inds = pet_node_inds - self.mesh_inds_elem = pet_element_inds - - # reset variables to free up memory - pet_node_inds = None - pet_element_inds = None + @property + @lru_cache + def slp_azi_elem(self) -> np.ndarray: + """Get the slope azimuth grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_var( + self.geogrid_ds, self.config_options.slope_azimuth_var_elem + )[:].data[self.pet_element_inds] + elif self.config_options.hgt_var is not None: + return (180 / np.pi) * np.arctan(self.dx_elem / self.dy_elem)[ + self.pet_element_inds + ] - def calc_slope_unstructured(self, idTmp, config_options): - """Calculate slope grids needed for incoming shortwave radiation downscaling.
+ @property + @lru_cache + def height(self) -> np.ndarray: + """Get the height grid for the unstructured domain nodes.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.hgt_var)[:].data[ + self.pet_node_inds + ] + elif self.config_options.hgt_var is not None: + return self.get_geogrid_var(self.config_options.hgt_var)[:].data[ + self.pet_node_inds + ] - - Function to calculate slope grids needed for incoming shortwave radiation downscaling - later during the program. This calculates the slopes for both nodes and elements - :param idTmp: - :param config_options: - :return: - """ - idTmp = netCDF4.Dataset(config_options.geogrid, "r") + @property + @lru_cache + def height_elem(self) -> np.ndarray: + """Get the height grid for the unstructured domain elements.""" + if ( + self.config_options.slope_var is not None + and self.config_options.slp_azi_var is not None + ): + return self.get_geogrid_var(self.config_options.hgt_elem_var)[:].data[ + self.pet_element_inds + ] + elif self.config_options.hgt_var is not None: + return self.get_geogrid_var(self.config_options.hgt_elem_var)[:].data[ + self.pet_element_inds + ] - try: - node_lons = idTmp.variables[config_options.nodecoords_var][:][:, 0] - node_lats = idTmp.variables[config_options.nodecoords_var][:][:, 1] - except Exception as e: - config_options.errMsg = ( - "Unable to extract node coordinates in " + config_options.geogrid - ) - raise Exception - try: - elem_lons = idTmp.variables[config_options.elemcoords_var][:][:, 0] - elem_lats = idTmp.variables[config_options.elemcoords_var][:][:, 1] - except Exception as e: - config_options.errMsg = ( - "Unable to extract element coordinates in " + config_options.geogrid - ) - raise Exception - try: - elem_conn = idTmp.variables[config_options.elemconn_var][:][:, 0] - except Exception as e: - config_options.errMsg = ( - "Unable to extract element connectivity in " + 
config_options.geogrid - ) - raise Exception - try: - node_heights = idTmp.variables[config_options.hgt_var][:] - except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M from: " + config_options.geogrid - ) - raise + @property + @lru_cache + def node_lons(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain nodes.""" + return self.get_geogrid_var(self.config_options.nodecoords_var)[:][:, 0] + + @property + @lru_cache + def node_lats(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain nodes.""" + return self.get_geogrid_var(self.config_options.nodecoords_var)[:][:, 1] + + @property + @lru_cache + def elem_lons(self) -> np.ndarray: + """Get the longitude grid for the unstructured domain elements.""" + return self.get_geogrid_var(self.config_options.elemcoords_var)[:][:, 0] + + @property + @lru_cache + def elem_lats(self) -> np.ndarray: + """Get the latitude grid for the unstructured domain elements.""" + return self.get_geogrid_var(self.config_options.elemcoords_var)[:][:, 1] + + @property + @lru_cache + def elem_conn(self) -> np.ndarray: + """Get the element connectivity for the unstructured domain.""" + return self.get_geogrid_var(self.config_options.elemconn_var)[:][:, 0] + + @property + @lru_cache + def node_heights(self) -> np.ndarray: + """Get the height grid for the unstructured domain nodes.""" + node_heights = self.get_geogrid_var(self.config_options.hgt_var)[:] if node_heights.shape[0] != self.ny_global: - config_options.errMsg = ( - "HGT_M dimension mismatch in: " + config_options.geogrid + self.config_options.errMsg = ( + f"HGT_M dimension mismatch in: {self.config_options.geogrid}" ) raise Exception - - try: - elem_heights = idTmp.variables[config_options.hgt_elem_var][:] - except Exception as e: - config_options.errMsg = ( - "Unable to extract HGT_M_ELEM from: " + config_options.geogrid - ) - raise - - if elem_heights.shape[0] != len(elem_lons): - config_options.errMsg = ( - 
"HGT_M_ELEM dimension mismatch in: " + config_options.geogrid + return node_heights + + @property + @lru_cache + def elem_heights(self) -> np.ndarray: + """Get the height grid for the unstructured domain elements.""" + elem_heights = self.get_var(self.geogrid_ds, self.config_options.hgt_elem_var)[ + : + ] + + if elem_heights.shape[0] != len(self.elem_lons): + self.config_options.errMsg = ( + f"HGT_M_ELEM dimension mismatch in: {self.config_options.geogrid}" ) raise Exception - - idTmp.close() - - # calculate node coordinate distances in meters - # based on general geospatial formula approximations - # on a spherical grid - dx = np.diff(node_lons) * 40075160 * np.cos(node_lats[0:-1] * np.pi / 180) / 360 - dx = np.append(dx, dx[-1]) - dy = np.diff(node_lats) * 40008000 / 360 - dy = np.append(dy, dy[-1]) - dz = np.diff(node_heights) - dz = np.append(dz, dz[-1]) - - slope_nodes = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi_nodes = (180 / np.pi) * np.arctan(dx / dy) - - # calculate element coordinate distances in meters - # based on general geospatial formula approximations - # on a spherical grid - dx = np.diff(elem_lons) * 40075160 * np.cos(elem_lats[0:-1] * np.pi / 180) / 360 - dx = np.append(dx, dx[-1]) - dy = np.diff(elem_lats) * 40008000 / 360 - dy = np.append(dy, dy[-1]) - dz = np.diff(elem_heights) - dz = np.append(dz, dz[-1]) - - slope_elem = dz / np.sqrt((dx**2) + (dy**2)) - slp_azi_elem = (180 / np.pi) * np.arctan(dx / dy) - - # Reset temporary arrays to None to free up memory - node_lons = None - node_lats = None - elem_lons = None - elem_lats = None - node_heights = None - elem_heights = None - dx = None - dy = None - dz = None - - return slope_nodes, slp_azi_nodes, slope_elem, slp_azi_elem - - def initialize_destination_geo_hydrofabric(self, config_options, mpi_config): - """Initialize GeoMetaWrfHydro class variables. 
- - Initialization function to initialize ESMF through ESMPy, - calculate the global parameters of the WRF-Hydro grid - being processed to, along with the local parameters - for this particular processor. - :return: - """ - - if config_options.geogrid is not None: - # Phase 1: Rank 0 extracts all needed global data - if mpi_config.rank == 0: - try: - idTmp = nc_utils.nc_Dataset_retry( - mpi_config, - config_options, - err_handler, - config_options.geogrid, - "r", - ) - - # Extract everything we need with retries - tmp_vars = idTmp.variables - - if config_options.aws: - nodecoords_data = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.nodecoords_var], - ) - self.lat_bounds = nodecoords_data[:, 1] - self.lon_bounds = nodecoords_data[:, 0] - - # Store these for later broadcast/scatter - elementcoords_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.elemcoords_var], - ) - - self.nx_global = elementcoords_global.shape[0] - self.ny_global = self.nx_global - - element_ids_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.element_id_var], - ) - - heights_global = None - if config_options.hgt_var is not None: - heights_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.hgt_var], - ) - slopes_global = None - slp_azi_global = None - if config_options.slope_var is not None: - slopes_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.slope_var], - ) - if config_options.slope_azimuth_var is not None: - slp_azi_global = nc_utils.nc_read_var_retry( - mpi_config, - config_options, - err_handler, - tmp_vars[config_options.slope_azimuth_var], - ) - - except Exception as e: - LOG.critical( - f"Failed to open mesh file: {config_options.geogrid} " - f"due to {str(e)}" - ) - raise - finally: - 
idTmp.close() - else: - elementcoords_global = None - element_ids_global = None - heights_global = None - slopes_global = None - slp_azi_global = None - - # Broadcast dimensions - self.nx_global = mpi_config.broadcast_parameter( - self.nx_global, config_options, param_type=int - ) - self.ny_global = mpi_config.broadcast_parameter( - self.ny_global, config_options, param_type=int - ) - - mpi_config.comm.barrier() - - # Phase 2: Create ESMF Mesh (collective operation with retry) - try: - self.esmf_grid = esmf_utils.esmf_mesh_retry( - mpi_config, - config_options, - err_handler, - filename=config_options.geogrid, - filetype=ESMF.FileFormat.ESMFMESH, - ) - except Exception as e: - LOG.critical( - f"Unable to create ESMF Mesh: {config_options.geogrid} " - f"due to {str(e)}" - ) - raise - - # Get processor bounds - self.get_processor_bounds(config_options) - - # Extract local coordinates from ESMF mesh - self.latitude_grid = self.esmf_grid.coords[1][1] - self.longitude_grid = self.esmf_grid.coords[1][0] - - # Phase 3: Broadcast global arrays and compute local indices - elementcoords_global = mpi_config.comm.bcast(elementcoords_global, root=0) - element_ids_global = mpi_config.comm.bcast(element_ids_global, root=0) - - # Each rank computes its own local indices - pet_elementcoords = np.column_stack( - [self.longitude_grid, self.latitude_grid] - ) - tree = spatial.KDTree(elementcoords_global) - _, pet_element_inds = tree.query(pet_elementcoords) - - self.element_ids = element_ids_global[pet_element_inds] - self.element_ids_global = element_ids_global - - # Broadcast and extract height/slope data - if config_options.hgt_var is not None: - heights_global = mpi_config.comm.bcast(heights_global, root=0) - self.height = heights_global[pet_element_inds] - - if config_options.slope_var is not None: - slopes_global = mpi_config.comm.bcast(slopes_global, root=0) - slp_azi_global = mpi_config.comm.bcast(slp_azi_global, root=0) - self.slope = slopes_global[pet_element_inds] - 
self.slp_azi = slp_azi_global[pet_element_inds] - - self.mesh_inds = pet_element_inds + return elem_heights + + @property + @lru_cache + def dx_elem(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable for the unstructured domain elements.""" + dx = ( + np.diff(self.elem_lons) + * 40075160 + * np.cos(self.elem_lats[0:-1] * np.pi / 180) + / 360 + ) + return np.append(dx, dx[-1]) + + @property + @lru_cache + def dy_elem(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable for the unstructured domain elements.""" + dy = np.diff(self.elem_lats) * 40008000 / 360 + return np.append(dy, dy[-1]) + + @property + @lru_cache + def dz_elem(self) -> np.ndarray: + """Calculate the dz distance in meters for the height variable for the unstructured domain elements.""" + dz = np.diff(self.elem_heights) + return np.append(dz, dz[-1]) + + @property + @lru_cache + def dx_node(self) -> np.ndarray: + """Calculate the dx distance in meters for the longitude variable for the unstructured domain nodes.""" + dx = ( + np.diff(self.node_lons) + * 40075160 + * np.cos(self.node_lats[0:-1] * np.pi / 180) + / 360 + ) + return np.append(dx, dx[-1]) + + @property + @lru_cache + def dy_node(self) -> np.ndarray: + """Calculate the dy distance in meters for the latitude variable for the unstructured domain nodes.""" + dy = np.diff(self.node_lats) * 40008000 / 360 + return np.append(dy, dy[-1]) + + @property + @lru_cache + def dz_node(self) -> np.ndarray: + """Calculate the dz distance in meters for the height variable for the unstructured domain nodes.""" + dz = np.diff(self.node_heights) + return np.append(dz, dz[-1]) + + @property + @lru_cache + def mesh_inds(self) -> np.ndarray: + """Get the local mesh node indices for the unstructured domain.""" + return self.pet_node_inds + + @property + @lru_cache + def mesh_inds_elem(self) -> np.ndarray: + """Get the local mesh element indices for the unstructured domain.""" + return 
self.pet_element_inds + + @property + @lru_cache + def nx_local(self) -> int: + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[0][1]) + + @property + @lru_cache + def ny_local(self) -> int: + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[0][1]) + + @property + @lru_cache + def nx_local_elem(self) -> int: + """Get the local x dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) + + @property + @lru_cache + def ny_local_elem(self) -> int: + """Get the local y dimension size for this processor.""" + return len(self.esmf_grid.coords[1][1]) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py index 99126c62..09bee9d0 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/regrid.py @@ -1,15 +1,17 @@ """Regridding module file for regridding input forcing files.""" -from functools import partial +from __future__ import annotations + import hashlib import os import sys import traceback from contextlib import contextmanager from datetime import datetime, timedelta +from functools import partial from pathlib import Path from time import monotonic, time - +from typing import TYPE_CHECKING # import mpi4py.util.pool as mpi_pool # For ESMF + shapely 2.x, shapely must be imported first, to avoid segfault "address not mapped to object" stemming from calls such as: # /usr/local/esmf/lib/libO/Linux.gfortran.64.openmpi.default/libesmf_fullylinked.so(get_geom+0x36) @@ -39,13 +41,14 @@ ioMod, timeInterpMod, ) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( - ConfigOptions, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, -) -from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import 
MpiConfig +if TYPE_CHECKING: + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ( + ConfigOptions, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( + GeoMeta, + ) + from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig from nextgen_forcings_ewts import MODULE_NAME from ..esmf_utils import ( @@ -11979,7 +11982,7 @@ def check_supp_pcp_regrid_status( def get_weight_file_names( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, ) -> tuple[str | None, str | None]: """Get weight file names for regridding.""" if not config_options.weightsDir: @@ -12005,7 +12008,7 @@ def get_weight_file_names( def load_weight_file( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str, element_mode: bool, ) -> None: @@ -12051,7 +12054,7 @@ def load_weight_file( def make_regrid( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str | None, fill: bool, element_mode: bool, @@ -12082,7 +12085,7 @@ def make_regrid( extrap_method = ESMF.ExtrapMethod.CREEP_FILL if fill else ESMF.ExtrapMethod.NONE regrid_method = (ESMF.RegridMethod.BILINEAR, ESMF.RegridMethod.NEAREST_STOD)[ - input_forcings.regridOpt - 1 + input_forcings.regrid_opt - 1 ] err_handler.check_program_status(config_options, mpi_config) @@ -12120,7 +12123,7 @@ def make_regrid( def execute_regrid( mpi_config: MpiConfig, config_options: ConfigOptions, - input_forcings: GeoMetaWrfHydro, + input_forcings: GeoMeta, weight_file: str, element_mode: bool, ) -> None: @@ -12279,7 +12282,7 @@ def calculate_weights( err_handler.check_program_status(config_options, mpi_config) # check if we're doing border trimming and set up mask - border = input_forcings.border # // 5 # HRRR is a 3 km product + border = 
input_forcings.ignored_border_widths # // 5 # HRRR is a 3 km product if border > 0: try: mask = input_forcings.esmf_grid_in.add_item( diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py index 04708010..497fbdaf 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/core/time_handling.py @@ -123,14 +123,14 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/NLDAS_FORA0125_H.A" + input_forcings.fcst_date1.strftime("%Y%m%d.%H%M") + ".002" @@ -213,7 +213,7 @@ def find_nldas_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -255,27 +255,27 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. 
if d_current.year > 2019: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) else: tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AORC-OWP_" + d_current.strftime("%Y%m%d%H") + "z" @@ -284,13 +284,13 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if input_forcings.product_name == "AORC_Alaska": # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/AK_AORC-OWP_" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -373,7 +373,7 @@ def find_aorc_neighbors(input_forcings, config_options, d_current, mpi_config): if config_options.input_forcings[0] not in [12, 21]: if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input AORC file: " + input_forcings.file_in2 @@ -412,8 +412,8 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): :return: """ # Point to ERA5 netcdf input file - tmp_file1 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) - tmp_file2 = os.path.join(input_forcings.inDir, os.listdir(input_forcings.inDir)[0]) + tmp_file1 = os.path.join(input_forcings.input_force_dirs, os.listdir(input_forcings.input_force_dirs)[0]) + tmp_file2 = os.path.join(input_forcings.input_force_dirs, 
os.listdir(input_forcings.input_force_dirs)[0]) if mpi_config.rank == 0: # Check to see if files are already set. If not, then reset, grids and @@ -491,7 +491,7 @@ def find_era5_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ERA5 Interim file: " + input_forcings.file_in2 @@ -539,13 +539,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H") + input_forcings.file_ext @@ -554,13 +554,13 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + d_current.strftime("%Y%m%d%H%M") + input_forcings.file_ext @@ -642,7 +642,7 @@ def find_nwm_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NWM file: " + input_forcings.file_in2 @@ -747,7 +747,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. 
tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/" + prev_ext_ana_date.strftime("%Y%m%d%H") + "/" @@ -834,7 +834,7 @@ def find_ak_ext_ana_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input ExtAnA file: " + input_forcings.file_in2 @@ -946,7 +946,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -960,7 +960,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -975,7 +975,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -989,7 +989,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." 
+ current_hrrr_cycle.strftime("%Y%m%d") + "/hrrr.t" @@ -1075,7 +1075,7 @@ def find_conus_hrrr_neighbors(input_forcings, config_options, d_current, mpi_con # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1353,7 +1353,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1368,7 +1368,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config err_handler.log_msg(config_options, mpi_config) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/hrrr." + current_hrrr_cycle.strftime("%Y%m%d") + "/alaska/hrrr.t" @@ -1452,7 +1452,7 @@ def find_ak_hrrr_neighbors(input_forcings, config_options, d_current, mpi_config # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input HRRR file: " + input_forcings.file_in2 @@ -1558,7 +1558,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1568,7 +1568,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." 
+ current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1583,7 +1583,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1593,7 +1593,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1608,7 +1608,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if os.path.isfile(tmp_file1) == False and os.path.isfile(tmp_file2) == False: # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1618,7 +1618,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/rap." + current_rap_cycle.strftime("%Y%m%d") + "/rap.t" @@ -1702,7 +1702,7 @@ def find_conus_rap_neighbors(input_forcings, config_options, d_current, mpi_conf if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): missing = input_forcings.file_in2.replace("pgrb", "[bgrb|pgrb]") - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input RAP file: " + missing + " not found." ) @@ -1819,7 +1819,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Calculate expected file paths. if current_gfs_cycle < datetime.datetime(2019, 6, 12, 12): tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." 
+ current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1829,7 +1829,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d%H") + "/gfs.t" @@ -1841,7 +1841,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): else: # FV3 change on June 12th, 2019 tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -1853,7 +1853,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/gfs." + current_gfs_cycle.strftime("%Y%m%d") + "/" @@ -2016,7 +2016,7 @@ def find_gfs_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input GFS file: " + input_forcings.file_in2 @@ -2156,7 +2156,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." + current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2169,7 +2169,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/nam." 
+ current_nam_nest_cycle.strftime("%Y%m%d") + "/nam.t" @@ -2258,7 +2258,7 @@ def find_nam_nest_neighbors(input_forcings, config_options, d_current, mpi_confi # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input NAM Nest file: " + input_forcings.file_in2 @@ -2366,7 +2366,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.file_ext = ".grb2" tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2383,7 +2383,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): + input_forcings.file_ext ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/cfs." + current_cfs_cycle.strftime("%Y%m%d") + "/" @@ -2480,7 +2480,7 @@ def find_cfsv2_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input CFSv2 file: " + input_forcings.file_in2 @@ -2563,7 +2563,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." + current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2571,7 +2571,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ + ".nc" ) tmp_file2 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/custom_hourly." 
+ current_custom_cycle.strftime("%Y%m%d%H") + ".f" @@ -2653,7 +2653,7 @@ def find_custom_hourly_neighbors(input_forcings, config_options, d_current, mpi_ # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input Custom file: " + input_forcings.file_in2 @@ -3256,7 +3256,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Calculate expected file paths. tmp_file1 = ( - input_forcings.inDir + input_forcings.input_force_dirs + "/SBC_LWF/" + input_forcings.fcst_date1.strftime("%Y%m") + "/" @@ -3339,7 +3339,7 @@ def find_sbcv2_lwf_neighbors(input_forcings, config_options, d_current, mpi_conf # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.isfile(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input SBCV2_LWF file: " + input_forcings.file_in2 @@ -4357,7 +4357,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour2 = current_fcst.total_seconds() / 3600 tmp_file1 = os.path.join( - input_forcings.inDir, + input_forcings.input_force_dirs, "NDFD", current_cycle.strftime("%Y%m%d"), "wgrbbul", @@ -4447,7 +4447,7 @@ def find_ndfd_neighbors(input_forcings, config_options, d_current, mpi_config): for tag in ("tmp", "wdir", "wspd", "qpf") ]: if not os.path.isfile(subfile): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( f"Expected input NDFD file: {subfile} not found." ) @@ -4707,8 +4707,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): input_forcings.fcst_hour1 = input_forcings.fcst_hour1 - 1 if config_options.ana_flag == 0: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) tmp_file1 = files1[0] @@ -4716,8 +4716,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): config_options.statusMsg = "Previous input file being used: " + tmp_file1 err_handler.log_msg(config_options, mpi_config, True) # log at debug level - pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(next_input_forecast_hour).zfill(2)}.grib2" files2 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4729,8 +4729,8 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): elif config_options.ana_flag == 1: # Calculate expected file paths. 
- pattern1 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" - pattern2 = f"{input_forcings.inDir}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern1 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" + pattern2 = f"{input_forcings.input_force_dirs}/*.{current_input_cycle.strftime('%Y%m%d')}/conus/*{current_input_cycle.strftime('%H')}z*{str(prev_input_forecast_hour).zfill(2)}.grib2" files1 = glob.glob(pattern1) + glob.glob(pattern2) @@ -4853,7 +4853,7 @@ def find_input_neighbors(input_forcings, config_options, d_current, mpi_config): # Ensure we have the necessary new file if mpi_config.rank == 0: if not os.path.exists(input_forcings.file_in2): - if input_forcings.enforce == 1: + if input_forcings.input_force_mandatory == 1: config_options.errMsg = ( "Expected input file: " + input_forcings.file_in2 + " not found." 
) diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py index a71a2f8c..d6bd80dc 100755 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/model.py @@ -18,7 +18,7 @@ ConfigOptions, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.geoMod import ( - GeoMetaWrfHydro, + GeoMeta, ) from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.ioMod import OutputObj from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig @@ -80,7 +80,7 @@ def run( model: dict, future_time: float, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, input_forcing_mod: dict, supp_pcp_mod: dict, mpi_config: MpiConfig, @@ -347,7 +347,7 @@ def loop_through_forcing_products( self, future_time: float, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, input_forcing_mod: dict, supp_pcp_mod: dict, mpi_config: MpiConfig, @@ -664,7 +664,7 @@ def loop_through_forcing_products( def process_suplemental_precip( self, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, supp_pcp_mod: dict, mpi_config: MpiConfig, output_obj: OutputObj, @@ -736,7 +736,7 @@ def process_suplemental_precip( def write_output( self, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, mpi_config: MpiConfig, output_obj: OutputObj, ): @@ -764,7 +764,7 @@ def update_dict( self, model: dict, config_options: ConfigOptions, - wrf_hydro_geo_meta: GeoMetaWrfHydro, + wrf_hydro_geo_meta: GeoMeta, output_obj: OutputObj, ): """Flatten the Forcings Engine output object and update the BMI dictionary.""" diff --git a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py 
index 5b2598a2..dc14dfb5 100644 --- a/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py +++ b/NextGen_Forcings_Engine_BMI/NextGen_Forcings_Engine/retry_utils.py @@ -3,8 +3,8 @@ import traceback import types -from .core.config import ConfigOptions -from .core.parallel import MpiConfig +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.parallel import MpiConfig +from NextGen_Forcings_Engine_BMI.NextGen_Forcings_Engine.core.config import ConfigOptions def retry_w_mpi_context( diff --git a/NextGen_Forcings_Engine_BMI/run_bmi_model.py b/NextGen_Forcings_Engine_BMI/run_bmi_model.py index 08c1403f..c5898de0 100755 --- a/NextGen_Forcings_Engine_BMI/run_bmi_model.py +++ b/NextGen_Forcings_Engine_BMI/run_bmi_model.py @@ -2,12 +2,12 @@ import datetime import pathlib from pathlib import Path - +import yaml import numpy as np import pandas as pd # This is the NextGen Forcings Engine BMI instance to execute -from NextGen_Forcings_Engine.bmi_model import NWMv3_Forcing_Engine_BMI_model +from NextGen_Forcings_Engine.bmi_model import BMIMODEL,parse_config def run_bmi( @@ -37,9 +37,6 @@ def run_bmi( end_time = datetime.datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S") ngen_datetimes = pd.date_range(start=start_time, end=end_time, freq="h") - print("Creating an instance of the BMI model object") - model = NWMv3_Forcing_Engine_BMI_model() - print("Initializing the BMI model") # Set the path for the config file, using the default if none is provided cfg_path = ( @@ -47,6 +44,11 @@ def run_bmi( if config_path is not None else str(Path(__file__).parent.resolve() / "config.yml") ) + with open(cfg_path,"r") as fp: + config=parse_config(yaml.safe_load(fp)) + + print("Creating an instance of the BMI model object") + model = BMIMODEL[config.get("GRID_TYPE")]() # IMPORTANT: We are not calling initialize() directly here. 
# Instead, we call initialize_with_params(), which handles @@ -72,7 +74,7 @@ def run_bmi( # =============================== if model._grid_type in {"gridded", "hydrofabric"}: varsize = ( - len(model._wrf_hydro_geo_meta.element_ids_global) + len(model.geo_meta.element_ids_global) if model._grid_type == "hydrofabric" else model._varsize )