From d6be07870ad7f4e7b2077815d9cf1e60deb20697 Mon Sep 17 00:00:00 2001 From: "guoqing.ge" Date: Tue, 12 Aug 2025 16:04:10 -0400 Subject: [PATCH 01/10] reorg notebooks/, adding etc/ under it; change the ioda class to the obsSpace class --- DAmonitor/base.py | 10 +++-- DAmonitor/obs/__init__.py | 2 +- DAmonitor/obs/{ioda.py => obsSpace.py} | 25 ++++++----- DAmonitor/shapes.py | 1 + notebooks/{ => etc}/Untitled.ipynb | 0 notebooks/{ => etc}/bokeh3.7.0.ipynb | 0 notebooks/{ => etc}/contour_t_inc.ipynb | 0 notebooks/{ => etc}/jdiag_plots.ipynb | 0 notebooks/{ => etc}/jdiag_satellite.ipynb | 0 notebooks/{ => etc}/pandas.ipynb | 0 notebooks/{ => etc}/plotly.ipynb | 0 notebooks/{ => etc}/uxarray.ipynb | 0 notebooks/{ => etc}/vapor.ipynb | 0 notebooks/{obs.ipynb => obs_exploring.ipynb} | 44 +++++++++++--------- 14 files changed, 47 insertions(+), 35 deletions(-) rename DAmonitor/obs/{ioda.py => obsSpace.py} (81%) rename notebooks/{ => etc}/Untitled.ipynb (100%) rename notebooks/{ => etc}/bokeh3.7.0.ipynb (100%) rename notebooks/{ => etc}/contour_t_inc.ipynb (100%) rename notebooks/{ => etc}/jdiag_plots.ipynb (100%) rename notebooks/{ => etc}/jdiag_satellite.ipynb (100%) rename notebooks/{ => etc}/pandas.ipynb (100%) rename notebooks/{ => etc}/plotly.ipynb (100%) rename notebooks/{ => etc}/uxarray.ipynb (100%) rename notebooks/{ => etc}/vapor.ipynb (100%) rename notebooks/{obs.ipynb => obs_exploring.ipynb} (90%) diff --git a/DAmonitor/base.py b/DAmonitor/base.py index 0a7a671..9805870 100644 --- a/DAmonitor/base.py +++ b/DAmonitor/base.py @@ -93,7 +93,7 @@ def load_inv_bkg_ana(files): return datasets -def query_dataset(dataset): +def query_dataset(dataset, meta_exclude=None): if dataset.groups: for grp in dataset.groups: print(grp) @@ -107,7 +107,8 @@ def query_dataset(dataset): print(text + text2.rstrip(",")) else: for var in dataset.groups[grp].variables: - text += f"{var}, " + if meta_exclude is None or meta_exclude not in var: + text += f"{var}, " print(text.rstrip(",")) 
else: text = "" @@ -116,12 +117,13 @@ def query_dataset(dataset): print(text.rstrip(",")) -def query_data(data): +def query_data(data, meta_exclude=None): text = "" if data.data: data = data.data for var in data: - text += f"{var}, " + if meta_exclude is None or meta_exclude not in var: + text += f"{var}, " print(text.rstrip(",")) diff --git a/DAmonitor/obs/__init__.py b/DAmonitor/obs/__init__.py index e77b199..fce21ac 100644 --- a/DAmonitor/obs/__init__.py +++ b/DAmonitor/obs/__init__.py @@ -1,4 +1,4 @@ -from .ioda import ioda +from .obsSpace import obsSpace from .fit_rate import fit_rate __all__ = [ diff --git a/DAmonitor/obs/ioda.py b/DAmonitor/obs/obsSpace.py similarity index 81% rename from DAmonitor/obs/ioda.py rename to DAmonitor/obs/obsSpace.py index 5724381..517a9a8 100644 --- a/DAmonitor/obs/ioda.py +++ b/DAmonitor/obs/obsSpace.py @@ -16,10 +16,10 @@ def __getattr__(self, name): raise AttributeError(f"No variable '{name}_{self.varname}' found.") from e -class ioda: +class obsSpace: def __init__(self, filepath): """ - Initialize an IODA object and load the NetCDF file. + Initialize an obsSpace object and load the NetCDF file. Parameters: - filepath (str): Path to the NetCDF file. 
@@ -39,8 +39,8 @@ def __init__(self, filepath): # self._get_metadata() - # Remove groups, provide direct access to varaibles, such as ioda.t, ioda.q, ioda.u, ioda.v, etc - for var in ["airTemperature", "windEastward", "windNorthward", "specificHumidity"]: + # Remove groups, provide direct access to varaibles, such as obsSpace.t, obsSpace.q, obsSpace.u, obsSpace.v, etc + for var in ["airTemperature", "windEastward", "windNorthward", "specificHumidity", "brightnessTemperature"]: self._get_data_by_varname(var) def get_valid_subset(data, item, condition={"EffectiveQC2": 0}): @@ -64,8 +64,9 @@ def _get_data_by_varname(self, varname): for grp in dataset.groups: if dataset.groups[grp].groups: for nestgrp in dataset.groups[grp].groups: # DiagnosticFlags - data[nestgrp] = dataset.groups[grp].groups[nestgrp].variables[varname][:] - only_has_metadata = False + if varname in dataset.groups[grp].groups[nestgrp].variables: + data[nestgrp] = dataset.groups[grp].groups[nestgrp].variables[varname][:] + only_has_metadata = False else: if grp == "MetaData": for var in dataset.groups['MetaData'].variables: @@ -74,7 +75,7 @@ def _get_data_by_varname(self, varname): elif grp == "ObsError" and varname == "specificHumidity": data["ObsError"] = dataset.groups["ObsError"].variables["relativeHumidity"][:] only_has_metadata = False - elif varname == "brightnessTemperature" and (grp == "ObsValue" or grp == "ObsValueAdj"): + elif varname == "brightnessTemperature" and (grp == "ObsValue" or grp == "ObsValueAdj") and "brightnessTemperature" in dataset.groups[grp].variables: data[grp] = dataset.groups[grp].variables["radiance"][:] only_has_metadata = False elif varname in dataset.groups[grp].variables: @@ -92,9 +93,11 @@ def _get_data_by_varname(self, varname): self.v = _ObsDF(data) elif varname == "specificHumidity": self.q = _ObsDF(data) + elif varname == "brightnessTemperature": + self.bt = _ObsDF(data) def __getitem__(self, key): - # Enable ioda["t"] + # Enable obsSpace["t"] if key in ["t", 
"airTemperature"]: return self.t elif key in ["u", "windEastward"]: @@ -103,11 +106,13 @@ def __getitem__(self, key): return self.u elif key in ["q", "specificHumidity"]: return self.q + elif key in ["bt", "brightnessTemperature"]: + return self.bt raise KeyError(f"Key '{key}' not found.") def __getattr__(self, name): - # Enable myioda.t (only called if attribute not found normally) + # Enable obsSpace.t try: return self.__getitem__(name) except KeyError: - raise AttributeError(f"'ioda' object has no attribute or variable '{name}'") + raise AttributeError(f"'obsSpace' object has no attribute or variable '{name}'") diff --git a/DAmonitor/shapes.py b/DAmonitor/shapes.py index 0f920c5..984b529 100644 --- a/DAmonitor/shapes.py +++ b/DAmonitor/shapes.py @@ -6,6 +6,7 @@ import geopandas as gp pyDAmonitor_ROOT = os.getenv("pyDAmonitor_ROOT") + # common border lines coast_lines = gf.coastline(projection=ccrs.PlateCarree(), line_width=1, scale="50m") state_lines = gf.states(projection=ccrs.PlateCarree(), line_width=1, line_color='gray', scale="50m") diff --git a/notebooks/Untitled.ipynb b/notebooks/etc/Untitled.ipynb similarity index 100% rename from notebooks/Untitled.ipynb rename to notebooks/etc/Untitled.ipynb diff --git a/notebooks/bokeh3.7.0.ipynb b/notebooks/etc/bokeh3.7.0.ipynb similarity index 100% rename from notebooks/bokeh3.7.0.ipynb rename to notebooks/etc/bokeh3.7.0.ipynb diff --git a/notebooks/contour_t_inc.ipynb b/notebooks/etc/contour_t_inc.ipynb similarity index 100% rename from notebooks/contour_t_inc.ipynb rename to notebooks/etc/contour_t_inc.ipynb diff --git a/notebooks/jdiag_plots.ipynb b/notebooks/etc/jdiag_plots.ipynb similarity index 100% rename from notebooks/jdiag_plots.ipynb rename to notebooks/etc/jdiag_plots.ipynb diff --git a/notebooks/jdiag_satellite.ipynb b/notebooks/etc/jdiag_satellite.ipynb similarity index 100% rename from notebooks/jdiag_satellite.ipynb rename to notebooks/etc/jdiag_satellite.ipynb diff --git a/notebooks/pandas.ipynb 
b/notebooks/etc/pandas.ipynb similarity index 100% rename from notebooks/pandas.ipynb rename to notebooks/etc/pandas.ipynb diff --git a/notebooks/plotly.ipynb b/notebooks/etc/plotly.ipynb similarity index 100% rename from notebooks/plotly.ipynb rename to notebooks/etc/plotly.ipynb diff --git a/notebooks/uxarray.ipynb b/notebooks/etc/uxarray.ipynb similarity index 100% rename from notebooks/uxarray.ipynb rename to notebooks/etc/uxarray.ipynb diff --git a/notebooks/vapor.ipynb b/notebooks/etc/vapor.ipynb similarity index 100% rename from notebooks/vapor.ipynb rename to notebooks/etc/vapor.ipynb diff --git a/notebooks/obs.ipynb b/notebooks/obs_exploring.ipynb similarity index 90% rename from notebooks/obs.ipynb rename to notebooks/obs_exploring.ipynb index 240e100..e91a511 100644 --- a/notebooks/obs.ipynb +++ b/notebooks/obs_exploring.ipynb @@ -35,9 +35,13 @@ "# autoload external python modules if they changed\n", "%load_ext autoreload\n", "%autoreload 2\n", - "# add ../funcs to the current path\n", - "import sys, os\n", - "sys.path.append(os.path.join(os.getcwd(), \"..\"))\n", + "\n", + "pyDAmonitor_ROOT=os.getenv(\"pyDAmonitor_ROOT\")\n", + "if pyDAmonitor_ROOT is None:\n", + " print(\"!!! pyDAmonitor_ROOT is NOT set. 
Run `source ush/load_pyDAmonitor.sh`\")\n", + "else:\n", + " print(f\"pyDAmonitor_ROOT={pyDAmonitor_ROOT}\\n\")\n", + "sys.path.insert(0, pyDAmonitor_ROOT)\n", "\n", "import seaborn as sns # seaborn handles NaN values automatically\n", "import matplotlib.pyplot as plt\n", @@ -45,7 +49,7 @@ "import numpy as np\n", "\n", "from DAmonitor.base import query_dataset, query_data, to_dataframe\n", - "from DAmonitor.obs import ioda, fit_rate" + "from DAmonitor.obs import obsSpace, fit_rate" ] }, { @@ -64,7 +68,7 @@ "outputs": [], "source": [ "ioda_file = \"../data/samples/mpasjedi/jdiag_aircar_t133.nc\"\n", - "t133 = ioda(ioda_file)" + "obs_t133 = obsSpace(ioda_file)" ] }, { @@ -82,13 +86,13 @@ "metadata": {}, "outputs": [], "source": [ - "# query_dataset(t133.dataset)\n", - "query_data(t133.t)\n", + "#query_dataset(obs_t133.dataset)\n", + "query_data(obs_t133.t)\n", "\n", "# np.set_printoptions(threshold=np.inf) # print out all array values\n", "# print(t133.t.longitude)\n", "\n", - "df = to_dataframe(t133.t)\n", + "df = to_dataframe(obs_t133.t)\n", "df" ] }, @@ -117,7 +121,7 @@ "source": [ "plt.figure(figsize=(8, 5))\n", "#sns.histplot(df[\"oman\"], bins=50, kde=True, color=\"steelblue\")\n", - "sns.histplot(t133.t.oman, bins=100, kde=True, color=\"steelblue\")\n", + "sns.histplot(obs_t133.t.oman, bins=100, kde=True, color=\"steelblue\")\n", "plt.title(\"Histogram of oman\")\n", "plt.xlabel(\"oman values\")\n", "plt.ylabel(\"Density\")\n", @@ -198,16 +202,6 @@ "print(valid_df[valid_df[\"height\"] < 0][\"height\"]) # negative height" ] }, - { - "cell_type": "code", - "execution_count": null, - "id": "16", - "metadata": {}, - "outputs": [], - "source": [ - "print(grouped[\"height_bin\"])" - ] - }, { "cell_type": "code", "execution_count": null, @@ -216,7 +210,7 @@ "outputs": [], "source": [ "dz = 1000\n", - "grouped = fit_rate(t133.t, dz=dz)\n", + "grouped = fit_rate(obs_t133.t, dz=dz)\n", "\n", "# 5. 
Plot vertical profile of fit_rate vs height\n", "plt.figure(figsize=(7, 6))\n", @@ -242,6 +236,16 @@ "plt.tight_layout()\n", "plt.show()" ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "16", + "metadata": {}, + "outputs": [], + "source": [ + "print(grouped[\"height_bin\"])" + ] } ], "metadata": { From c8268ec01a612e517610dd6779fe9a8723e574d5 Mon Sep 17 00:00:00 2001 From: "guoqing.ge" Date: Tue, 12 Aug 2025 16:09:29 -0400 Subject: [PATCH 02/10] update pre-commit to skip removed notebooks --- ush/pre-commit | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ush/pre-commit b/ush/pre-commit index 9361432..6a11eaf 100755 --- a/ush/pre-commit +++ b/ush/pre-commit @@ -11,6 +11,9 @@ if [[ "$CONDA_DEFAULT_ENV" != "pyDAmonitor" ]]; then exit 1 fi for notebook in ${STAGED_NOTEBOOKS[@]}; do - ush/clean_notebooks.py ${notebook} + if [[ -s "${notebook}" ]]; then + echo "clean ${notebook}" >&2 + ush/clean_notebooks.py ${notebook} + fi git add ${notebook} done From a3af9ec46fda223341a0b616e28779d295318049 Mon Sep 17 00:00:00 2001 From: "guoqing.ge" Date: Tue, 12 Aug 2025 16:57:46 -0400 Subject: [PATCH 03/10] remove funcs/ and batch/; rename contrib/ to scripts/ --- funcs/base.py | 125 ------------------ funcs/colormap.py | 40 ------ funcs/contour_increment.py | 125 ------------------ funcs/fit_rate.py | 36 ----- funcs/histogram.py | 20 --- funcs/jdiag.py | 41 ------ funcs/plt_profile.py | 11 -- {contrib => scripts}/USContourMap.ipynb | 0 {contrib => scripts}/Untitled.ipynb | 0 {contrib => scripts}/colormap.py | 0 {batch => scripts}/contour_t_inc.py | 0 .../horizontal_mpasjedi_inc.py | 0 {contrib => scripts}/map_zoom.ipynb | 0 {contrib => scripts}/maps_fv3gsi_inc.py | 0 {contrib => scripts}/profile_diag.py | 0 {batch => scripts}/uxarray.py | 0 .../vertical_profile_fv3gsi.py | 0 .../vertical_profile_mpasjedi.py | 0 18 files changed, 398 deletions(-) delete mode 100644 funcs/base.py delete mode 100644 funcs/colormap.py delete mode 100644 
funcs/contour_increment.py delete mode 100644 funcs/fit_rate.py delete mode 100644 funcs/histogram.py delete mode 100644 funcs/jdiag.py delete mode 100644 funcs/plt_profile.py rename {contrib => scripts}/USContourMap.ipynb (100%) rename {contrib => scripts}/Untitled.ipynb (100%) rename {contrib => scripts}/colormap.py (100%) rename {batch => scripts}/contour_t_inc.py (100%) rename {contrib => scripts}/horizontal_mpasjedi_inc.py (100%) rename {contrib => scripts}/map_zoom.ipynb (100%) rename {contrib => scripts}/maps_fv3gsi_inc.py (100%) rename {contrib => scripts}/profile_diag.py (100%) rename {batch => scripts}/uxarray.py (100%) rename {contrib => scripts}/vertical_profile_fv3gsi.py (100%) rename {contrib => scripts}/vertical_profile_mpasjedi.py (100%) diff --git a/funcs/base.py b/funcs/base.py deleted file mode 100644 index 9a01c14..0000000 --- a/funcs/base.py +++ /dev/null @@ -1,125 +0,0 @@ -import os -import sys -import subprocess -from netCDF4 import Dataset - - -def source(bash_file, optional=False): - """ - Source a Bash file and capture the environment variables - """ - # check if bash_file exists - command = f"source {bash_file} && env" - proc = subprocess.Popen( - ["bash", "-c", command], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True - ) - stdout, stderr = proc.communicate() - if proc.returncode != 0: - if optional: - return # do nothing for optional config files - else: - raise Exception(f"Error sourcing bash file: {stderr}") - env_vars = {} - for line in stdout.splitlines(): - key, _, value = line.partition("=") - env_vars[key] = value - # Update the current environment - os.environ.update(env_vars) - - -# ---- end of source(bash_file) - - -def get_run_directory(): - """Get the run directory, handling both scripts and Jupyter Notebooks.""" - if "ipykernel" in sys.modules: # Running in a Jupyter Notebook - return os.getcwd() - else: # Running as a script - return os.path.dirname(os.path.abspath(__file__)) - - -def 
get_inv_bkg_ana_files(expdir, cdate): - source(f"{expdir}/exp.setup") - NET = os.getenv("NET") - RUN = NET - WGF = os.getenv("WGF") - TAG = os.getenv("TAG") - COMROOT = os.getenv("COMROOT") - DATAROOT = os.getenv("DATAROOT") - with open(f"{expdir}/VERSION", "r") as file: - VERSION = file.readline().strip() - # print(NET, RUN, WGF, TAG, VERSION, COMROOT, DATAROOT) - - # find the correct invariant.nc - jedivar_log = ( - f"{COMROOT}/{NET}/{VERSION}/logs/{RUN}.{cdate[:8]}/{cdate[8:10]}/{WGF}/{RUN}_jedivar_{TAG}_{cdate}.log" - ) - end_str = "./invariant.nc" - with open(f"{jedivar_log}", "r") as file: - for line in file: - line = line.strip() - if line.endswith(end_str): - inv_file = line[:-len(end_str)].split(":", 1)[1].strip()[len("ln -snf"):].strip() - break - # print(inv_file) - - # find the background file from the prep_ic log file - prep_ic_log = ( - f"{COMROOT}/{NET}/{VERSION}/logs/{RUN}.{cdate[:8]}/{cdate[8:10]}/{WGF}/{RUN}_prep_ic_{TAG}_{cdate}.log" - ) - start_str = "warm start from" - with open(f"{prep_ic_log}", "r") as file: - for line in file: - if line.startswith(start_str): - bkg_file = line[len(start_str):].strip() - break - # print(bkg_file) - - # find the analysis file from the UMBRELLA_PREP_IC - ana_file = f"{DATAROOT}/{cdate[:8]}/{RUN}_prep_ic_{cdate[8:10]}_{VERSION}/{WGF}/mpasin.nc" - # print(ana_file) - - files = { - "inv": inv_file, - "bkg": bkg_file, - "ana": ana_file, - } - return files - - -def load_inv_bkg_ana(files): - datasets = {} - datasets["inv"] = Dataset(files["inv"], "r") - datasets["bkg"] = Dataset(files["bkg"], "r") - datasets["ana"] = Dataset(files["ana"], "r") - return datasets - - -def query_dataset(dataset): - if dataset.groups: - for grp in dataset.groups: - print(grp) - text = " " - if dataset.groups[grp].groups: - for nestgrp in dataset.groups[grp].groups: - print(text + nestgrp) - text2 = " " - for var in dataset.groups[grp].groups[nestgrp].variables: - text2 += f"{var}, " - print(text + text2.rstrip(",")) - else: - for var in 
dataset.groups[grp].variables: - text += f"{var}, " - print(text.rstrip(",")) - else: - text = "" - for var in dataset.variables: - text += f"{var}, " - print(text.rstrip(",")) - - -def query_data(data): - text = "" - for var in data: - text += f"{var}, " - print(text.rstrip(",")) diff --git a/funcs/colormap.py b/funcs/colormap.py deleted file mode 100644 index 4107fd7..0000000 --- a/funcs/colormap.py +++ /dev/null @@ -1,40 +0,0 @@ -import matplotlib.colors as mcolors -import numpy as np - - -def diff_colormap(clevs): - size = len(clevs) - sizeby2 = int(size / 2) - pd = 1.0 / sizeby2 - colors = [() for i in range(size)] - incup = 0 - incdown = 1 - blue = (0, 0, 1) - colors[0] = blue - - # for j in range(1, sizeby2 - 1): - for j in range(1, sizeby2): - incup = np.round(incup + pd, 4) - colors[j] = (incup, incup, 1) - - white = (1.0, 1.0, 1.0) - colors[sizeby2 - 1] = white - colors[sizeby2] = white - colors[sizeby2 + 1] = white - - red = (1, 0, 0) - colors[-1] = red - - # for k in range(sizeby2 + 2, size): - for k in range(sizeby2 + 1, size): - incdown = np.round(incdown - pd, 4) - colors[k] = (1, incdown, incdown) - - # Make values near zero gray instead of white - gray = (0.75, 0.75, 0.75) - colors[sizeby2 - 1] = gray - colors[sizeby2] = gray - colors[sizeby2 + 1] = gray - - cmap = mcolors.LinearSegmentedColormap.from_list(name="red_white_blue", colors=colors) - return cmap diff --git a/funcs/contour_increment.py b/funcs/contour_increment.py deleted file mode 100644 index 4fd1f29..0000000 --- a/funcs/contour_increment.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python -from matplotlib.tri import Triangulation, TriAnalyzer -import warnings -import colormap -import numpy as np -import matplotlib.ticker as mticker -from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER -import cartopy.feature as cfeature -import cartopy.crs as ccrs -import matplotlib.pyplot as plt -import matplotlib - -matplotlib.use("agg") - 
-warnings.filterwarnings("ignore") - - -def contour_increment(datasets, parms, fig_name=None): - # ----------- USER INPUT ---------------------------------------------------------- - ilevel = parms["ilevel"] - plot_box_width = parms["plot_box_width"] - plot_box_height = parms["plot_box_height"] - cen_lat = parms["cen_lat"] - cen_lon = parms["cen_lon"] - convert_theta_to_t = parms["convert_theta_to_t"] - decimals = 2 # number of decimals to round for text boxes - # contour_max = 2.0 # max contour level for colorbar increment plots - # ---------------------------------------------------------------------------------- - # cartopy.config['data_dir']='../data/cartopy' - - # ---- - dsInv = datasets["inv"] - dsAna = datasets["ana"] - dsBkg = datasets["bkg"] - lats = np.array(dsInv.variables["latCell"][:]) * 180.0 / np.pi - lons0 = np.array(dsInv.variables["lonCell"][:]) * 180.0 / np.pi - lons = np.where(lons0 > 180.0, lons0 - 360.0, lons0) - - ilevel = ilevel - 1 # subtract 1 because python uses indices starting from 0 - ana = dsAna.variables["theta"][0, :, ilevel].astype(np.float64) - bkg = dsBkg.variables["theta"][0, :, ilevel].astype(np.float64) - - # Convert theta to temperature - if convert_theta_to_t: - pres_a = (dsAna.variables["pressure_p"][0, :, ilevel] + dsBkg["pressure_base"][0, :, ilevel]) / 100.0 - pres_b = (dsBkg.variables["pressure_p"][0, :, ilevel] + dsBkg["pressure_base"][0, :, ilevel]) / 100.0 - dividend_a = (1000.0 / pres_a) ** (0.286) - dividend_b = (1000.0 / pres_b) ** (0.286) - ana = ana / dividend_a - bkg = bkg / dividend_b - - # Compute increment and its max/min - increment = ana - bkg - inc_max = np.around(np.max(increment), decimals) - inc_min = np.around(np.min(increment), decimals) - - # decide the maximum color contours based on the increment values - contour_max = round((abs(inc_max) + abs(inc_min)) * 0.5) - - def plot_T_inc(var_n, clevmax): - longname = "airTemperature" - units = "K" - inc = 0.05 * clevmax - clevs = np.arange(-1.0 * clevmax, 
1.0 * clevmax + inc, inc) - cm = colormap.diff_colormap(clevs) - return clevs, cm, units, longname - - # CREATE PLOT ############################## - fig = plt.figure(figsize=(7, 4)) # , dpi=200) - m1 = fig.add_subplot(1, 1, 1, projection=ccrs.PlateCarree(central_longitude=0)) - - # Determine extent for plot domain - half = plot_box_width / 2.0 - left = cen_lon - half - right = cen_lon + half - half = plot_box_height / 2.0 - bot = cen_lat - half - top = cen_lat + half - - # Set extent for both plots - m1.set_extent([left, right, top, bot]) - - # Add features to the subplots - m1.add_feature(cfeature.COASTLINE) - m1.add_feature(cfeature.BORDERS) - m1.add_feature(cfeature.STATES) - - # Gridlines for the subplots - gl1 = m1.gridlines(crs=ccrs.PlateCarree(), draw_labels=True, linewidth=0.5, color="k", alpha=0.25, linestyle="-") - gl1.xlocator = mticker.FixedLocator(np.arange(-180.0, 181.0, 5.0)) - gl1.ylocator = mticker.FixedLocator(np.arange(-80.0, 91.0, 5.0)) - gl1.xformatter = LONGITUDE_FORMATTER - gl1.yformatter = LATITUDE_FORMATTER - gl1.xlabel_style = {"size": 5, "color": "gray"} - gl1.ylabel_style = {"size": 5, "color": "gray"} - - # Create triangulation and mask - triang = Triangulation(lons, lats) - mask = TriAnalyzer(triang).get_flat_tri_mask(min_circle_ratio=0.1) - triang.set_mask(mask) - - # Plot the data using triangulation - clevs, cm, units, longname = plot_T_inc(increment, contour_max) - c1 = m1.tricontourf(triang, increment, clevs, cmap=cm, extend="both") - - # Add colorbar - cbar1 = fig.colorbar(c1, orientation="horizontal", fraction=0.046, pad=0.07) - cbar1.set_label(units, size=8) - cbar1.ax.tick_params(labelsize=5, rotation=30) - - # Add titles, text, and save the figure - # Add 1 to ilevel since indicies start from 0 - plt.suptitle(f"Temperature increment at Level: {ilevel+1}", fontsize=9, y=0.95) - subtitle1_minmax = f"min: {np.around(np.min(increment), decimals)}\nmax: {np.around(np.max(increment), decimals)}" - m1.text(left * 0.99, bot * 1.01, 
f"{subtitle1_minmax}", fontsize=6, ha="left", va="bottom") - plt.tight_layout() - if fig_name: - plt.savefig(fig_name, dpi=250, bbox_inches="tight") - else: - plt.show() - - # Print some final stats - print("Stats:") - print(f" {longname} max: {inc_max}") - print(f" {longname} min: {inc_min}") diff --git a/funcs/fit_rate.py b/funcs/fit_rate.py deleted file mode 100644 index 8bed858..0000000 --- a/funcs/fit_rate.py +++ /dev/null @@ -1,36 +0,0 @@ -from jdiag import get_valid_subset -from plt_profile import plt_profile -import numpy as np - - -def fit_rate(data, dz): - oman = get_valid_subset(data, "oman") - ombg = get_valid_subset(data, "ombg") - height = get_valid_subset(data, "height") - print("bias: oma=", np.mean(oman), "omb=", np.mean(ombg)) - - rms_a = np.sqrt(np.mean(oman**2)) - rms_b = np.sqrt(np.mean(ombg**2)) - print("rms : oma=", rms_a, "omb=", rms_b) - print("fit rate = ", (rms_b - rms_a) / rms_b) - - max_hgt = max(height) - max_ztick = ((max_hgt // dz) + 1) * dz - min_ztick = 0 - zticks = range(min_ztick, max_ztick, dz) - print(zticks) - - ratio = np.empty(len(zticks)) - for i, hgt in enumerate(zticks): - h1 = hgt - h2 = h1 + dz - tmp_a = oman[(height[:] >= h1) & (height[:] < h2)] - tmp_b = ombg[(height[:] >= h1) & (height[:] < h2)] - # bias_a = np.mean(tmp_a) - # bias_b = np.mean(tmp_b) - rms_a = np.sqrt(np.mean(tmp_a**2)) - rms_b = np.sqrt(np.mean(tmp_b**2)) - ratio[i] = (rms_b - rms_a) / rms_b - print(i, rms_a, rms_b, ratio[i]) - - plt_profile(ratio, zticks, "fit rate") diff --git a/funcs/histogram.py b/funcs/histogram.py deleted file mode 100644 index 68d9e83..0000000 --- a/funcs/histogram.py +++ /dev/null @@ -1,20 +0,0 @@ -import matplotlib.pyplot as plt -import numpy as np -import matplotlib.ticker as ticker - - -def histogram(data, bin_size, n_xticks, xlabel="", title="", fig_name=None): - bins = np.arange(int(data.min()), int(data.max()) + bin_size, bin_size) - plt.figure() - plt.hist(data, bins=bins, edgecolor="black") - plt.xlabel(xlabel) - 
plt.ylabel("Frequency") - plt.title(title) - # Set x-axis locator to limit number of ticks - plt.gca().xaxis.set_major_locator(ticker.MaxNLocator(nbins=n_xticks)) # Adjust number of labels dynamically - # plt.xticks(bins) - plt.xticks(rotation=45) - if fig_name: - plt.savefig(fig_name, dpi=250, bbox_inches="tight") - else: - plt.show() diff --git a/funcs/jdiag.py b/funcs/jdiag.py deleted file mode 100644 index 52c0a57..0000000 --- a/funcs/jdiag.py +++ /dev/null @@ -1,41 +0,0 @@ -def load_jdiag(filename): - from netCDF4 import Dataset - - return Dataset(filename, "r") - - -def get_valid_subset(data, item, condition={"EffectiveQC2": 0}): - import numpy as np - - data2 = np.array(data[item]) - key, value = next(iter(condition.items())) - return data2[data[key] == value] - - -def get_jdiag_metadata(dataset): - # this is used for get metadata only - metadata = {} - for var in dataset.groups['MetaData'].variables: - metadata[var] = dataset.groups['MetaData'].variables[var][:] - return metadata - - -def get_jdiag_data(dataset, varname, get_metadata=True): - # This will get both metadata and regular data - data = {} - for grp in dataset.groups: - if dataset.groups[grp].groups: - for nestgrp in dataset.groups[grp].groups: # DiagnosticFlags - data[nestgrp] = dataset.groups[grp].groups[nestgrp].variables[varname][:] - else: - if grp == "MetaData": - if get_metadata: - for var in dataset.groups['MetaData'].variables: - data[var] = dataset.groups['MetaData'].variables[var][:] - elif grp == "ObsError" and varname == "specificHumidity": - data["ObsError"] = dataset.groups["ObsError"].variables["relativeHumidity"][:] - elif varname == "brightnessTemperature" and (grp == "ObsValue" or grp == "ObsValueAdj"): - data[grp] = dataset.groups[grp].variables["radiance"][:] - else: - data[grp] = dataset.groups[grp].variables[varname][:] - return data diff --git a/funcs/plt_profile.py b/funcs/plt_profile.py deleted file mode 100644 index ac5fa99..0000000 --- a/funcs/plt_profile.py +++ 
/dev/null @@ -1,11 +0,0 @@ -import matplotlib.pyplot as plt - - -def plt_profile(data, zticks, title, figname=None): - plt.plot(data, zticks) - plt.grid(linestyle=":") - plt.title(title) - if figname: - plt.savefig(figname, dpi=100, bbox_inches="tight") - else: - plt.show() diff --git a/contrib/USContourMap.ipynb b/scripts/USContourMap.ipynb similarity index 100% rename from contrib/USContourMap.ipynb rename to scripts/USContourMap.ipynb diff --git a/contrib/Untitled.ipynb b/scripts/Untitled.ipynb similarity index 100% rename from contrib/Untitled.ipynb rename to scripts/Untitled.ipynb diff --git a/contrib/colormap.py b/scripts/colormap.py similarity index 100% rename from contrib/colormap.py rename to scripts/colormap.py diff --git a/batch/contour_t_inc.py b/scripts/contour_t_inc.py similarity index 100% rename from batch/contour_t_inc.py rename to scripts/contour_t_inc.py diff --git a/contrib/horizontal_mpasjedi_inc.py b/scripts/horizontal_mpasjedi_inc.py similarity index 100% rename from contrib/horizontal_mpasjedi_inc.py rename to scripts/horizontal_mpasjedi_inc.py diff --git a/contrib/map_zoom.ipynb b/scripts/map_zoom.ipynb similarity index 100% rename from contrib/map_zoom.ipynb rename to scripts/map_zoom.ipynb diff --git a/contrib/maps_fv3gsi_inc.py b/scripts/maps_fv3gsi_inc.py similarity index 100% rename from contrib/maps_fv3gsi_inc.py rename to scripts/maps_fv3gsi_inc.py diff --git a/contrib/profile_diag.py b/scripts/profile_diag.py similarity index 100% rename from contrib/profile_diag.py rename to scripts/profile_diag.py diff --git a/batch/uxarray.py b/scripts/uxarray.py similarity index 100% rename from batch/uxarray.py rename to scripts/uxarray.py diff --git a/contrib/vertical_profile_fv3gsi.py b/scripts/vertical_profile_fv3gsi.py similarity index 100% rename from contrib/vertical_profile_fv3gsi.py rename to scripts/vertical_profile_fv3gsi.py diff --git a/contrib/vertical_profile_mpasjedi.py b/scripts/vertical_profile_mpasjedi.py similarity index 100% 
rename from contrib/vertical_profile_mpasjedi.py
rename to scripts/vertical_profile_mpasjedi.py

From a4c073f301b129b81466bd87b564fc96031f728b Mon Sep 17 00:00:00 2001
From: "guoqing.ge"
Date: Tue, 12 Aug 2025 17:05:58 -0400
Subject: [PATCH 04/10] add obs_exploring.ipynb into the book

---
 DAmonitor/obs/__init__.py | 2 +-
 docs/_toc.yml             | 5 ++++-
 docs/build_book.sh        | 1 +
 docs/intro.md             | 6 +-----
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/DAmonitor/obs/__init__.py b/DAmonitor/obs/__init__.py
index fce21ac..c03be32 100644
--- a/DAmonitor/obs/__init__.py
+++ b/DAmonitor/obs/__init__.py
@@ -2,6 +2,6 @@
 from .fit_rate import fit_rate
 
 __all__ = [
-    'ioda',
+    'obsSpace',
     'fit_rate',
 ]
diff --git a/docs/_toc.yml b/docs/_toc.yml
index 17fc7c8..1435144 100644
--- a/docs/_toc.yml
+++ b/docs/_toc.yml
@@ -1,5 +1,6 @@
 # Table of contents
 # Learn more at https://jupyterbook.org/customize/toc.html
+# **** Be sure to remove the trailing .ipynb or .md ****
 
 format: jb-book
 root: intro
@@ -10,7 +11,9 @@ parts:
   - file: example
   - file: markdown-notebooks
 
-- caption: demo
+- caption: DA monitor
   chapters:
   - file: notebook_docs/mpas_plotting
     title: mpas-jedi plotting
+  - file: notebook_docs/obs_exploring
+    title: obs exploring
diff --git a/docs/build_book.sh b/docs/build_book.sh
index 16636dd..f2f0879 100755
--- a/docs/build_book.sh
+++ b/docs/build_book.sh
@@ -4,6 +4,7 @@ doc_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
 
 #### add new notebooks to the notebooks array
 declare -A notebooks
 notebooks["mpas_plotting.ipynb"]="mpas_plotting.ipynb"
+notebooks["obs_exploring.ipynb"]="obs_exploring.ipynb"
 
 ### users usually do not need to make changes below this line
diff --git a/docs/intro.md b/docs/intro.md
index 618e1a3..758881b 100644
--- a/docs/intro.md
+++ b/docs/intro.md
@@ -9,11 +9,7 @@ MarkDown files can be used to add additional documentation.
# Table of Contents - [mpas_plotting Notebook](notebook_docs/mpas_plotting) -- place holder -- place holder -- place holder -- place holder -- place holder +- [obs exploring](notebook_docs/obs_exploring) - place holder - place holder - place holder From 543b17683aa6e23d21555691fe5aeb0a8cbfb8e4 Mon Sep 17 00:00:00 2001 From: "guoqing.ge" Date: Tue, 12 Aug 2025 17:10:42 -0400 Subject: [PATCH 05/10] fine tune --- notebooks/obs_exploring.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/obs_exploring.ipynb b/notebooks/obs_exploring.ipynb index e91a511..6c18cb4 100644 --- a/notebooks/obs_exploring.ipynb +++ b/notebooks/obs_exploring.ipynb @@ -5,7 +5,7 @@ "id": "0", "metadata": {}, "source": [ - "### Welcome!\n", + "# obs exploring\n", "