diff --git a/.codespellignore b/.codespellignore deleted file mode 100644 index 9dbaff10..00000000 --- a/.codespellignore +++ /dev/null @@ -1,2 +0,0 @@ -coo -daty diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 04e8df66..1c285527 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12", "3.13"] name: ${{ matrix.os }} - py${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest defaults: diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml index 70b35153..dfbb554b 100644 --- a/.github/workflows/code-style.yml +++ b/.github/workflows/code-style.yml @@ -15,10 +15,11 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v6 - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' + cache: 'pip' # caching pip dependencies - name: Install dependencies run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel @@ -27,11 +28,6 @@ jobs: run: ruff check . - name: Run codespell uses: codespell-project/actions-codespell@master - with: - check_filenames: true - check_hidden: true - skip: './.git,./build,./.mypy_cache,./.pytest_cache' - ignore_words_file: ./.codespellignore - name: Run pydocstyle run: pydocstyle . 
- name: Run bibclean diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index ca4abab4..afe909ce 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -20,10 +20,15 @@ jobs: uses: actions/checkout@v6 with: path: ./main - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' + cache: 'pip' # caching pip dependencies + - name: Install system dependencies + run: | + sudo apt-get update + sudo apt-get install -y pandoc - name: Install package run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel @@ -36,7 +41,9 @@ jobs: uses: actions/upload-artifact@v6 with: name: doc-dev - path: ./doc-build/dev + path: | + doc-build/dev + !doc-build/dev/.doctrees deploy: if: github.event_name == 'push' diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 2d4b1093..42e42a24 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -11,10 +11,10 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v6 - - name: Setup Python 3.10 + - name: Setup Python 3.13 uses: actions/setup-python@v6 with: - python-version: '3.10' + python-version: '3.13' - name: Install dependencies run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 548c6f69..3c2d47f2 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -19,11 +19,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: ["3.9", "3.10", "3.11", "3.12"] - # some tests fail (numerical issues) in older python on mac, so we ... 
- exclude: - - os: macos - python-version: '3.9' + python-version: ["3.10", "3.11", "3.12", "3.13"] name: ${{ matrix.os }} - py${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest defaults: diff --git a/doc/conf.py b/doc/conf.py index a372b488..f18653b6 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -27,7 +27,7 @@ # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "5.0" +needs_sphinx = "7.0" # The document name of the “root” document, that is, the document that contains # the root toctree directive. @@ -87,7 +87,6 @@ "css/style.css", ] html_title = project -html_show_sphinx = False # Documentation to change footer icons: # https://pradyunsg.me/furo/customisation/footer/#changing-footer-icons @@ -110,10 +109,9 @@ autosummary_generate = True # -- autodoc ----------------------------------------------------------------- -autodoc_typehints = "none" +autodoc_typehints = "description" autodoc_member_order = "groupwise" autodoc_warningiserror = True -autoclass_content = "class" # -- intersphinx ------------------------------------------------------------- intersphinx_mapping = { @@ -143,6 +141,7 @@ # LaPy "TetMesh": "lapy.TetMesh", "TriaMesh": "lapy.TriaMesh", + "Polygon": "lapy.Polygon", # Matplotlib "Axes": "matplotlib.axes.Axes", "Figure": "matplotlib.figure.Figure", @@ -184,6 +183,12 @@ r"\.__iter__", r"\.__div__", r"\.__neg__", + # Imported third-party objects (not part of LaPy API) + r"\.Any$", # typing.Any + r"\.csr_matrix$", # scipy.sparse.csr_matrix + r"\.minimize$", # scipy.optimize.minimize + r"\.bisect$", # bisect.bisect + r"\.LinearSegmentedColormap$", # matplotlib.colors.LinearSegmentedColormap } # -- sphinxcontrib-bibtex ---------------------------------------------------- @@ -253,27 +258,9 @@ def linkcode_resolve(domain: str, info: Dict[str, str]) -> Optional[str]: "within_subsection_order": FileNameSortKey, } -# -- make sure 
pandoc gets installed ----------------------------------------- -from inspect import getsourcefile -import os - -# Get path to directory containing this file, conf.py. -DOCS_DIRECTORY = os.path.dirname(os.path.abspath(getsourcefile(lambda: 0))) - -def ensure_pandoc_installed(_): - import pypandoc - - # Download pandoc if necessary. If pandoc is already installed and on - # the PATH, the installed version will be used. Otherwise, we will - # download a copy of pandoc into docs/bin/ and add that to our PATH. - pandoc_dir = os.path.join(DOCS_DIRECTORY, "bin") - # Add dir containing pandoc binary to the PATH environment variable - if pandoc_dir not in os.environ["PATH"].split(os.pathsep): - os.environ["PATH"] += os.pathsep + pandoc_dir - pypandoc.ensure_pandoc_installed( - targetfolder=pandoc_dir, - delete_installer=True, - ) - -def setup(app): - app.connect("builder-inited", ensure_pandoc_installed) +# -- Pandoc requirement ------------------------------------------------------ +# Note: Pandoc must be installed on the system for nbsphinx to convert notebooks. +# Install via system package manager (apt, brew, etc.) or conda/mamba. +# See: https://pandoc.org/installing.html +# +# For CI/CD, ensure pandoc is installed in the workflow (see .github/workflows/doc.yml) diff --git a/lapy/_tria_io.py b/lapy/_tria_io.py index ee836247..bc7f0397 100644 --- a/lapy/_tria_io.py +++ b/lapy/_tria_io.py @@ -4,7 +4,7 @@ """ import logging -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import numpy as np @@ -495,6 +495,146 @@ def read_gmsh(filename: str) -> tuple[np.ndarray, dict, dict, dict, dict]: return points, cells, point_data, cell_data, field_data +def read_gifti(filename: str) -> "TriaMesh": + """Load triangle mesh from GIFTI surface file. + + GIFTI (``.surf.gii``) is the standard surface format used by HCP, FSL, + FreeSurfer (since v6), and many other neuroimaging tools. 
The file must + contain at least one data array with intent ``NIFTI_INTENT_POINTSET`` + (coordinates) and one with intent ``NIFTI_INTENT_TRIANGLE`` (topology). + + Parameters + ---------- + filename : str + Path to a GIFTI surface file (e.g. ``lh.pial.surf.gii``). + + Returns + ------- + TriaMesh + Loaded triangle mesh. Vertex coordinates are returned in the + coordinate system stored in the file (usually surface RAS or MNI). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file is not found or not readable. + ValueError + If the GIFTI file does not contain both a POINTSET and a TRIANGLE + data array. + + Notes + ----- + The GIFTI format supports multiple coordinate systems per file + (``GiftiCoordSystem``). This function uses whatever coordinate system + nibabel exposes by default, which corresponds to the *first* coordinate + system listed in the file (typically surface RAS for FreeSurfer output). + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_gifti("lh.pial.surf.gii") # doctest: +SKIP + """ + logger.debug("--> GIFTI format ... ") + try: + import nibabel as nib + except ImportError as e: + raise ImportError( + "nibabel is required to read GIFTI files. " + "Install it with: pip install nibabel" + ) from e + try: + img = nib.load(filename) + except OSError: + logger.error("[file not found or not readable]") + raise + if not isinstance(img, nib.gifti.GiftiImage): + raise ValueError( + f"Expected a GIFTI image, but got {type(img).__name__}. " + "Make sure the file is a valid GIFTI surface file (.surf.gii)." + ) + try: + coords, trias = img.agg_data(("pointset", "triangle")) + except Exception as exc: + raise ValueError( + f"Could not extract POINTSET and TRIANGLE data arrays from {filename}. " + "Make sure the file is a valid GIFTI surface file."
+ ) from exc + if coords is None or trias is None: + raise ValueError( + f"{filename} does not contain both a POINTSET (coordinates) and a " + "TRIANGLE (topology) data array." + ) + coords = np.array(coords, dtype=float) + trias = np.array(trias, dtype=np.int64) + logger.info(" --> DONE ( V: %d , T: %d )", coords.shape[0], trias.shape[0]) + from . import TriaMesh + + return TriaMesh(coords, trias) + + +def write_gifti(tria: "TriaMesh", filename: str) -> None: + """Save triangle mesh to a GIFTI surface file. + + Writes the mesh as a ``.surf.gii`` GIFTI file using nibabel. The + coordinate data array uses intent ``NIFTI_INTENT_POINTSET`` and the + topology array uses intent ``NIFTI_INTENT_TRIANGLE``. + + Parameters + ---------- + tria : TriaMesh + Triangle mesh to save. + filename : str + Output filename (conventionally ends with ``.surf.gii``). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file cannot be written. + + Notes + ----- + Vertex coordinates are stored as ``float32`` and triangle indices as + ``int32``, matching the conventions of most neuroimaging software. + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_off("my_mesh.off") # doctest: +SKIP + >>> tria.write_gifti("my_mesh.surf.gii") # doctest: +SKIP + """ + try: + import nibabel as nib + from nibabel.gifti import GiftiDataArray, GiftiImage + from nibabel.nifti1 import intent_codes + except ImportError as e: + raise ImportError( + "nibabel is required to write GIFTI files. 
" + "Install it with: pip install nibabel" + ) from e + try: + coords = tria.v.astype(np.float32) + trias = tria.t.astype(np.int32) + coord_array = GiftiDataArray( + coords, + intent=intent_codes.code["NIFTI_INTENT_POINTSET"], + datatype="NIFTI_TYPE_FLOAT32", + ) + tria_array = GiftiDataArray( + trias, + intent=intent_codes.code["NIFTI_INTENT_TRIANGLE"], + datatype="NIFTI_TYPE_INT32", + ) + img = GiftiImage(darrays=[coord_array, tria_array]) + nib.save(img, filename) + except OSError: + logger.error("[File %s not writable]", filename) + raise + + def write_vtk(tria: "TriaMesh", filename: str) -> None: """Save VTK file. @@ -534,7 +674,12 @@ def write_vtk(tria: "TriaMesh", filename: str) -> None: f.close() -def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None) -> None: +def write_fssurf( + tria: "TriaMesh", + filename: str, + image: object | None = None, + coords_are_voxels: bool | None = None, +) -> None: """Save Freesurfer Surface Geometry file (wrap Nibabel). Parameters @@ -544,13 +689,23 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None filename : str Filename to save to. image : str, object, or None, default=None - Path to image, nibabel image object, or image header. If specified, the vertices - are assumed to be in voxel coordinates and are converted to surface RAS (tkr) - coordinates before saving. The expected order of coordinates is (x, y, z) - matching the image voxel indices in nibabel. + Path to image, nibabel image object, or image header. If specified, volume_info + will be extracted from the image header, and by default, vertices are assumed to + be in voxel coordinates and will be converted to surface RAS (tkr) before saving. + The expected order of coordinates is (x, y, z) matching the image voxel indices + in nibabel. + coords_are_voxels : bool or None, default=None + Specifies whether vertices are in voxel coordinates. 
If None (default), the + behavior is inferred: when image is provided, vertices are assumed to be in + voxel space and converted to surface RAS; when image is not provided, vertices + are assumed to already be in surface RAS. Set explicitly to True to force + conversion (requires image), or False to skip conversion even when image is + provided (only extracts volume_info). Raises ------ + ValueError + If coords_are_voxels is explicitly True but image is None. OSError If file is not writable. TypeError @@ -562,6 +717,17 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None ``get_vox2ras_tkr()`` (e.g., ``MGHHeader``). For other header types (NIfTI1/2, Analyze/SPM, etc.), we attempt conversion via ``MGHHeader.from_header``. """ + # Infer coords_are_voxels if not explicitly set + if coords_are_voxels is None: + coords_are_voxels = image is not None + + # Validate parameters + if coords_are_voxels and image is None: + raise ValueError( + "coords_are_voxels=True requires an image to be provided for voxel-to-surface-RAS " + "coordinate conversion. Either provide an image or set coords_are_voxels=False." + ) + # open file try: from nibabel.freesurfer.io import write_geometry @@ -594,7 +760,9 @@ def write_fssurf(tria: "TriaMesh", filename: str, image: Optional[object] = None "via MGHHeader.from_header)." 
) from e - v = apply_affine(header.get_vox2ras_tkr(), v) + # Convert from voxel to surface RAS coordinates if requested + if coords_are_voxels: + v = apply_affine(header.get_vox2ras_tkr(), v) # create volume_info from header affine = header.get_best_affine() diff --git a/lapy/conformal.py b/lapy/conformal.py index 3c7a0e05..11b2a8a2 100644 --- a/lapy/conformal.py +++ b/lapy/conformal.py @@ -18,7 +18,7 @@ """ import importlib import logging -from typing import Any, Union +from typing import Any import numpy as np from scipy import sparse @@ -535,7 +535,7 @@ def linear_beltrami_solver( def _sparse_symmetric_solve( A: csr_matrix, - b: Union[np.ndarray, csr_matrix], + b: np.ndarray | csr_matrix, use_cholmod: bool = False ) -> np.ndarray: """Solve a sparse symmetric linear system of equations Ax = b. diff --git a/lapy/heat.py b/lapy/heat.py index 26119058..adb0c46d 100644 --- a/lapy/heat.py +++ b/lapy/heat.py @@ -6,7 +6,6 @@ import importlib import logging -from typing import Optional, Union import numpy as np @@ -16,7 +15,7 @@ def diagonal( - t: Union[float, np.ndarray], + t: float | np.ndarray, x: np.ndarray, evecs: np.ndarray, evals: np.ndarray, @@ -60,7 +59,7 @@ def diagonal( def kernel( - t: Union[float, np.ndarray], + t: float | np.ndarray, vfix: int, evecs: np.ndarray, evals: np.ndarray, @@ -112,9 +111,9 @@ def kernel( def diffusion( geometry: object, - vids: Union[int, np.ndarray], + vids: int | np.ndarray, m: float = 1.0, - aniso: Optional[int] = None, + aniso: int | None = None, use_cholmod: bool = False, ) -> np.ndarray: """Compute the heat diffusion from initial vertices in vids. diff --git a/lapy/plot.py b/lapy/plot.py index 434074d4..12f06947 100644 --- a/lapy/plot.py +++ b/lapy/plot.py @@ -11,7 +11,6 @@ import re from bisect import bisect -from typing import Optional, Union import numpy as np import plotly @@ -21,7 +20,7 @@ from . 
import TetMesh, TriaMesh -def _get_color_levels() -> list[list[Union[float, str]]]: +def _get_color_levels() -> list[list[float | str]]: """Return a pre-set colorscale. Returns @@ -75,7 +74,7 @@ def _get_color_levels() -> list[list[Union[float, str]]]: return colorscale -def _get_colorscale(vmin: float, vmax: float) -> list[list[Union[float, str]]]: +def _get_colorscale(vmin: float, vmax: float) -> list[list[float | str]]: """Put together a colorscale map depending on the range of v-values. Parameters @@ -143,7 +142,7 @@ def _get_colorscale(vmin: float, vmax: float) -> list[list[Union[float, str]]]: return colorscale -def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: +def _get_colorval(t: float, colormap: list[list[float | str]]) -> str: """Turn a scalar value into a color value. Parameters @@ -171,7 +170,7 @@ def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: return colormap[-1][1] # ok here we need to interpolate # first find two colors before and after - columns = list(zip(*colormap)) + columns = list(zip(*colormap, strict=True)) pos = bisect(columns[0], t) # compute param between pos-1 and pos values if len(columns[0]) < pos + 1 or pos == 0: @@ -192,7 +191,7 @@ def _get_colorval(t: float, colormap: list[list[Union[float, str]]]) -> str: def _map_z2color( zval: float, - colormap: Union[LinearSegmentedColormap, list[list[Union[float, str]]]], + colormap: LinearSegmentedColormap | list[list[float | str]], zmin: float, zmax: float, ) -> str: @@ -242,21 +241,21 @@ def _map_z2color( def plot_tet_mesh( tetra: "TetMesh", - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, plot_edges: bool = False, plot_levels: bool = False, - tfunc: Optional[np.ndarray] = None, - cutting: Optional[Union[str, list[str]]] = None, + tfunc: np.ndarray | None = None, + cutting: str | list[str] | None = None, edge_color: str = "rgb(50,50,50)", html_output: bool = False, width: int = 800, height: int = 800, flatshading: 
bool = False, - xrange: Optional[Union[list[float], tuple[float, float]]] = None, - yrange: Optional[Union[list[float], tuple[float, float]]] = None, - zrange: Optional[Union[list[float], tuple[float, float]]] = None, + xrange: list[float] | tuple[float, float] | None = None, + yrange: list[float] | tuple[float, float] | None = None, + zrange: list[float] | tuple[float, float] | None = None, showcaxis: bool = False, - caxis: Optional[Union[list[float], tuple[float, float]]] = None, + caxis: list[float] | tuple[float, float] | None = None, ) -> None: """Plot tetra meshes. @@ -394,26 +393,26 @@ def plot_tet_mesh( def plot_tria_mesh( tria: "TriaMesh", - vfunc: Optional[np.ndarray] = None, - tfunc: Optional[np.ndarray] = None, - vcolor: Optional[list[str]] = None, - tcolor: Optional[list[str]] = None, + vfunc: np.ndarray | None = None, + tfunc: np.ndarray | None = None, + vcolor: list[str] | None = None, + tcolor: list[str] | None = None, showcaxis: bool = False, - caxis: Optional[Union[list[float], tuple[float, float]]] = None, - xrange: Optional[Union[list[float], tuple[float, float]]] = None, - yrange: Optional[Union[list[float], tuple[float, float]]] = None, - zrange: Optional[Union[list[float], tuple[float, float]]] = None, + caxis: list[float] | tuple[float, float] | None = None, + xrange: list[float] | tuple[float, float] | None = None, + yrange: list[float] | tuple[float, float] | None = None, + zrange: list[float] | tuple[float, float] | None = None, plot_edges: bool = False, plot_levels: bool = False, edge_color: str = "rgb(50,50,50)", tic_color: str = "rgb(50,200,10)", - background_color: Optional[str] = None, + background_color: str | None = None, flatshading: bool = False, width: int = 800, height: int = 800, - camera: Optional[dict[str, dict[str, float]]] = None, + camera: dict[str, dict[str, float]] | None = None, html_output: bool = False, - export_png: Optional[str] = None, + export_png: str | None = None, scale_png: float = 1.0, no_display: bool = 
False, ) -> None: @@ -496,8 +495,8 @@ def plot_tria_mesh( " but not both at the same time" ) - x, y, z = zip(*tria.v) - i, j, k = zip(*tria.t) + x, y, z = zip(*tria.v, strict=True) + i, j, k = zip(*tria.t, strict=True) vlines = [] if vfunc is None: diff --git a/lapy/polygon.py b/lapy/polygon.py index fbe10af9..0e4e0818 100644 --- a/lapy/polygon.py +++ b/lapy/polygon.py @@ -22,15 +22,21 @@ class Polygon: points : np.ndarray Array of shape (n, d) containing coordinates of polygon vertices in order, where d is 2 or 3 for 2D (x, y) or 3D (x, y, z) paths. - For closed polygons, the last point should not duplicate the first point. - closed : bool, default=False - If True, treats the path as a closed polygon. If False, treats it as - an open polyline. + If the last point duplicates the first point and closed is None, + the duplicate endpoint will be removed and the polygon will be + marked as closed automatically. + closed : bool or None, default=None + - If None (default): Auto-detect by checking if first and last points + are identical. If they are, removes duplicate endpoint and sets closed=True. + - If True: Treats the path as a closed polygon. If the last point duplicates + the first, it will be removed. + - If False: Treats the path as an open polyline regardless of endpoints. Attributes ---------- points : np.ndarray - Polygon vertex coordinates, shape (n_points, d). + Polygon vertex coordinates, shape (n_points, d). For closed polygons, + the last point does not duplicate the first. closed : bool Whether the polygon is closed or open. 
_is_2d : bool @@ -46,23 +52,22 @@ class Polygon: -------- >>> import numpy as np >>> from lapy.polygon import Polygon - >>> # Create a 2D closed polygon (square) - >>> square = np.array([[0, 0], [1, 0], [1, 1], [0, 1]]) - >>> poly = Polygon(square, closed=True) - >>> poly.is_2d() + >>> # Create a 2D closed polygon (square) - auto-detected + >>> square = np.array([[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]) + >>> poly = Polygon(square) # closed=None, will auto-detect and remove duplicate + >>> poly.closed True - >>> poly.length() - 4.0 - >>> # Create a 3D open path - >>> path_3d = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1]]) - >>> poly3d = Polygon(path_3d, closed=False) - >>> poly3d.is_2d() + >>> poly.points.shape[0] + 4 + >>> # Explicitly open polygon + >>> path = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [1, 1, 1]]) + >>> poly3d = Polygon(path, closed=False) + >>> poly3d.closed False """ - def __init__(self, points: np.ndarray, closed: bool = False): + def __init__(self, points: np.ndarray, closed: bool | None = None): self.points = np.array(points) - self.closed = closed # Validate non-empty polygon if self.points.size == 0: @@ -93,6 +98,25 @@ def __init__(self, points: np.ndarray, closed: bool = False): else: raise ValueError("Points should have 2 or 3 coordinates") + # Auto-detect closed polygons or handle explicit closed parameter + if closed is None: + # Auto-detect: check if first and last points are identical + if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): + # Remove duplicate endpoint + self.points = self.points[:-1] + self.closed = True + else: + # Not closed (endpoints differ) + self.closed = False + elif closed: + # Explicitly closed: remove duplicate endpoint if present + if len(self.points) > 1 and np.allclose(self.points[0], self.points[-1]): + self.points = self.points[:-1] + self.closed = True + else: + # Explicitly open + self.closed = False + def is_2d(self) -> bool: """Check if the polygon is 2D. 
@@ -417,12 +441,13 @@ def smooth_taubin( Polygon Smoothed polygon. Returns self if inplace=True, new instance otherwise. """ - if n <= 0: - raise ValueError("n must be a positive integer") + # Input validation to enforce documented parameter ranges + if not isinstance(n, int) or n <= 0: + raise ValueError(f"n must be a positive integer, got {n!r}") if lambda_ <= 0: - raise ValueError("lambda_ must be positive") + raise ValueError(f"lambda_ must be positive, got {lambda_!r}") if mu >= 0: - raise ValueError("mu must be negative") + raise ValueError(f"mu must be negative, got {mu!r}") mat = self._construct_smoothing_matrix() points_smooth = self.points.copy() diff --git a/lapy/shapedna.py b/lapy/shapedna.py index a6d321bb..dee85c4c 100644 --- a/lapy/shapedna.py +++ b/lapy/shapedna.py @@ -9,7 +9,7 @@ and comparison. """ import logging -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Union import numpy as np import scipy.spatial.distance as di @@ -97,7 +97,7 @@ def compute_shapedna( geom: Union["TriaMesh", "TetMesh"], k: int = 50, lump: bool = False, - aniso: Optional[Union[float, tuple[float, float]]] = None, + aniso: float | tuple[float, float] | None = None, aniso_smooth: int = 10, use_cholmod: bool = False, ) -> dict: @@ -171,9 +171,9 @@ def normalize_ev( evals: np.ndarray, method: str = "geometry", ) -> np.ndarray: - """Normalize eigenvalues for a surface or a volume. + """Normalize eigenvalues for a 2D surface or a 3D solid. - Normalizes eigenvalues to account for different mesh sizes and dimensions, + Normalizes eigenvalues to unit surface area or unit volume, enabling meaningful comparison between different shapes. 
Parameters @@ -185,8 +185,8 @@ def normalize_ev( method : {'surface', 'volume', 'geometry'}, default='geometry' Normalization method: - - 'surface': Normalize by surface area (for 2D surfaces) - - 'volume': Normalize by enclosed volume (for 3D objects) + - 'surface': Normalize to unit surface area + - 'volume': Normalize to unit volume - 'geometry': Automatically choose surface for TriaMesh, volume for TetMesh Returns @@ -197,13 +197,18 @@ def normalize_ev( Raises ------ ValueError - If method is not one of 'surface', 'volume', or 'geometry'. - If geometry type is unsupported for the chosen normalization. - If the measure (area/volume) is not positive. + If the method is not one of 'surface', 'volume', or 'geometry'. + If the geometry type is unsupported for the chosen normalization. + If method=volume and the volume of a surface is not defined. + + Notes + ----- + For TriaMesh with 'volume' method, the mesh must be closed and oriented + to compute a valid enclosed volume. """ geom_type = type(geom).__name__ if method == "surface": - return evals * _surface_measure(geom) ** (2.0 / 2.0) + return evals * _surface_measure(geom) if method == "volume": if geom_type == "TriaMesh": @@ -214,7 +219,7 @@ def normalize_ev( if method == "geometry": if geom_type == "TriaMesh": - return evals * _surface_measure(geom) ** (2.0 / 2.0) + return evals * _surface_measure(geom) if geom_type == "TetMesh": return evals * _boundary_volume(geom) ** (2.0 / 3.0) raise ValueError("Unsupported geometry type for geometry normalization") diff --git a/lapy/solver.py b/lapy/solver.py index e78b6cc2..90790668 100644 --- a/lapy/solver.py +++ b/lapy/solver.py @@ -1,7 +1,6 @@ import importlib import logging import sys -from typing import Optional, Union import numpy as np from scipy import sparse @@ -54,9 +53,9 @@ class Solver: def __init__( self, - geometry: Union[TriaMesh, TetMesh], + geometry: TriaMesh | TetMesh, lump: bool = False, - aniso: Optional[Union[float, tuple[float, float]]] = None, + 
aniso: float | tuple[float, float] | None = None, aniso_smooth: int = 10, use_cholmod: bool = False, dtype: np.dtype = np.float64, @@ -665,7 +664,7 @@ def _fem_voxels( b = sparse.csc_matrix((local_b, (i, j)), dtype=dtype) return a, b - def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: + def eigs(self, k: int = 10, sigma: float = -0.01) -> tuple[np.ndarray, np.ndarray]: """Compute the linear finite-element method Laplace-Beltrami spectrum. Parameters @@ -674,6 +673,10 @@ def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: The number of eigenvalues and eigenvectors desired. ``k`` must be smaller than ``N`` (number of vertices). It is not possible to compute all eigenvectors of a matrix. + sigma : float, default=-0.01 + Shift value for the shift-invert mode. The solver finds eigenvalues + near sigma. Negative values work well for finding smallest eigenvalues. + Adjust if convergence issues occur (typically small negative). Returns ------- @@ -688,7 +691,6 @@ def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: """ from scipy.sparse.linalg import LinearOperator, eigsh - sigma = -0.01 if self.use_cholmod: logger.info("Solver: Cholesky decomposition from scikit-sparse cholmod ...") chol = self.sksparse.cholmod.cholesky(self.stiffness - sigma * self.mass) @@ -715,7 +717,7 @@ def eigs(self, k: int = 10) -> tuple[np.ndarray, np.ndarray]: def poisson( self, - h: Union[float, np.ndarray] = 0.0, + h: float | np.ndarray = 0.0, dtup: tuple = (), ntup: tuple = (), ) -> np.ndarray: @@ -807,7 +809,7 @@ def poisson( ndat = ntup[1] if not (len(nidx) > 0 and len(nidx) == len(ndat)): raise ValueError( - "dtup should contain index and data arrays (same lengths > 0)" + "ntup should contain index and data arrays (same lengths > 0)" ) nvec = sparse.csc_matrix( (ndat, (nidx, np.zeros(len(nidx), dtype=np.uint32))), (dim, 1), dtype=dtype @@ -830,7 +832,7 @@ def poisson( a = a[mask, :] a = a.tocsc() elif self.stiffness.getformat() == "csr": - a = 
self.stiffness[mask, :].tocrc() + a = self.stiffness[mask, :].tocsr() a = a[:, mask] else: raise ValueError("A matrix needs to be sparse CSC or CSR") diff --git a/lapy/tet_mesh.py b/lapy/tet_mesh.py index 63dd1870..18a6884f 100644 --- a/lapy/tet_mesh.py +++ b/lapy/tet_mesh.py @@ -1,5 +1,5 @@ import logging -from typing import TYPE_CHECKING, Optional, Union +from typing import TYPE_CHECKING, Union import numpy as np from scipy import sparse @@ -195,7 +195,7 @@ def avg_edge_length(self) -> float: return edgelens.mean() def boundary_tria( - self, tetfunc: Optional[np.ndarray] = None + self, tetfunc: np.ndarray | None = None ) -> Union["TriaMesh", tuple["TriaMesh", np.ndarray]]: """Get boundary triangle mesh of tetrahedra. diff --git a/lapy/tria_mesh.py b/lapy/tria_mesh.py index a40e5845..5e120791 100644 --- a/lapy/tria_mesh.py +++ b/lapy/tria_mesh.py @@ -1,7 +1,6 @@ import logging import sys import warnings -from typing import Optional, Union import numpy as np from scipy import sparse @@ -64,7 +63,7 @@ class TriaMesh: maintaining compatibility with 2D mesh data. Empty meshes are not allowed. Use class methods (read_fssurf, read_vtk, - read_off) to load mesh data from files. + read_off, read_gifti) to load mesh data from files. """ def __init__(self, v, t, fsinfo=None): @@ -186,6 +185,48 @@ def read_vtk(cls, filename): """ return io.read_vtk(filename) + @classmethod + def read_gifti(cls, filename: str) -> "TriaMesh": + """Load triangle mesh from a GIFTI surface file. + + GIFTI (``.surf.gii``) is the standard surface format used by HCP, + FSL, FreeSurfer (since v6), and many other neuroimaging tools. + + Parameters + ---------- + filename : str + Path to a GIFTI surface file (e.g. ``lh.pial.surf.gii``). + + Returns + ------- + TriaMesh + Loaded triangle mesh. Vertex coordinates are in the coordinate + system stored in the file (usually surface RAS or MNI). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. 
+ OSError + If the file is not found or not readable. + ValueError + If the GIFTI file does not contain both a POINTSET and a TRIANGLE + data array. + + Notes + ----- + The GIFTI format supports multiple coordinate systems per file. + This function uses whatever coordinate system nibabel exposes by + default, which corresponds to the first coordinate system listed in + the file (typically surface RAS for FreeSurfer output). + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_gifti("lh.pial.surf.gii") # doctest: +SKIP + """ + return io.read_gifti(filename) + def write_vtk(self, filename: str) -> None: """Save mesh as VTK file. @@ -201,7 +242,42 @@ def write_vtk(self, filename: str) -> None: """ io.write_vtk(self, filename) - def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: + def write_gifti(self, filename: str) -> None: + """Save mesh as a GIFTI surface file. + + Writes the mesh as a ``.surf.gii`` GIFTI file using nibabel. + + Parameters + ---------- + filename : str + Output filename (conventionally ends with ``.surf.gii``). + + Raises + ------ + ImportError + If ``nibabel`` is not installed. + OSError + If the file cannot be written. + + Notes + ----- + Vertex coordinates are stored as ``float32`` and triangle indices as + ``int32``, matching the conventions of most neuroimaging software. + + Examples + -------- + >>> from lapy import TriaMesh + >>> tria = TriaMesh.read_off("my_mesh.off") # doctest: +SKIP + >>> tria.write_gifti("my_mesh.surf.gii") # doctest: +SKIP + """ + io.write_gifti(self, filename) + + def write_fssurf( + self, + filename: str, + image: object | None = None, + coords_are_voxels: bool | None = None, + ) -> None: """Save as Freesurfer Surface Geometry file (wrap Nibabel). Parameters @@ -209,17 +285,26 @@ def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: filename : str Filename to save to. 
image : str, object, None - Path to image, nibabel image object, or image header. If specified, the vertices - are assumed to be in voxel coordinates and are converted to surface RAS (tkr) - coordinates before saving. The expected order of coordinates is (x, y, z) - matching the image voxel indices in nibabel. + Path to image, nibabel image object, or image header. If specified, volume_info + will be extracted from the image header, and by default, vertices are assumed to + be in voxel coordinates and will be converted to surface RAS (tkr) before saving. + The expected order of coordinates is (x, y, z) matching the image voxel indices + in nibabel. + coords_are_voxels : bool or None, default=None + Specifies whether vertices are in voxel coordinates. If None (default), the + behavior is inferred: when image is provided, vertices are assumed to be in + voxel space and converted to surface RAS; when image is not provided, vertices + are assumed to already be in surface RAS. Set explicitly to True to force + conversion (requires image), or False to skip conversion even when image is + provided (only extracts volume_info). Raises ------ - IOError - If file cannot be written. ValueError + If coords_are_voxels is explicitly True but image is None. If image header cannot be processed. + IOError + If file cannot be written. Notes ----- @@ -227,7 +312,7 @@ def write_fssurf(self, filename: str, image: Optional[object] = None) -> None: ``get_vox2ras_tkr()`` (e.g., ``MGHHeader``). For other header types (NIfTI1/2, Analyze/SPM, etc.), we attempt conversion via ``MGHHeader.from_header``. """ - io.write_fssurf(self, filename, image=image) + io.write_fssurf(self, filename, image=image, coords_are_voxels=coords_are_voxels) def _construct_adj_sym(self): """Construct symmetric adjacency matrix (edge graph) of triangle mesh. 
@@ -329,6 +414,24 @@ def is_manifold(self): """ return np.max(self.adj_sym.data) <= 2 + def is_boundary(self) -> np.ndarray: + """Check which vertices are on the boundary. + + Returns + ------- + np.ndarray + Boolean array of shape (n_vertices,) where True indicates the vertex + is on a boundary edge (an edge that belongs to only one triangle). + """ + # Boundary edges have value 1 in the symmetric adjacency matrix + boundary_edges = self.adj_sym == 1 + # Get vertices that are part of any boundary edge + boundary_vertices = np.zeros(self.v.shape[0], dtype=bool) + rows, cols = boundary_edges.nonzero() + boundary_vertices[rows] = True + boundary_vertices[cols] = True + return boundary_vertices + def is_oriented(self) -> bool: """Check if triangle mesh is oriented. @@ -679,7 +782,7 @@ def connected_components(self) -> tuple[int, np.ndarray]: def keep_largest_connected_component_( self, clean: bool = True - ) -> tuple[Optional[np.ndarray], Optional[np.ndarray]]: + ) -> tuple[np.ndarray | None, np.ndarray | None]: """Keep only the largest connected component of the mesh. Modifies the mesh in-place. @@ -1014,6 +1117,113 @@ def curvature_tria( # find orthogonal umin next? return tumin2, tumax2, tcmin, tcmax + def critical_points(self, vfunc: np.ndarray) -> tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Compute critical points (extrema and saddles) of a function on mesh vertices. + + A minimum is a vertex where all neighbor values are larger. + A maximum is a vertex where all neighbor values are smaller. + A saddle is a vertex with at least two regions of neighbors with larger values, + and two with smaller values. Boundary vertices assume a virtual edge outside + the mesh that closes the boundary loop around the vertex. + + Parameters + ---------- + vfunc : np.ndarray + Real-valued function defined on mesh vertices, shape (n_vertices,). + + Returns + ------- + minima : np.ndarray + Array of vertex indices that are local minima, shape (n_minima,). 
+ maxima : np.ndarray + Array of vertex indices that are local maxima, shape (n_maxima,). + saddles : np.ndarray + Array of vertex indices that are saddles (all orders), shape (n_saddles,). + saddle_orders : np.ndarray + Array of saddle orders for each saddle vertex (same length as saddles), shape (n_saddles,). + Order 2 = simple saddle (4 sign flips), order 3+ = higher-order saddles. + + Notes + ----- + The algorithm works by: + + 1. For each vertex, compute difference: neighbor_value - vertex_value + 2. Minima: all differences are positive (all neighbors higher) + 3. Maxima: all differences are negative (all neighbors lower) + 4. Saddles: count sign flips across opposite edges in triangles at vertex + + - Regular point: 2 sign flips + - Simple saddle (order 2): 4 sign flips + - Higher-order saddle (order n): 2n sign flips, order = n_flips / 2 + + 5. Tie-breaker: when two vertices have equal function values, the vertex + with the higher vertex ID is treated as slightly larger to remove ambiguity. 
+ """ + vfunc = np.asarray(vfunc) + if len(vfunc) != self.v.shape[0]: + raise ValueError("vfunc length must match number of vertices") + n_vertices = self.v.shape[0] + + # Use SYMMETRIC adjacency matrix to get ALL edges (including boundary edges in both directions) + # COMPUTE EDGE SIGNS ONCE for all neighbor relationships + rows, cols = self.adj_sym.nonzero() + edge_diffs = vfunc[cols] - vfunc[rows] + edge_signs = np.sign(edge_diffs) + # Tie-breaker: when function values are equal, vertex with higher ID is larger + zero_mask = edge_signs == 0 + edge_signs[zero_mask] = np.sign(cols[zero_mask] - rows[zero_mask]) + # Create sparse matrix of edge signs for O(1) lookup + edge_sign_matrix = sparse.csr_matrix( + (edge_signs, (rows, cols)), + shape=(n_vertices, n_vertices) + ) + + # CLASSIFY MINIMA AND MAXIMA + # Compute min and max edge sign per vertex (row-wise) + # Note: edge_sign_matrix only contains +1/-1 (never 0 due to tie-breaker) + # Implicit zeros in sparse matrix represent non-neighbors and can be ignored + min_signs = edge_sign_matrix.min(axis=1).toarray().flatten() + max_signs = edge_sign_matrix.max(axis=1).toarray().flatten() + # Minimum: all neighbor signs are positive (+1) + # If min in {0,1} and max=1, all actual neighbors are +1 (zeros are non-neighbors) + is_minimum = (min_signs > -1) & (max_signs == 1) + # Maximum: all neighbor signs are negative (-1) + # If min=-1 and max in {-1,0}, all actual neighbors are -1 (zeros are non-neighbors) + is_maximum = (min_signs == -1) & (max_signs < 1) + minima = np.where(is_minimum)[0] + maxima = np.where(is_maximum)[0] + + # COUNT SIGN FLIPS at opposite edge for saddle detection + sign_flips = np.zeros(n_vertices, dtype=int) + t0 = self.t[:, 0] + t1 = self.t[:, 1] + t2 = self.t[:, 2] + # For vertex 0, opposite edge is (v1, v2) + sign0_1 = np.array(edge_sign_matrix[t0, t1]).flatten() + sign0_2 = np.array(edge_sign_matrix[t0, t2]).flatten() + sign1_2 = np.array(edge_sign_matrix[t1, t2]).flatten() + flip0 = (sign0_1 * 
sign0_2) < 0 + np.add.at(sign_flips, t0[flip0], 1) + # For vertex 1, opposite edge is (v2, v0) + # sign(v1→v0) = -sign(v0→v1) = -sign0_1 + flip1 = (sign1_2 * (-sign0_1)) < 0 + np.add.at(sign_flips, t1[flip1], 1) + # For vertex 2, opposite edge is (v0, v1) + # sign(v2→v0) = -sign(v0→v2) = -sign0_2 + # sign(v2→v1) = -sign(v1→v2) = -sign1_2 + flip2 = (sign0_2 * sign1_2) < 0 + np.add.at(sign_flips, t2[flip2], 1) + + # CLASSIFY SADDLES + # Saddles have 4+ flips (regular points have 2, minima/maxima have 0) + # Boundary vertices can have 3 flips (or more) to be a saddle, assuming an additional flip outside + # Order = n_flips / 2 + is_saddle = sign_flips >= 3 + saddles = np.where(is_saddle)[0] + saddle_orders = (sign_flips[saddles] + 1) // 2 + + return minima, maxima, saddles, saddle_orders + def normalize_(self) -> None: """Normalize TriaMesh to unit surface area and centroid at the origin. @@ -1373,10 +1583,10 @@ def smooth_vfunc(self, vfunc, n=1): def smooth_laplace( self, - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, n: int = 1, lambda_: float = 0.5, - mat: Optional[sparse.csc_matrix] = None, + mat: sparse.csc_matrix | None = None, ) -> np.ndarray: """Smooth the mesh or a vertex function using Laplace smoothing. @@ -1420,7 +1630,7 @@ def smooth_laplace( def smooth_taubin( self, - vfunc: Optional[np.ndarray] = None, + vfunc: np.ndarray | None = None, n: int = 1, lambda_: float = 0.5, mu: float = -0.53, @@ -1483,7 +1693,7 @@ def smooth_(self, n: int = 1) -> None: self.v = vfunc return - def level_length(self, vfunc: np.ndarray, level: Union[float, np.ndarray]) -> Union[float, np.ndarray]: + def level_length(self, vfunc: np.ndarray, level: float | np.ndarray) -> float | np.ndarray: """Compute the length of level sets. 
For a scalar function defined on mesh vertices, computes the total length @@ -1560,95 +1770,318 @@ def level_length(self, vfunc: np.ndarray, level: Union[float, np.ndarray]) -> Un @staticmethod def __reduce_edges_to_path( edges: np.ndarray, - start_idx: Optional[int] = None, + start_idx: int | None = None, get_edge_idx: bool = False, - ) -> Union[np.ndarray, tuple[np.ndarray, np.ndarray]]: - """Reduce undirected unsorted edges (index pairs) to path (index array). + ) -> list | tuple[list, list]: + """Reduce undirected unsorted edges to ordered path(s). - Converts an unordered list of edge pairs into an ordered path by finding - a traversal from one endpoint to the other. + Converts unordered edge pairs into ordered paths by finding traversals. + Handles single open paths, closed loops, and multiple disconnected components. + Always returns lists to handle multiple components uniformly. Parameters ---------- edges : np.ndarray Array of shape (n, 2) with pairs of positive integer node indices - representing undirected edges. All indices from 0 to max(edges) must - be used and graph needs to be connected. Nodes cannot have more than - 2 neighbors. Requires exactly two endnodes (nodes with only one - neighbor). For closed loops, cut it open by removing one edge before - passing to this function. + representing undirected edges. Node indices can have gaps (i.e., not all + indices from 0 to max need to appear in the edges). Only nodes that + actually appear in edges are processed. start_idx : int or None, default=None - Node with only one neighbor to start path. If None, the node with - the smaller index will be selected as start point. + Preferred start node. If None, selects an endpoint (degree 1) automatically, + or arbitrary node for closed loops. Must be a node that appears in edges. get_edge_idx : bool, default=False - If True, also return index of edge into edges for each path segment. - The returned edge index list has length n, while path has length n+1. 
+ If True, also return edge indices for each path segment. Returns ------- - path : np.ndarray - Array of length n+1 containing node indices as single path from start - to endpoint. - edge_idx : np.ndarray - Array of length n containing corresponding edge idx into edges for each - path segment. Only returned if get_edge_idx is True. + paths : list[np.ndarray] + List of ordered paths, one per connected component. For closed loops, + first node is repeated at end. + edge_idxs : list[np.ndarray] + List of edge index arrays, one per component. Only returned if + get_edge_idx=True. Raises ------ ValueError - If graph does not have exactly two endpoints. - If start_idx is not one of the endpoints. - If graph is not connected. + If start_idx is invalid. + If graph has degree > 2 (branching or self-intersections). """ - from scipy.sparse.csgraph import shortest_path - - # Extract node indices and create a sparse adjacency matrix edges = np.array(edges) + if edges.shape[0] == 0: + return ([], []) if get_edge_idx else [] + + # Build adjacency matrix i = np.column_stack((edges[:, 0], edges[:, 1])).reshape(-1) j = np.column_stack((edges[:, 1], edges[:, 0])).reshape(-1) dat = np.ones(i.shape) n = edges.max() + 1 adj_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - # Find the degree of each node, sum over rows to get degree degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() - endpoints = np.where(degrees == 1)[0] - if len(endpoints) != 2: - raise ValueError( - "The graph does not have exactly two endpoints; invalid input." 
- ) - if not start_idx: - start_idx = endpoints[0] - else: - if not np.isin(start_idx, endpoints): + + # Find connected components + n_comp, labels = sparse.csgraph.connected_components(adj_matrix, directed=False) + + # Build edge index lookup matrix + edge_dat = np.arange(edges.shape[0]) + 1 + edge_dat = np.column_stack((edge_dat, edge_dat)).reshape(-1) + eidx_matrix = sparse.csr_matrix((edge_dat, (i, j)), shape=(n, n)) + + paths = [] + edge_idxs = [] + + for comp_id in range(n_comp): + comp_nodes = np.where(labels == comp_id)[0] + if len(comp_nodes) == 0: + continue + + comp_degrees = degrees[comp_nodes] + + # Skip isolated nodes (degree 0) that exist only due to matrix sizing. + # When edges use indices [0, 5, 10], we create a matrix of size 11x11. + # Indices [1,2,3,4,6,7,8,9] don't appear in any edges (have degree 0). + # connected_components treats each as a separate component, but they're + # not real nodes - just phantom entries from sizing matrix to max_index+1. + if np.all(comp_degrees == 0): + continue + + # SAFETY CHECK: Reject graphs with nodes of degree > 2 + # This single check catches all problematic cases: + # - Branching structures (Y-shape, star graph) + # - Self-intersections (figure-8, etc.) + # - Trees with multiple endpoints + # Valid graphs have only degree 1 (endpoints) and degree 2 (path nodes) + max_degree = comp_degrees.max() + if max_degree > 2: + high_degree_nodes = comp_nodes[comp_degrees > 2] raise ValueError( - f"start_idx {start_idx} must be one of the endpoints {endpoints}." + f"Component {comp_id}: found {len(high_degree_nodes)} node(s) with degree > 2 " + f"(max degree: {max_degree}). Degrees: {np.sort(comp_degrees)}. " + f"This indicates branching or self-intersecting structure. " + f"Only simple paths and simple closed loops are supported." 
) - # Traverse the graph by computing shortest path - dist_matrix = shortest_path( - csgraph=adj_matrix, - directed=False, - indices=start_idx, - return_predecessors=False, - ) - if np.isinf(dist_matrix).any(): - raise ValueError( - "Ensure connected graph with indices from 0 to max_idx without gaps." - ) - # sort indices according to distance form start - path = dist_matrix.argsort() - # get edge idx of each segment from original list - enum = edges.shape[0] - dat = np.arange(enum) + 1 - dat = np.column_stack((dat, dat)).reshape(-1) - eidx_matrix = sparse.csr_matrix((dat, (i, j)), shape=(n, n)) - ei = path[0:-1] - ej = path[(np.arange(path.size - 1) + 1)] - eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() + + # Determine if closed loop: all nodes have degree 2 + is_closed = np.all(comp_degrees == 2) + + # For closed loops: break one edge to convert to open path + if is_closed: + # Pick start node + if start_idx is not None and start_idx in comp_nodes: + start = start_idx + else: + start = comp_nodes[0] + + # Find neighbors of start node to break one edge + neighbors = adj_matrix[start, :].nonzero()[1] + neighbors_in_comp = [n for n in neighbors if n in comp_nodes] + + if len(neighbors_in_comp) < 2: + raise ValueError(f"Component {comp_id}: Closed loop node {start} should have 2 neighbors") + + # Break the edge from start to first neighbor (temporarily) + adj_matrix = adj_matrix.tolil() + break_neighbor = neighbors_in_comp[0] + adj_matrix[start, break_neighbor] = 0 + adj_matrix[break_neighbor, start] = 0 + adj_matrix = adj_matrix.tocsr() + + # Update degrees after breaking edge + degrees = np.asarray(adj_matrix.sum(axis=1)).ravel() + comp_degrees = degrees[comp_nodes] + + # Now handle as open path (both originally open and converted closed loops) + endpoints = comp_nodes[comp_degrees == 1] + + if len(endpoints) != 2: + raise ValueError( + f"Component {comp_id}: Expected 2 endpoints after breaking loop, found {len(endpoints)}" + ) + + # Select start node + if 
is_closed: + # For closed loops, start is already selected above + pass + elif start_idx is not None and start_idx in endpoints: + start = start_idx + else: + # For originally open paths, pick first endpoint + start = endpoints[0] + + # Use shortest_path to order nodes by distance from start + dist = sparse.csgraph.shortest_path(adj_matrix, indices=start, directed=False) + + if np.isinf(dist[comp_nodes]).any(): + raise ValueError(f"Component {comp_id} is not fully connected.") + + # Sort nodes by distance from start + path = comp_nodes[dist[comp_nodes].argsort()] + + # For closed loops: append start again to close the loop + if is_closed: + path = np.append(path, path[0]) + + paths.append(path) + + # Get edge indices if requested + if get_edge_idx: + ei = path[:-1] + ej = path[1:] + eidx = np.asarray(eidx_matrix[ei, ej] - 1).ravel() + edge_idxs.append(eidx) + + # Always return lists if get_edge_idx: - return path, eidx + return paths, edge_idxs else: - return path + return paths + + def extract_level_paths( + self, + vfunc: np.ndarray, + level: float, + ) -> list[polygon.Polygon]: + """Extract level set paths as Polygon objects with triangle/edge metadata. + + For a given scalar function on mesh vertices, extracts all paths where + the function equals the specified level. Returns polygons with embedded + metadata about which triangles and edges were intersected. + Handles single open paths, closed loops, and multiple disconnected components. + + Parameters + ---------- + vfunc : np.ndarray + Scalar function values at vertices, shape (n_vertices,). Must be 1D. + level : float + Level set value to extract. + + Returns + ------- + list[polygon.Polygon] + List of Polygon objects, one for each connected level curve component. 
+ Each polygon has the following additional attributes: + - points : np.ndarray of shape (n_points, 3) - 3D coordinates on level curve + - closed : bool - whether the curve is closed + - tria_idx : np.ndarray of shape (n_segments,) - triangle index for each segment + - edge_vidx : np.ndarray of shape (n_points, 2) - mesh vertex indices for edge + - edge_bary : np.ndarray of shape (n_points,) - barycentric coordinate [0,1] + along edge where level set intersects (0=first vertex, 1=second vertex) + + Raises + ------ + ValueError + If vfunc is not 1-dimensional. + """ + if vfunc.ndim > 1: + raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") + + # Get intersecting triangles + intersect = vfunc[self.t] > level + t_idx = np.where( + np.logical_or( + np.sum(intersect, axis=1) == 1, np.sum(intersect, axis=1) == 2 + ) + )[0] + + if t_idx.size == 0: + return [] + + # Reduce to triangles that intersect with level + t_level = self.t[t_idx, :] + intersect = intersect[t_idx, :] + + # Invert triangles with two true values so single vertex is true + intersect[np.sum(intersect, axis=1) > 1, :] = np.logical_not( + intersect[np.sum(intersect, axis=1) > 1, :] + ) + + # Get index within triangle with single vertex + idx_single = np.argmax(intersect, axis=1) + idx_o1 = (idx_single + 1) % 3 + idx_o2 = (idx_single + 2) % 3 + + # Get global indices + gidx0 = t_level[np.arange(t_level.shape[0]), idx_single] + gidx1 = t_level[np.arange(t_level.shape[0]), idx_o1] + gidx2 = t_level[np.arange(t_level.shape[0]), idx_o2] + + # Sort edge indices (rows are triangles, cols are the two vertex ids) + # This creates unique edge identifiers + gg1 = np.sort( + np.concatenate((gidx0[:, np.newaxis], gidx1[:, np.newaxis]), axis=1) + ) + gg2 = np.sort( + np.concatenate((gidx0[:, np.newaxis], gidx2[:, np.newaxis]), axis=1) + ) + + # Concatenate all edges and get unique ones + gg = np.concatenate((gg1, gg2), axis=0) + gg_unique = np.unique(gg, axis=0) + + # Generate level set intersection 
points for unique edges + # Barycentric coordinate (0=first vertex, 1=second vertex) + xl = ((level - vfunc[gg_unique[:, 0]]) + / (vfunc[gg_unique[:, 1]] - vfunc[gg_unique[:, 0]])) + + # 3D points on unique edges + p = ((1 - xl)[:, np.newaxis] * self.v[gg_unique[:, 0]] + + xl[:, np.newaxis] * self.v[gg_unique[:, 1]]) + + # Fill sparse matrix with new point indices (+1 to distinguish from zero) + a_mat = sparse.csc_matrix( + (np.arange(gg_unique.shape[0]) + 1, (gg_unique[:, 0], gg_unique[:, 1])) + ) + + # For each triangle, create edge pair via lookup in matrix + edge_idxs = (np.concatenate((a_mat[gg1[:, 0], gg1[:, 1]], + a_mat[gg2[:, 0], gg2[:, 1]]), axis=0).T - 1) + + # Reduce edges to paths (returns list of paths for multiple components) + paths, path_edge_idxs = self._TriaMesh__reduce_edges_to_path(edge_idxs, get_edge_idx=True) + + # Build polygon objects for each connected component + polygons = [] + for path_nodes, path_edge_idx in zip(paths, path_edge_idxs, strict=True): + # Get 3D coordinates for this path + poly_v = p[path_nodes, :] + + # Remove duplicate vertices (when levelset hits a vertex almost perfectly) + dd = ((poly_v[0:-1, :] - poly_v[1:, :]) ** 2).sum(1) + dd = np.append(dd, 1) # Never delete last node + eps = 0.000001 + keep_ids = dd > eps + poly_v = poly_v[keep_ids, :] + + # Get triangle indices for each edge + poly_tria_idx = t_idx[path_edge_idx[keep_ids[:-1]]] + + # Get edge vertex indices + poly_edge_vidx = gg_unique[path_nodes[keep_ids], :] + + # Get barycentric coordinates + poly_edge_bary = xl[path_nodes[keep_ids]] + + # Create polygon with metadata + # Use closed=None to let Polygon auto-detect based on endpoints + # This will automatically remove duplicate endpoint if present + n_points_before = poly_v.shape[0] + poly = polygon.Polygon(poly_v, closed=None) + n_points_after = poly.points.shape[0] + + # If Polygon removed duplicate endpoint, adjust metadata + if n_points_after < n_points_before: + # Remove last entry from metadata to match 
polygon points + poly_edge_vidx = poly_edge_vidx[:n_points_after] + poly_edge_bary = poly_edge_bary[:n_points_after] + + poly.tria_idx = poly_tria_idx + poly.edge_vidx = poly_edge_vidx + poly.edge_bary = poly_edge_bary + + polygons.append(poly) + + return polygons def level_path( self, @@ -1656,7 +2089,7 @@ def level_path( level: float, get_tria_idx: bool = False, get_edges: bool = False, - n_points: Optional[int] = None, + n_points: int | None = None, ) -> tuple[np.ndarray, ...]: """Extract levelset of vfunc at a specific level as a path of 3D points. @@ -1666,10 +2099,23 @@ def level_path( The points are sorted and returned as an ordered array of 3D coordinates together with the length of the level set path. + This implementation uses extract_level_paths internally to compute the + level set, ensuring consistent handling of closed loops and metadata. + .. note:: - Only works for level sets that represent a single non-intersecting - path with exactly one start and one endpoint (e.g., not closed)! + Works for level sets that represent a single path or closed loop. + For closed loops, the first and last points are identical (duplicated) + so you can detect closure via ``np.allclose(path[0], path[-1])``. + For open paths, the path has two distinct endpoints. + This function is kept mainly for backward compatibility. + + **For more advanced use cases, consider using extract_level_paths() directly:** + + - Multiple disconnected components: extract_level_paths returns all components + - Custom resampling: Get Polygon objects and use Polygon.resample() directly + - Metadata analysis: Access triangle indices and edge information per component + - Closed loop detection: Polygon objects have a ``closed`` attribute Parameters ---------- @@ -1679,6 +2125,8 @@ def level_path( Level set value to extract. get_tria_idx : bool, default=False If True, also return array of triangle indices for each path segment. 
+ For closed loops with n points (including duplicate), returns n-1 triangle + indices. For open paths with n points, returns n-1 triangle indices. get_edges : bool, default=False If True, also return arrays of vertex indices (i,j) and relative positions for each 3D point along the intersecting edge. @@ -1690,12 +2138,15 @@ def level_path( ------- path : np.ndarray Array of shape (n, 3) containing 3D coordinates of vertices on the - level path. + level path. For closed loops, the last point duplicates the first point, + so ``np.allclose(path[0], path[-1])`` will be True. length : float - Length of the level set. + Total length of the level set path. For closed loops, includes the + closing segment from last to first point. tria_idx : np.ndarray Array of triangle indices for each segment on the path, shape (n-1,) - if the path has length n. Only returned if get_tria_idx is True. + where n is the number of points (including duplicate for closed loops). + Only returned if get_tria_idx is True. edges_vidxs : np.ndarray Array of shape (n, 2) with vertex indices (i,j) for each 3D point, defining the vertices of the original mesh edge intersecting the @@ -1711,101 +2162,92 @@ def level_path( ------ ValueError If vfunc is not 1-dimensional. + If multiple disconnected level paths are found (use extract_level_paths). If n_points is combined with get_tria_idx=True. If n_points is combined with get_edges=True. + + See Also + -------- + extract_level_paths : Extract multiple disconnected level paths as Polygon objects. + Recommended for advanced use cases with multiple components, custom resampling, + or when you need explicit closed/open information. 
+ + Examples + -------- + >>> # Extract a simple level curve + >>> path, length = mesh.level_path(vfunc, 0.5) + >>> print(f"Path has {path.shape[0]} points, length {length:.2f}") + + >>> # Check if the path is closed + >>> is_closed = np.allclose(path[0], path[-1]) + >>> print(f"Path is closed: {is_closed}") + + >>> # Get triangle indices for each segment + >>> path, length, tria_idx = mesh.level_path(vfunc, 0.5, get_tria_idx=True) + + >>> # Get complete edge metadata + >>> path, length, edges, relpos = mesh.level_path(vfunc, 0.5, get_edges=True) + + >>> # Resample to fixed number of points + >>> path, length = mesh.level_path(vfunc, 0.5, n_points=100) + + >>> # For multiple components, use extract_level_paths instead + >>> curves = mesh.extract_level_paths(vfunc, 0.5) + >>> for i, curve in enumerate(curves): + >>> print(f"Component {i}: {curve.points.shape[0]} points, closed={curve.closed}") """ - if vfunc.ndim > 1: - raise ValueError(f"vfunc needs to be 1-dim, but is {vfunc.ndim}-dim!") - # get intersecting triangles - intersect = vfunc[self.t] > level - t_idx = np.where( - np.logical_or( - np.sum(intersect, axis=1) == 1, np.sum(intersect, axis=1) == 2 - ) - )[0] - # reduce to triangles that intersect with level: - t_level = self.t[t_idx, :] - intersect = intersect[t_idx, :] - # trias have one vertex on one side and two on the other side of the level set - # invert trias with two true values, so that single vertex is true - intersect[np.sum(intersect, axis=1) > 1, :] = np.logical_not( - intersect[np.sum(intersect, axis=1) > 1, :] - ) - # get idx within tria with single vertex: - idx_single = np.argmax(intersect, axis=1) - idx_o1 = (idx_single + 1) % 3 - idx_o2 = (idx_single + 2) % 3 - # get global idx - gidx0 = t_level[np.arange(t_level.shape[0]), idx_single] - gidx1 = t_level[np.arange(t_level.shape[0]), idx_o1] - gidx2 = t_level[np.arange(t_level.shape[0]), idx_o2] - # sort edge indices (rows are trias, cols are the two vertex ids) - gg1 = np.sort( - 
np.concatenate((gidx0[:, np.newaxis], gidx1[:, np.newaxis]), axis=1) - ) - gg2 = np.sort( - np.concatenate((gidx0[:, np.newaxis], gidx2[:, np.newaxis]), axis=1) - ) - # concatenate all and get unique ones - gg = np.concatenate((gg1, gg2), axis=0) - gg_unique = np.unique(gg, axis=0) - # generate level set intersection points for unique edges - xl = ((level - vfunc[gg_unique[:, 0]]) - / ( vfunc[gg_unique[:, 1]] - vfunc[gg_unique[:, 0]])) - p = ((1 - xl)[:, np.newaxis] * self.v[gg_unique[:, 0]] - + xl[:, np.newaxis] * self.v[gg_unique[:, 1]]) - # fill sparse matrix with new point indices (+1 to distinguish from zero) - a_mat = sparse.csc_matrix( - (np.arange(gg_unique.shape[0]) + 1, (gg_unique[:, 0], gg_unique[:, 1])) - ) - # for each tria create one edge via lookup in matrix - edge_idxs = ( np.concatenate((a_mat[gg1[:, 0], gg1[:, 1]], - a_mat[gg2[:, 0], gg2[:, 1]]), axis=0).T - 1 ) - # lengths computation - p1 = np.squeeze(p[edge_idxs[:, 0]]) - p2 = np.squeeze(p[edge_idxs[:, 1]]) - llength = np.sqrt(((p1 - p2) ** 2).sum(1)).sum() - # compute path from unordered, not-directed edge list - # and return path as list of points, and path length - if get_tria_idx: - path, edge_idx = TriaMesh.__reduce_edges_to_path( - edge_idxs, get_edge_idx=get_tria_idx + # Use extract_level_paths to get polygons + curves = self.extract_level_paths(vfunc, level) + + # level_path expects single component + if len(curves) != 1: + raise ValueError( + f"Found {len(curves)} disconnected level curves. " + f"Use extract_level_paths() for multiple components." 
) - # translate local edge id to global tria id - tria_idx = t_idx[edge_idx] - else: - path = TriaMesh.__reduce_edges_to_path(edge_idxs, get_tria_idx) - # remove duplicate vertices (happens when levelset hits a vertex almost - # perfectly) - path3d = p[path, :] - dd = ((path3d[0:-1, :] - path3d[1:, :]) ** 2).sum(1) - # append 1 (never delete last node, if identical to the one before, we delete - # the one before) - dd = np.append(dd, 1) - eps = 0.000001 - keep_ids = dd > eps - path3d = path3d[keep_ids, :] - edges_vidxs = gg_unique[path, :] - edges_vidxs = edges_vidxs[keep_ids, :] - edges_relpos = xl[path] - edges_relpos = edges_relpos[keep_ids] - if get_tria_idx: - if n_points: + + # Get the single curve + curve = curves[0] + + # Extract data from polygon + path3d = curve.points + edges_vidxs = curve.edge_vidx + edges_relpos = curve.edge_bary + + # Compute length using polygon's length method + length = curve.length() + + # For closed loops, duplicate the last point to match first point + # This allows users to detect closure via np.allclose(path[0], path[-1]) + # and maintains backward compatibility with the original level_path behavior + if curve.closed: + path3d = np.vstack([path3d, path3d[0:1]]) + edges_vidxs = np.vstack([edges_vidxs, edges_vidxs[0:1]]) + edges_relpos = np.append(edges_relpos, edges_relpos[0]) + + # Handle optional resampling + if n_points: + if get_tria_idx: raise ValueError("n_points cannot be combined with get_tria_idx=True.") - tria_idx = tria_idx[dd[:-1] > eps] if get_edges: - return path3d, llength, tria_idx, edges_vidxs, edges_relpos + raise ValueError("n_points cannot be combined with get_edges=True.") + poly = polygon.Polygon(path3d, closed=curve.closed) + path3d = poly.resample(n_points=n_points, n_iter=3, inplace=False) + path3d = path3d.get_points() + if curve.closed: + path3d = np.vstack([path3d, path3d[0:1]]) + + # Build return tuple based on options + if get_tria_idx: + tria_idx = curve.tria_idx + if get_edges: + return path3d, 
length, tria_idx, edges_vidxs, edges_relpos else: - return path3d, llength, tria_idx + return path3d, length, tria_idx else: - if n_points: - if get_edges: - raise ValueError("n_points cannot be combined with get_edges=True.") - poly = polygon.Polygon(path3d, closed=False) - path3d = poly.resample(n_points=n_points, n_iter=3, inplace=False) - path3d = path3d.get_points() if get_edges: - return path3d, llength, edges_vidxs, edges_relpos + return path3d, length, edges_vidxs, edges_relpos else: - return path3d, llength + return path3d, length + diff --git a/lapy/utils/_config.py b/lapy/utils/_config.py index 44722fd4..774d8b1a 100644 --- a/lapy/utils/_config.py +++ b/lapy/utils/_config.py @@ -1,14 +1,15 @@ import platform import re import sys +from collections.abc import Callable from functools import partial from importlib.metadata import requires, version -from typing import IO, Callable, Optional +from typing import IO import psutil -def sys_info(fid: Optional[IO] = None, developer: bool = False): +def sys_info(fid: IO | None = None, developer: bool = False): """Print the system information for debugging. Parameters diff --git a/lapy/utils/tests/test_tria_mesh.py b/lapy/utils/tests/test_tria_mesh.py index a3b0a4a5..d9b1106d 100644 --- a/lapy/utils/tests/test_tria_mesh.py +++ b/lapy/utils/tests/test_tria_mesh.py @@ -630,3 +630,466 @@ def test_2d_mesh_support(): assert v2d.shape == (4, 2), "Should return 2D vertices" np.testing.assert_array_almost_equal(v2d, vertices_2d) + +def test_critical_points_simple(): + """ + Test critical_points on a simple mesh with known extrema. 
+ """ + # Create a simple triangular mesh with 3 peaks and a valley + vertices = np.array([ + [0.0, 0.0, 0.5], # 0: center (middle height) + [1.0, 0.0, 1.0], # 1: peak + [0.0, 1.0, 0.0], # 2: valley + [-1.0, 0.0, 1.0], # 3: peak + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + [0, 3, 1], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function: z-coordinate + vfunc = vertices[:, 2] + + # Compute critical points + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Expected: vertex 2 is minimum (z=0), vertices 1 and 3 are maxima (z=1) + assert 2 in minima, f"Expected vertex 2 (valley) as minimum, got minima={minima}" + assert 1 in maxima or 3 in maxima, f"Expected vertices 1 or 3 (peaks) as maxima, got maxima={maxima}" + # Vertex 0 is at mid-height, surrounded by higher and lower neighbors - could be saddle or regular + + # Simpler test: just verify the function runs without error + assert len(minima) + len(maxima) + len(saddles) > 0, "Should find some critical points" + + +def test_critical_points_saddle(): + """ + Test critical_points on a mesh with a saddle point. 
+ """ + # Create a simple saddle mesh: 3x3 grid in xy-plane + # Height creates saddle at center + vertices = np.array([ + [-1, -1, 1], # 0: corner (high) + [0, -1, 0], # 1: edge midpoint (low) + [1, -1, 1], # 2: corner (high) + [-1, 0, 0], # 3: edge midpoint (low) + [0, 0, 0.5], # 4: center (saddle) + [1, 0, 0], # 5: edge midpoint (low) + [-1, 1, 1], # 6: corner (high) + [0, 1, 0], # 7: edge midpoint (low) + [1, 1, 1], # 8: corner (high) + ], dtype=float) + + # Create triangular mesh from grid + triangles = np.array([ + [0, 1, 4], [0, 4, 3], # bottom-left quad + [1, 2, 5], [1, 5, 4], # bottom-right quad + [3, 4, 7], [3, 7, 6], # top-left quad + [4, 5, 8], [4, 8, 7], # top-right quad + ]) + mesh = TriaMesh(vertices, triangles) + + # Use z-coordinate as function + vfunc = vertices[:, 2] + + # Compute critical points + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Center should be a saddle (neighbors alternate high-low) + assert 4 in saddles, f"Expected vertex 4 (center) to be a saddle, saddles={saddles}" + # Check saddle order for center vertex + saddle_idx = np.where(saddles == 4)[0] + if len(saddle_idx) > 0: + order = saddle_orders[saddle_idx[0]] + assert order >= 2, f"Expected saddle order >= 2, got {order}" + + +def test_critical_points_with_ties(): + """ + Test critical_points with equal function values (tie-breaker rule). 
+ """ + # Simple triangle mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.5, 1.0, 0.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # All vertices have same value (ties everywhere) + vfunc = np.array([1.0, 1.0, 1.0]) + + # Should not crash, tie-breaker should resolve ambiguities + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # With tie-breaker, vertex with highest ID is treated as larger + # So vertex 2 should be maximum, vertex 0 should be minimum + assert 0 in minima, "Expected vertex 0 to be minimum with tie-breaker" + assert 2 in maxima, "Expected vertex 2 to be maximum with tie-breaker" + + +def test_critical_points_boundary(): + """ + Test critical_points on a mesh with boundary (non-closed). + """ + # Single triangle (has boundary) + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 1.0], + [0.0, 1.0, 0.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + minima, maxima, saddles, saddle_orders = mesh.critical_points(vfunc) + + # Vertex 1 is highest (z=1), vertices 0 and 2 are lowest (z=0) + assert 1 in maxima, f"Expected vertex 1 as maximum, got maxima={maxima}" + assert len(minima) >= 1, f"Expected at least one minimum, got {len(minima)}" + + +def test_extract_level_paths_simple(): + """ + Test extract_level_paths on a simple mesh with single level curve. 
+ """ + # Create a simple mesh: unit square in xy-plane + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [1.0, 1.0, 1.0], + [0.0, 1.0, 1.0], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function (z-coordinate) + vfunc = vertices[:, 2] + + # Extract level curve at z=0.5 + curves = mesh.extract_level_paths(vfunc, 0.5) + + # Should have at least one curve + assert len(curves) > 0, "Expected at least one level curve" + + # Check that returned objects are Polygon + from ...polygon import Polygon + for curve in curves: + assert isinstance(curve, Polygon), f"Expected Polygon, got {type(curve)}" + + # Check that polygons have required attributes + for curve in curves: + assert hasattr(curve, 'points'), "Polygon should have 'points' attribute" + assert hasattr(curve, 'closed'), "Polygon should have 'closed' attribute" + assert hasattr(curve, 'tria_idx'), "Polygon should have 'tria_idx' attribute" + assert hasattr(curve, 'edge_vidx'), "Polygon should have 'edge_vidx' attribute" + assert hasattr(curve, 'edge_bary'), "Polygon should have 'edge_bary' attribute" + + # Check that points are 3D + for curve in curves: + assert curve.points.shape[1] == 3, f"Expected 3D points, got shape {curve.points.shape}" + + +def test_extract_level_paths_closed_loop(): + """ + Test extract_level_paths on a mesh with closed loop level curve. 
+ """ + # Create a pyramid mesh + vertices = np.array([ + [0.0, 0.0, 0.0], # base + [1.0, 0.0, 0.0], # base + [1.0, 1.0, 0.0], # base + [0.0, 1.0, 0.0], # base + [0.5, 0.5, 1.0], # apex + ]) + triangles = np.array([ + [0, 1, 4], + [1, 2, 4], + [2, 3, 4], + [3, 0, 4], + [0, 2, 1], + [0, 3, 2], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve at mid-height (should create closed loop around pyramid) + curves = mesh.extract_level_paths(vfunc, 0.5) + + assert len(curves) == 1, "Expected one level curve" + + # Curve should be closed + assert curves[0].closed, "Expected closed level curve" + + # Note: May or may not be closed depending on mesh topology + + # All points should be approximately at z=0.5 + for curve in curves: + z_coords = curve.points[:, 2] + np.testing.assert_allclose(z_coords, 0.5, atol=1e-5, + err_msg="Level curve points should be at z=0.5") + + +def test_extract_level_paths_no_intersection(): + """ + Test extract_level_paths when level doesn't intersect mesh. + """ + # Simple triangle + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 1.0], + [0.0, 1.0, 1.0], + ]) + triangles = np.array([[0, 1, 2]]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve at z=10.0 (way above mesh) + curves = mesh.extract_level_paths(vfunc, 10.0) + + # Should return empty list + assert len(curves) == 0, f"Expected no curves, got {len(curves)}" + + +def test_extract_level_paths_metadata(): + """ + Test that extract_level_paths returns correct metadata. 
+ """ + # Create a simple mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.5], + [1.0, 1.0, 1.0], + [0.0, 1.0, 0.5], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Height function + vfunc = vertices[:, 2] + + # Extract level curve + curves = mesh.extract_level_paths(vfunc, 0.5) + + for curve in curves: + n_points = curve.points.shape[0] + + # Check tria_idx + assert hasattr(curve, 'tria_idx'), "Missing tria_idx" + # Number of segments (tria_idx) depends on whether curve is closed + expected_tria_len = n_points if curve.closed else n_points - 1 + assert curve.tria_idx.shape == (expected_tria_len,), \ + f"tria_idx should be ({expected_tria_len},), got {curve.tria_idx.shape}" + + # Check edge_vidx + assert hasattr(curve, 'edge_vidx'), "Missing edge_vidx" + assert curve.edge_vidx.shape == (n_points, 2), \ + f"edge_vidx should be (n_points, 2), got {curve.edge_vidx.shape}" + + # Check edge_bary + assert hasattr(curve, 'edge_bary'), "Missing edge_bary" + assert curve.edge_bary.shape == (n_points,), \ + f"edge_bary should be (n_points,), got {curve.edge_bary.shape}" + # Barycentric coordinates should be in [0, 1] + assert np.all(curve.edge_bary >= 0) and np.all(curve.edge_bary <= 1), \ + "edge_bary should be in [0, 1]" + + +def test_level_path(): + """ + Test level_path function for single path extraction with various options. 
+ """ + # Create a simple mesh with a clear level curve + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.5], + [1.0, 1.0, 1.0], + [0.0, 1.0, 0.5], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + vfunc = vertices[:, 2] + level = 0.5 + + # Test 1: Basic usage - get path and length + path, length = mesh.level_path(vfunc, level) + + assert path.ndim == 2, "Path should be 2D array" + assert path.shape[1] == 3, f"Path should have 3D points, got shape {path.shape}" + assert path.shape[0] >= 2, "Path should have at least 2 points" + assert length > 0, f"Path length should be positive, got {length}" + + # Check all points are approximately at the level + z_coords = path[:, 2] + np.testing.assert_allclose(z_coords, level, atol=1e-5, + err_msg=f"All path points should be at z={level}") + + # Test 2: Check if path is closed (first == last point) + is_closed = np.allclose(path[0], path[-1]) + if is_closed: + print(" Path is closed (first point == last point)") + else: + print(" Path is open (endpoints differ)") + + # Test 3: Get triangle indices + path_with_tria, length_with_tria, tria_idx = mesh.level_path( + vfunc, level, get_tria_idx=True + ) + + np.testing.assert_array_equal(path_with_tria, path, + err_msg="Path should be same with get_tria_idx=True") + assert length_with_tria == length, "Length should be same" + assert tria_idx.ndim == 1, "tria_idx should be 1D array" + # For n points, we have n-1 segments + expected_tria_len = path.shape[0] - 1 + assert tria_idx.shape[0] == expected_tria_len, \ + f"tria_idx should have {expected_tria_len} elements, got {tria_idx.shape[0]}" + # Triangle indices should be valid + assert np.all(tria_idx >= 0), "Triangle indices should be non-negative" + assert np.all(tria_idx < len(triangles)), \ + f"Triangle indices should be < {len(triangles)}" + + # Test 4: Get edge information + path_with_edges, length_with_edges, edges_vidxs, edges_relpos = mesh.level_path( + vfunc, level, 
get_edges=True + ) + + np.testing.assert_array_equal(path_with_edges, path, + err_msg="Path should be same with get_edges=True") + assert length_with_edges == length, "Length should be same" + assert edges_vidxs.shape == (path.shape[0], 2), \ + f"edges_vidxs should be (n_points, 2), got {edges_vidxs.shape}" + assert edges_relpos.shape == (path.shape[0],), \ + f"edges_relpos should be (n_points,), got {edges_relpos.shape}" + # Barycentric coordinates should be in [0, 1] + assert np.all(edges_relpos >= 0) and np.all(edges_relpos <= 1), \ + "Barycentric coordinates should be in [0, 1]" + + # Test 5: Get both triangle and edge information + result = mesh.level_path(vfunc, level, get_tria_idx=True, get_edges=True) + path_full, length_full, tria_idx_full, edges_vidxs_full, edges_relpos_full = result + + np.testing.assert_array_equal(path_full, path, + err_msg="Path should be consistent") + np.testing.assert_array_equal(tria_idx_full, tria_idx, + err_msg="tria_idx should be consistent") + np.testing.assert_array_equal(edges_vidxs_full, edges_vidxs, + err_msg="edges_vidxs should be consistent") + np.testing.assert_array_equal(edges_relpos_full, edges_relpos, + err_msg="edges_relpos should be consistent") + + # Test 6: Resampling to fixed number of points + n_resample = 50 + path_resampled, length_resampled = mesh.level_path( + vfunc, level, n_points=n_resample + ) + + assert path_resampled.shape[0] == n_resample, \ + f"Resampled path should have {n_resample} points, got {path_resampled.shape[0]}" + assert path_resampled.shape[1] == 3, "Resampled path should have 3D points" + # Length should be approximately the same + np.testing.assert_allclose(length_resampled, length, rtol=0.1, + err_msg="Resampled length should be close to original") + + +def test_level_path_closed_loop(): + """ + Test level_path on a mesh that produces a closed loop. 
+ """ + # Create a pyramid mesh + vertices = np.array([ + [0.0, 0.0, 0.0], # base + [1.0, 0.0, 0.0], # base + [1.0, 1.0, 0.0], # base + [0.0, 1.0, 0.0], # base + [0.5, 0.5, 1.0], # apex + ]) + triangles = np.array([ + [0, 1, 4], + [1, 2, 4], + [2, 3, 4], + [3, 0, 4], + [0, 2, 1], + [0, 3, 2], + ]) + mesh = TriaMesh(vertices, triangles) + vfunc = vertices[:, 2] + + # Extract level at mid-height (should create closed loop around pyramid) + path, length = mesh.level_path(vfunc, 0.5) + + # For closed loops, first and last points should be identical + is_closed = np.allclose(path[0], path[-1]) + assert is_closed, "Closed loop should have first point equal to last point" + + # All points should be at z=0.5 + np.testing.assert_allclose(path[:, 2], 0.5, atol=1e-5, + err_msg="All points should be at z=0.5") + + # Length should be positive + assert length > 0, "Closed loop should have positive length" + + +def test_gifti_io(tmp_path): + """ + Test reading and writing GIFTI surface meshes using TriaMesh.read_gifti and write_gifti. 
+ """ + import numpy as np + + from lapy.tria_mesh import TriaMesh + + # Create a simple mesh + vertices = np.array([ + [0.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + ]) + triangles = np.array([ + [0, 1, 2], + [0, 1, 3], + [0, 2, 3], + [1, 2, 3], + ]) + mesh = TriaMesh(vertices, triangles) + + # Write to GIFTI + gifti_file = tmp_path / "test.surf.gii" + mesh.write_gifti(str(gifti_file)) + + # Read back + mesh2 = TriaMesh.read_gifti(str(gifti_file)) + + # Check vertices and triangles + np.testing.assert_allclose(mesh2.v, mesh.v, rtol=1e-6, atol=1e-8) + np.testing.assert_array_equal(mesh2.t, mesh.t) + assert mesh2.v.shape == mesh.v.shape + assert mesh2.t.shape == mesh.t.shape + + # Check that mesh2 is a TriaMesh + assert isinstance(mesh2, TriaMesh) + + # Check that mesh2 is not empty + assert mesh2.v.size > 0 + assert mesh2.t.size > 0 + + # Check that mesh2 is not 2D + assert not mesh2.is_2d() diff --git a/pyproject.toml b/pyproject.toml index a2061ffe..b79d5018 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ version = '1.6.0-dev' description = 'A package for differential geometry on meshes (Laplace, FEM)' readme = 'README.md' license = {file = 'LICENSE'} -requires-python = '>=3.9' +requires-python = '>=3.10' authors = [ {name = 'Martin Reuter', email = 'martin.reuter@dzne.de'}, ] @@ -34,10 +34,10 @@ classifiers = [ 'Operating System :: Unix', 'Operating System :: MacOS', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Intended Audience :: Science/Research', @@ -63,13 +63,12 @@ doc = [ 'matplotlib', 'memory-profiler', 'numpydoc', - 'sphinx!=7.2.*', + 'sphinx>=7.0,!=7.2.*', 'sphinxcontrib-bibtex', 'sphinx-copybutton', 'sphinx-design', 
'sphinx-gallery', 'sphinx-issues', - 'pypandoc', 'nbsphinx', 'IPython', # For syntax highlighting in notebooks 'ipykernel', @@ -142,6 +141,12 @@ select = [ "__init__.py" = ["F401"] "examples/*" = ["E501"] # ignore too long lines in example ipynb +[tool.codespell] +ignore-words-list = 'coo,daty' +check-filenames = true +check-hidden = true +skip = './.git,./build,./.mypy_cache,./.pytest_cache' + [tool.pytest.ini_options] minversion = '6.0' addopts = '--durations 20 --junit-xml=junit-results.xml --verbose'